signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ClusterManagerMetrics { /** * Create a map of session status - > metrics . * @ return the map . */ private Map < SessionStatus , MetricsTimeVaryingInt > createSessionStatusToMetricsMap ( ) { } }
Map < SessionStatus , MetricsTimeVaryingInt > m = new HashMap < SessionStatus , MetricsTimeVaryingInt > ( ) ; for ( SessionStatus endState : SESSION_END_STATES ) { String name = endState . toString ( ) . toLowerCase ( ) + "_sessions" ; m . put ( endState , new MetricsTimeVaryingInt ( name , registry ) ) ; } return m ;
public class CmsStringUtil {
    /**
     * Returns <code>true</code> if the provided Objects are either both <code>null</code>
     * or equal according to {@link Object#equals(Object)}.<p>
     *
     * @param value1 the first object to compare
     * @param value2 the second object to compare
     * @return <code>true</code> if the provided Objects are either both <code>null</code>
     *      or equal according to {@link Object#equals(Object)}
     */
    public static boolean isEqual(Object value1, Object value2) {
        // null is only equal to null; otherwise defer to equals().
        return (value1 == null) ? (value2 == null) : value1.equals(value2);
    }
}
public class GenericGenerators {
    /**
     * Generates instructions for a try-catch block.
     *
     * @param tryCatchBlockNode try catch block node to populate with labels and relevant information
     * @param exceptionType exception type to catch ({@code null} means catch any exception)
     * @param tryInsnList instructions to execute for try block
     * @param catchInsnList instructions to execute for catch block
     * @return instructions for a try catch block
     * @throws NullPointerException if any argument other than {@code exceptionType} is {@code null} or contains {@code null}
     * @throws IllegalArgumentException if {@code exceptionType} is not an object type (technically must inherit from
     *         {@link Throwable}, but no way to check this)
     */
    public static InsnList tryCatchBlock(TryCatchBlockNode tryCatchBlockNode, Type exceptionType, InsnList tryInsnList, InsnList catchInsnList) {
        Validate.notNull(tryInsnList); // exceptionType can be null
        Validate.notNull(catchInsnList);
        // NOTE(review): tryCatchBlockNode itself is not explicitly validated here; a
        // null argument still fails with NPE on the field assignments below — confirm
        // whether an up-front Validate.notNull is preferred for consistency.
        if (exceptionType != null) {
            Validate.isTrue(exceptionType.getSort() == Type.OBJECT);
        }
        InsnList ret = new InsnList();
        LabelNode tryLabelNode = new LabelNode();
        LabelNode catchLabelNode = new LabelNode();
        LabelNode endLabelNode = new LabelNode();
        // Protected region runs from tryLabelNode up to catchLabelNode; the handler
        // begins at catchLabelNode itself.
        tryCatchBlockNode.start = tryLabelNode;
        tryCatchBlockNode.end = catchLabelNode;
        tryCatchBlockNode.handler = catchLabelNode;
        // null type in the exception table means "catch everything" (finally-style).
        tryCatchBlockNode.type = exceptionType == null ? null : exceptionType.getInternalName();
        ret.add(tryLabelNode);
        ret.add(tryInsnList);
        // On normal completion of the try body, jump over the handler instructions.
        ret.add(new JumpInsnNode(Opcodes.GOTO, endLabelNode));
        ret.add(catchLabelNode);
        ret.add(catchInsnList);
        ret.add(endLabelNode);
        return ret;
    }
}
public class StateMachine {
    /**
     * Add a valid transition from one state to one or more states.
     *
     * @param from the source state
     * @param to a state reachable from {@code from}
     * @param moreTo additional states reachable from {@code from}
     * @return this state machine, for chaining
     */
    public StateMachine<T> withTransition(T from, T to, T... moreTo) {
        // NOTE(review): put() replaces any transition set previously registered for
        // 'from' rather than merging into it — confirm the overwrite is intended,
        // since the javadoc says "add".
        transitions.put(from, EnumSet.of(to, moreTo));
        return this;
    }
}
public class WaveformFinder {
    /**
     * Ask the specified player for the waveform preview in the specified slot with the specified rekordbox ID,
     * using cached media instead if it is available, and possibly giving up if we are in passive mode.
     *
     * Lookup order: metadata cache, then registered metadata providers, then (unless
     * passive mode forbids it) an active dbserver request to the player.
     *
     * @param trackReference uniquely identifies the desired waveform preview
     * @param failIfPassive will prevent the request from taking place if we are in passive mode, so that automatic
     *                      waveform updates will use available caches only
     * @return the waveform preview found, if any
     */
    private WaveformPreview requestPreviewInternal(final DataReference trackReference, final boolean failIfPassive) {
        // First check if we are using cached data for this slot
        MetadataCache cache = MetadataFinder.getInstance().getMetadataCache(SlotReference.getSlotReference(trackReference));
        if (cache != null) {
            return cache.getWaveformPreview(null, trackReference);
        }

        // Then see if any registered metadata providers can offer it for us.
        final MediaDetails sourceDetails = MetadataFinder.getInstance().getMediaDetailsFor(trackReference.getSlotReference());
        if (sourceDetails != null) {
            final WaveformPreview provided = MetadataFinder.getInstance().allMetadataProviders.getWaveformPreview(sourceDetails, trackReference);
            if (provided != null) {
                return provided;
            }
        }

        // At this point, unless we are allowed to actively request the data, we are done. We can always actively
        // request tracks from rekordbox.
        if (MetadataFinder.getInstance().isPassive() && failIfPassive && trackReference.slot != CdjStatus.TrackSourceSlot.COLLECTION) {
            return null;
        }

        // We have to actually request the preview using the dbserver protocol.
        ConnectionManager.ClientTask<WaveformPreview> task = new ConnectionManager.ClientTask<WaveformPreview>() {
            @Override
            public WaveformPreview useClient(Client client) throws Exception {
                return getWaveformPreview(trackReference.rekordboxId, SlotReference.getSlotReference(trackReference), client);
            }
        };

        try {
            return ConnectionManager.getInstance().invokeWithClientSession(trackReference.player, task, "requesting waveform preview");
        } catch (Exception e) {
            // Best-effort: a failed request is logged and reported as "not found".
            logger.error("Problem requesting waveform preview, returning null", e);
        }
        return null;
    }
}
public class ServerLogReaderPreTransactional {
    /**
     * Reads from the edit log until it reaches an operation which can
     * be considered a namespace notification (like FILE_ADDED, FILE_CLOSED
     * or NODE_DELETED).
     *
     * @return the notification object or null if nothing is to be returned
     *         at the moment.
     * @throws IOException raised when a fatal error occurred.
     */
    public NamespaceNotification getNamespaceNotification() throws IOException {
        FSEditLogOp op = null;
        NamespaceNotification notification = null;

        // Keep looping until we reach an operation that can be
        // considered a notification.
        while (true) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("edits.size=" + editsFile.length() + " editsNew.size=" + editsNewFile.length());
            }
            try {
                op = inputStream.readOp();
                if (LOG.isDebugEnabled()) {
                    LOG.debug("inputStream.readOP() returned " + op);
                }
            } catch (IOException e) {
                // A read failure is treated as recoverable: reload the log and
                // tell the caller there is nothing to emit right now.
                LOG.warn("inputStream.readOp() failed", e);
                tryReloadingEditLog();
                return null;
            } catch (Exception e2) {
                // Anything else is fatal for this reader.
                LOG.error("Error reading log operation", e2);
                throw new IOException(e2);
            }

            // No operation to read at the moment from the transaction log
            if (op == null) {
                core.getMetrics().reachedEditLogEnd.inc();
                handleNullRead();
                trySwitchingEditLog();
                return null;
            } else {
                core.getMetrics().readOperations.inc();
                readNullAfterStreamFinished = false;
            }

            if (LOG.isDebugEnabled()) {
                LOG.debug("Read operation: " + op + " with txId=" + op.getTransactionId());
            }

            // Skipped operations still advance the stream position.
            if (ServerLogReaderUtil.shouldSkipOp(expectedTransactionId, op)) {
                updateStreamPosition();
                continue;
            }
            expectedTransactionId = ServerLogReaderUtil.checkTransactionId(expectedTransactionId, op);
            updateStreamPosition();

            // Test if it can be considered a notification
            notification = ServerLogReaderUtil.createNotification(op);
            if (notification != null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Emitting " + NotifierUtils.asString(notification));
                }
                core.getMetrics().readNotifications.inc();
                return notification;
            }
        }
    }
}
public class ManualDescriptor {
    /**
     * Indexed setter for meSHList - sets an indexed value - A collection of objects of type uima.julielab.uima.MeSHHeading
     * @generated
     * @param i index in the array to set
     * @param v value to set into the array
     */
    public void setMeSHList(int i, MeshHeading v) {
        // Generated UIMA JCas accessor: verify the feature is defined, bounds-check
        // the index against the underlying array, then store the FS reference.
        if (ManualDescriptor_Type.featOkTst && ((ManualDescriptor_Type) jcasType).casFeat_meSHList == null)
            jcasType.jcas.throwFeatMissing("meSHList", "de.julielab.jules.types.pubmed.ManualDescriptor");
        jcasType.jcas.checkArrayBounds(jcasType.ll_cas.ll_getRefValue(addr, ((ManualDescriptor_Type) jcasType).casFeatCode_meSHList), i);
        jcasType.ll_cas.ll_setRefArrayValue(jcasType.ll_cas.ll_getRefValue(addr, ((ManualDescriptor_Type) jcasType).casFeatCode_meSHList), i, jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class KamStoreServiceImpl { /** * { @ inheritDoc } */ @ Override public List < Kam > getCatalog ( ) throws KamStoreServiceException { } }
List < Kam > list = new ArrayList < Kam > ( ) ; try { for ( KamInfo kamInfo : kamCatalogDao . getCatalog ( ) ) { list . add ( convert ( kamInfo ) ) ; } } catch ( SQLException e ) { logger . warn ( e . getMessage ( ) ) ; throw new KamStoreServiceException ( e . getMessage ( ) ) ; } return list ;
public class IntVector { /** * Removes the first occurrence of the argument from this vector . * If the object is found in this vector , each component in the vector * with an index greater or equal to the object ' s index is shifted * downward to have an index one smaller than the value it had * previously . * @ param s Int to remove from array * @ return True if the int was removed , false if it was not found */ public final boolean removeElement ( int s ) { } }
for ( int i = 0 ; i < m_firstFree ; i ++ ) { if ( m_map [ i ] == s ) { if ( ( i + 1 ) < m_firstFree ) System . arraycopy ( m_map , i + 1 , m_map , i - 1 , m_firstFree - i ) ; else m_map [ i ] = java . lang . Integer . MIN_VALUE ; m_firstFree -- ; return true ; } } return false ;
public class ReuseOracle {
    /**
     * Filters all the query elements corresponding to "reflexive" edges in the reuse tree, executes the shorter query,
     * and fills the filtered outputs into the resulting output word.
     *
     * @param query the input query with "reflexive" symbols (may be a suffix of the original query, if a system state
     *        is reused).
     * @param partialOutput the output information from the tree with {@code null} entries for all "non-reflexive"
     *        edges.
     * @param processQuery a function that actually processes the (shortened) query.
     * @return the query result including the outputs of the "reflexive" symbol executions.
     */
    private QueryResult<S, O> filterAndProcessQuery(Word<I> query, Word<O> partialOutput, Function<Word<I>, QueryResult<S, O>> processQuery) {
        final LinkedList<I> filteredQueryList = new LinkedList<>(query.asList());
        final Iterator<I> queryIterator = filteredQueryList.iterator();

        // filter "reflexive" edges: partialOutput is index-aligned with the query;
        // a non-null entry marks a symbol whose output is already known, so that
        // symbol is dropped from the query to be executed.
        for (final O outputSymbol : partialOutput) {
            queryIterator.next();
            if (outputSymbol != null) {
                queryIterator.remove();
            }
        }

        // process the query
        final QueryResult<S, O> res = processQuery.apply(Word.fromList(filteredQueryList));

        final WordBuilder<O> wordBuilder = new WordBuilder<>();
        final Iterator<O> resultIterator = res.output.iterator();

        // insert back the a priori available outputs of "reflexive" edges; null
        // slots are filled in order from the executed query's output.
        for (final O output : partialOutput) {
            if (output == null) {
                wordBuilder.add(resultIterator.next());
            } else {
                wordBuilder.add(output);
            }
        }

        return new QueryResult<>(wordBuilder.toWord(), res.newState);
    }
}
public class Boot { /** * NOTE : This method cannot be run from jar */ public static void main ( Class main , String [ ] args , String [ ] packageNamesToWeave ) throws Exception { } }
for ( String packageName : packageNamesToWeave ) { weavePackage ( packageName ) ; } ArrayList < String > l = new ArrayList < String > ( Arrays . asList ( args ) ) ; l . add ( 0 , "-mainClass" ) ; l . add ( 1 , main . getName ( ) ) ; _init . boot2 ( l . toArray ( new String [ 0 ] ) ) ;
public class HudsonPrivateSecurityRealm { /** * Creates a user account . Requires { @ link Jenkins # ADMINISTER } */ @ Restricted ( NoExternalUse . class ) public User createAccountByAdmin ( StaplerRequest req , StaplerResponse rsp , String addUserView , String successView ) throws IOException , ServletException { } }
checkPermission ( Jenkins . ADMINISTER ) ; User u = createAccount ( req , rsp , false , addUserView ) ; if ( u != null && successView != null ) { rsp . sendRedirect ( successView ) ; } return u ;
public class CellUtil {
    /**
     * Sets a cell's value, automatically matching a style from the given style set.<br>
     * Header cells get the header style by default, but numeric and date values in a
     * header are still styled with the number/date styles.
     *
     * @param cell the cell to write
     * @param value the value to set
     * @param styleSet the cell style set, including date and number styles
     * @param isHeader whether this is a header cell
     */
    public static void setCellValue(Cell cell, Object value, StyleSet styleSet, boolean isHeader) {
        // NOTE(review): styleSet is dereferenced unconditionally here but null-checked
        // further below — confirm callers never pass a null styleSet.
        final CellStyle headCellStyle = styleSet.getHeadCellStyle();
        final CellStyle cellStyle = styleSet.getCellStyle();
        if (isHeader && null != headCellStyle) {
            cell.setCellStyle(headCellStyle);
        } else if (null != cellStyle) {
            cell.setCellStyle(cellStyle);
        }

        if (null == value) {
            cell.setCellValue(StrUtil.EMPTY);
        } else if (value instanceof FormulaCellValue) {
            // formula
            cell.setCellFormula(((FormulaCellValue) value).getValue());
        } else if (value instanceof Date) {
            // Dates override the base style with the dedicated date style.
            if (null != styleSet && null != styleSet.getCellStyleForDate()) {
                cell.setCellStyle(styleSet.getCellStyleForDate());
            }
            cell.setCellValue((Date) value);
        } else if (value instanceof Calendar) {
            cell.setCellValue((Calendar) value);
        } else if (value instanceof Boolean) {
            cell.setCellValue((Boolean) value);
        } else if (value instanceof RichTextString) {
            cell.setCellValue((RichTextString) value);
        } else if (value instanceof Number) {
            // Floating-point numbers get the dedicated number style when available.
            if ((value instanceof Double || value instanceof Float) && null != styleSet && null != styleSet.getCellStyleForNumber()) {
                cell.setCellStyle(styleSet.getCellStyleForNumber());
            }
            cell.setCellValue(((Number) value).doubleValue());
        } else {
            // Fallback: everything else is written as its string representation.
            cell.setCellValue(value.toString());
        }
    }
}
public class Bootstrap2FieldAttrProcessor { /** * { @ inheritDoc } */ @ Override protected void addAttributesToInputElement ( Element element , String fieldName ) { } }
element . setAttribute ( StandardDialect . PREFIX + ":" + AbstractSpringFieldAttrProcessor . ATTR_NAME , "*{" + fieldName + "}" ) ;
public class FileUtils {
    /**
     * Copy a file from one location to another.
     *
     * @param in Source file
     * @param out Target file
     * @throws IOException if any error occurred.
     */
    public static void copyFile(File in, File out) throws IOException {
        // try-with-resources: previously, a failure opening the output stream
        // leaked the already-open input channel.
        try (FileChannel inChannel = new FileInputStream(in).getChannel();
             FileChannel outChannel = new FileOutputStream(out).getChannel()) {
            // transferTo() may transfer fewer bytes than requested; loop until the
            // whole file has been copied.
            final long size = inChannel.size();
            long position = 0;
            while (position < size) {
                position += inChannel.transferTo(position, size - position, outChannel);
            }
        }
    }
}
public class KeyrefPaser { /** * Write alt element * @ param srcElem element content */ private void writeAlt ( Element srcElem ) throws SAXException { } }
final AttributesImpl atts = new AttributesImpl ( ) ; XMLUtils . addOrSetAttribute ( atts , ATTRIBUTE_NAME_CLASS , TOPIC_ALT . toString ( ) ) ; getContentHandler ( ) . startElement ( NULL_NS_URI , TOPIC_ALT . localName , TOPIC_ALT . localName , atts ) ; domToSax ( srcElem , false ) ; getContentHandler ( ) . endElement ( NULL_NS_URI , TOPIC_ALT . localName , TOPIC_ALT . localName ) ;
public class JsonObject { /** * Returns the property value as object . * @ param property * the property * @ return the value as entity */ public JsonObject getAsObject ( String property ) { } }
if ( ! super . has ( property ) ) { super . set ( property , new JsonValue ( new JsonObject ( ) ) , false ) ; } return get ( property ) . getAsObject ( ) ;
public class Site {
    /**
     * Create a native VoltDB execution engine.
     *
     * Chooses the engine implementation from {@code m_backend}: plain JNI, a
     * Mockito-wrapped JNI engine (spy, for tests), or an IPC engine. After
     * construction, loads the startup catalog and applies the configured query
     * batch timeout. Any failure crashes the local VoltDB instance.
     */
    ExecutionEngine initializeEE() {
        String hostname = CoreUtils.getHostnameOrAddress();
        HashinatorConfig hashinatorConfig = TheHashinator.getCurrentConfig();
        ExecutionEngine eeTemp = null;
        Deployment deploy = m_context.cluster.getDeployment().get("deployment");
        final int defaultDrBufferSize = Integer.getInteger("DR_DEFAULT_BUFFER_SIZE", 512 * 1024); // 512KB
        // Export flush interval: a non-positive system-property value falls back to 4s.
        int configuredTimeout = Integer.getInteger("MAX_EXPORT_BUFFER_FLUSH_INTERVAL", 4 * 1000);
        final int exportFlushTimeout = configuredTimeout > 0 ? configuredTimeout : 4 * 1000;
        int tempTableMaxSize = deploy.getSystemsettings().get("systemsettings").getTemptablemaxsize();
        if (System.getProperty("TEMP_TABLE_MAX_SIZE") != null) {
            // Allow a system property to override the deployment setting
            // for testing purposes.
            tempTableMaxSize = Integer.getInteger("TEMP_TABLE_MAX_SIZE");
        }
        try {
            // NATIVE_EE_JNI and NATIVE_EE_LARGE_JNI
            if (m_backend.isDefaultJNITarget) {
                eeTemp = new ExecutionEngineJNI(
                        m_context.cluster.getRelativeIndex(),
                        m_siteId,
                        m_partitionId,
                        m_context.getNodeSettings().getLocalSitesCount(),
                        CoreUtils.getHostIdFromHSId(m_siteId),
                        hostname,
                        m_context.cluster.getDrclusterid(),
                        defaultDrBufferSize,
                        tempTableMaxSize,
                        hashinatorConfig,
                        m_isLowestSiteId,
                        exportFlushTimeout);
            } else if (m_backend == BackendTarget.NATIVE_EE_SPY_JNI) {
                // Wrap a real JNI engine in a Mockito spy via reflection, so the
                // production code has no compile-time Mockito dependency.
                Class<?> spyClass = Class.forName("org.mockito.Mockito");
                Method spyMethod = spyClass.getDeclaredMethod("spy", Object.class);
                ExecutionEngine internalEE = new ExecutionEngineJNI(
                        m_context.cluster.getRelativeIndex(),
                        m_siteId,
                        m_partitionId,
                        m_context.getNodeSettings().getLocalSitesCount(),
                        CoreUtils.getHostIdFromHSId(m_siteId),
                        hostname,
                        m_context.cluster.getDrclusterid(),
                        defaultDrBufferSize,
                        tempTableMaxSize,
                        hashinatorConfig,
                        m_isLowestSiteId,
                        exportFlushTimeout);
                eeTemp = (ExecutionEngine) spyMethod.invoke(null, internalEE);
            } else if (m_backend.isIPC) {
                // set up the EE over IPC
                eeTemp = new ExecutionEngineIPC(
                        m_context.cluster.getRelativeIndex(),
                        m_siteId,
                        m_partitionId,
                        m_context.getNodeSettings().getLocalSitesCount(),
                        CoreUtils.getHostIdFromHSId(m_siteId),
                        hostname,
                        m_context.cluster.getDrclusterid(),
                        defaultDrBufferSize,
                        tempTableMaxSize,
                        m_backend,
                        VoltDB.instance().getConfig().m_ipcPort,
                        hashinatorConfig,
                        m_isLowestSiteId,
                        exportFlushTimeout);
            } else {
                /* This seems very bad. */
                throw new VoltAbortException(String.format("Unexpected BackendTarget value %s", m_backend));
            }
            eeTemp.loadCatalog(m_startupConfig.m_timestamp, m_startupConfig.m_serializedCatalog);
            eeTemp.setBatchTimeout(m_context.cluster.getDeployment().get("deployment").getSystemsettings().get("systemsettings").getQuerytimeout());
        }
        // just print error info and bail if we run into an error here
        catch (final Exception ex) {
            hostLog.l7dlog(Level.FATAL, LogKeys.host_ExecutionSite_FailedConstruction.name(), new Object[] { m_siteId, m_siteIndex }, ex);
            VoltDB.crashLocalVoltDB(ex.getMessage(), true, ex);
        }
        return eeTemp;
    }
}
public class OmsLabeler {
    /**
     * getNeighbours will get the pixel value of i's neighbour that's ox and oy
     * away from i; if the point is outside the image, then 0 is returned.
     * This version gets from the source image.
     *
     * @param d_w width of src1d
     * @param d_h height of src1d
     */
    private int getNeighbours(int[] src1d, int i, int ox, int oy, int d_w, int d_h) {
        // Convert the linear index into (x, y) and apply the offset.
        final int x = (i % d_w) + ox;
        final int y = (i / d_w) + oy;
        // Out-of-bounds neighbours read as 0.
        if (x < 0 || x >= d_w || y < 0 || y >= d_h) {
            return 0;
        }
        // Mask to the low byte (grayscale channel).
        return src1d[y * d_w + x] & 0x000000ff;
    }
}
public class Configuration {
    /**
     * Set the value at the given index in the statement.
     *
     * @param <T> value type
     * @param stmt statement
     * @param path path
     * @param i one based index in statement
     * @param value value to bind
     * @throws SQLException if binding the parameter fails
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public <T> void set(PreparedStatement stmt, Path<?> path, int i, T value) throws SQLException {
        if (value == null || value instanceof Null) {
            // Binding SQL NULL: prefer the column's declared JDBC type when the
            // path's column metadata provides one; otherwise fall back to Types.NULL.
            Integer sqlType = null;
            if (path != null) {
                ColumnMetadata columnMetadata = ColumnMetadata.getColumnMetadata(path);
                if (columnMetadata.hasJdbcType()) {
                    sqlType = columnMetadata.getJdbcType();
                }
            }
            if (sqlType != null) {
                stmt.setNull(i, sqlType);
            } else {
                stmt.setNull(i, Types.NULL);
            }
        } else {
            // Non-null values are delegated to the registered type handler for
            // this path/class pair.
            getType(path, (Class) value.getClass()).setValue(stmt, i, value);
        }
    }
}
public class CommerceDiscountRelUtil {
    /**
     * Returns a range of all the commerce discount rels where commerceDiscountId = &#63; and classNameId = &#63;.
     *
     * <p>
     * Useful when paginating results. Returns a maximum of <code>end - start</code> instances. <code>start</code> and
     * <code>end</code> are not primary keys, they are indexes in the result set. Thus, <code>0</code> refers to the
     * first result in the set. Setting both <code>start</code> and <code>end</code> to {@link QueryUtil#ALL_POS} will
     * return the full result set. If <code>orderByComparator</code> is specified, then the query will include the
     * given ORDER BY logic. If <code>orderByComparator</code> is absent and pagination is required
     * (<code>start</code> and <code>end</code> are not {@link QueryUtil#ALL_POS}), then the query will include the
     * default ORDER BY logic from {@link CommerceDiscountRelModelImpl}. If both <code>orderByComparator</code> and
     * pagination are absent, for performance reasons, the query will not have an ORDER BY clause and the returned
     * result set will be sorted on by the primary key in an ascending order.
     * </p>
     *
     * @param commerceDiscountId the commerce discount ID
     * @param classNameId the class name ID
     * @param start the lower bound of the range of commerce discount rels
     * @param end the upper bound of the range of commerce discount rels (not inclusive)
     * @return the range of matching commerce discount rels
     */
    public static List<CommerceDiscountRel> findByCD_CN(long commerceDiscountId, long classNameId, int start, int end) {
        // Static facade: delegates the paginated lookup to the persistence bean.
        return getPersistence().findByCD_CN(commerceDiscountId, classNameId, start, end);
    }
}
public class RuleLoaderImpl { /** * Prints all of the { @ link RulePhase } objects in the order that they should execute . This is primarily for debug purposes and should be called * before the entire { @ link RuleProvider } list is sorted , as this will allow us to print the { @ link RulePhase } list without the risk of * user - introduced cycles making the sort impossible . */ private void printRulePhases ( List < RuleProvider > allProviders ) { } }
List < RuleProvider > unsortedPhases = new ArrayList < > ( ) ; for ( RuleProvider provider : allProviders ) { if ( provider instanceof RulePhase ) unsortedPhases . add ( provider ) ; } List < RuleProvider > sortedPhases = RuleProviderSorter . sort ( unsortedPhases ) ; StringBuilder rulePhaseSB = new StringBuilder ( ) ; for ( RuleProvider phase : sortedPhases ) { Class < ? > unproxiedClass = Proxies . unwrap ( phase ) . getClass ( ) ; rulePhaseSB . append ( "\tPhase: " ) . append ( unproxiedClass . getSimpleName ( ) ) . append ( System . lineSeparator ( ) ) ; } LOG . info ( "Rule Phases: [\n" + rulePhaseSB . toString ( ) + "]" ) ;
public class MetadataDeployer { /** * { @ inheritDoc } */ public Deployment deploy ( URL url , Context context , ClassLoader parent ) throws DeployException { } }
Connector c = ( Connector ) context . get ( Constants . ATTACHMENT_MERGED_METADATA ) ; if ( c == null ) c = ( Connector ) context . get ( Constants . ATTACHMENT_RA_XML_METADATA ) ; if ( c == null ) throw new DeployException ( "No metadata for " + url . toExternalForm ( ) + " found" ) ; try { File archive = new File ( url . toURI ( ) ) ; Metadata m = registerMetadata ( archive . getName ( ) , c . copy ( ) , archive ) ; return new MetadataDeployment ( url , m , metadataRepository ) ; } catch ( Throwable t ) { throw new DeployException ( "Deployment " + url . toExternalForm ( ) + " failed" , t ) ; }
public class DescribeDimensionKeysResult { /** * If < code > PartitionBy < / code > was present in the request , < code > PartitionKeys < / code > contains the breakdown of * dimension keys by the specified partitions . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPartitionKeys ( java . util . Collection ) } or { @ link # withPartitionKeys ( java . util . Collection ) } if you want * to override the existing values . * @ param partitionKeys * If < code > PartitionBy < / code > was present in the request , < code > PartitionKeys < / code > contains the breakdown * of dimension keys by the specified partitions . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeDimensionKeysResult withPartitionKeys ( ResponsePartitionKey ... partitionKeys ) { } }
if ( this . partitionKeys == null ) { setPartitionKeys ( new java . util . ArrayList < ResponsePartitionKey > ( partitionKeys . length ) ) ; } for ( ResponsePartitionKey ele : partitionKeys ) { this . partitionKeys . add ( ele ) ; } return this ;
public class Word2VecModelThrift { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false otherwise */ public boolean isSet ( _Fields field ) { } }
if ( field == null ) { throw new IllegalArgumentException ( ) ; } switch ( field ) { case VOCAB : return isSetVocab ( ) ; case LAYER_SIZE : return isSetLayerSize ( ) ; case VECTORS : return isSetVectors ( ) ; } throw new IllegalStateException ( ) ;
public class RenderUtils { /** * Renders disabled installer line . The same as installer line , but with ' - ' before installer name and * without markers . * @ param type disabled installer class * @ return rendered disabled installer line */ public static String renderDisabledInstaller ( final Class < FeatureInstaller > type ) { } }
return String . format ( "-%-19s %-38s" , FeatureUtils . getInstallerExtName ( type ) , brackets ( renderClass ( type ) ) ) ;
public class XmlStringTools {
    /**
     * Wrap a text inside a tag.
     *
     * @param buffer StringBuffer to fill
     * @param text the text to wrap
     * @param tag the tag to use
     * @return the buffer
     */
    public static StringBuffer appendTextInsideTag(StringBuffer buffer, String text, String tag) {
        // Convenience overload: delegates with no extra tag attributes.
        return appendTextInsideTag(buffer, text, tag, EMPTY_MAP);
    }
}
public class RectifyImageOps {
    /**
     * Adjust the rectification such that only pixels which overlap the original left image can be seen. For use with
     * calibrated stereo images having a known baseline. Image processing is easier since only the "true" image pixels
     * are visible, but information along the image border has been discarded. The rectification matrices are
     * overwritten with adjusted values on output.
     *
     * @param paramLeft Intrinsic parameters for left camera. Not modified.
     * @param rectifyLeft Rectification matrix for left image. Input and Output. Modified.
     * @param rectifyRight Rectification matrix for right image. Input and Output. Modified.
     * @param rectifyK Rectification calibration matrix. Input and Output. Modified.
     */
    public static void allInsideLeft(CameraPinholeBrown paramLeft, DMatrixRMaj rectifyLeft, DMatrixRMaj rectifyRight, DMatrixRMaj rectifyK) {
        // Delegates to the double-precision implementation.
        ImplRectifyImageOps_F64.allInsideLeft(paramLeft, rectifyLeft, rectifyRight, rectifyK);
    }
}
public class MathUtils {
    /**
     * See: http://stackoverflow.com/questions/466204/rounding-off-to-nearest-power-of-2
     *
     * Rounds {@code v} up to the next power of two (a power of two is returned
     * unchanged; 0 returns 0).
     *
     * @param v the number to getFromOrigin the next power of 2 for
     * @return the next power of 2 for the passed in value
     */
    public static long nextPowOf2(long v) {
        v--;
        // Smear the highest set bit into every lower position...
        v |= v >> 1;
        v |= v >> 2;
        v |= v >> 4;
        v |= v >> 8;
        v |= v >> 16;
        // Required for 64-bit inputs: without this shift, values above 2^32
        // were rounded incorrectly.
        v |= v >> 32;
        // ...then add one to produce the next power of two.
        v++;
        return v;
    }
}
public class ScanningQueryEngine {
    /**
     * Create a {@link ExtractFromRow} implementation that accesses the REFERENCE value(s) in the properties of the
     * node identified by the supplied selector names.
     *
     * @param selectorName the name of the selector containing the node(s) to be accessed; may not be null
     * @param context the context in which the query is to be executed; may not be null
     * @param columns the result column definition; may not be null
     * @param sources the query sources for the repository; may not be null
     * @param defaultType the type that should be used by default, or null if an exception should be thrown when the
     *        type for the property name could not be determined
     * @return the dynamic operation implementation; never null
     */
    protected ExtractFromRow createExtractReferencesFromRow(final String selectorName, QueryContext context, Columns columns, QuerySources sources, TypeFactory<?> defaultType) {
        final NodeCache cache = context.getNodeCache(sources.getWorkspaceName());
        // Find the expected property type of the value ...
        assert columns != null;
        final int indexInRow = columns.getSelectorIndex(selectorName);
        // Reference values are extracted as strings.
        final TypeFactory<?> typeFactory = context.getTypeSystem().getStringFactory();
        final boolean trace = LOGGER.isTraceEnabled();
        return new ExtractFromRow() {
            @Override
            public Object getValueInRow(RowAccessor row) {
                CachedNode node = row.getNode(indexInRow);
                if (node == null) return null;
                // Scan all properties of the node, collecting every (simple)
                // reference value, whether single- or multi-valued.
                List<Object> values = null;
                for (Iterator<org.modeshape.jcr.value.Property> iter = node.getProperties(cache); iter.hasNext();) {
                    org.modeshape.jcr.value.Property prop = iter.next();
                    if (prop == null || prop.isEmpty()) continue;
                    if (prop.isReference() || prop.isSimpleReference()) {
                        if (prop.isSingle()) {
                            Object value = prop.getFirstValue();
                            if (value != null) {
                                if (values == null) values = new LinkedList<>();
                                values.add(typeFactory.create(value));
                            }
                        } else {
                            assert prop.isMultiple();
                            for (Object value : prop) {
                                if (value == null) continue;
                                if (values == null) values = new LinkedList<>();
                                values.add(typeFactory.create(value));
                            }
                        }
                    }
                }
                if (values == null || values.isEmpty()) return null;
                if (trace) {
                    LOGGER.trace("Found references in '{0}': {1}", node.getPath(cache), values);
                }
                return values.toArray();
            }

            @Override
            public TypeFactory<?> getType() {
                return typeFactory;
            }

            @Override
            public String toString() {
                return "(references " + selectorName + ")";
            }
        };
    }
}
public class BasePasswordManagementService {
    /**
     * Orders security questions consistently.
     *
     * @param questionMap A map of question/answer key/value pairs
     * @return A list of questions in a consistent order
     */
    public static List<String> canonicalizeSecurityQuestions(final Map<String, String> questionMap) {
        // Sort the question texts case-insensitively for a stable presentation order.
        final List<String> orderedQuestions = new ArrayList<>(questionMap.keySet());
        orderedQuestions.sort(String.CASE_INSENSITIVE_ORDER);
        return orderedQuestions;
    }
}
public class SoapServerActionBuilder { /** * Generic request builder for receiving SOAP messages on server . * @ return */ public SoapServerRequestActionBuilder receive ( ) { } }
SoapServerRequestActionBuilder soapServerRequestActionBuilder = new SoapServerRequestActionBuilder ( action , soapServer ) . withApplicationContext ( applicationContext ) ; return soapServerRequestActionBuilder ;
public class ParticipantCreator { /** * Add the requested post parameters to the Request . * @ param request Request to add post params to */ private void addPostParams ( final Request request ) { } }
if ( identifier != null ) { request . addPostParam ( "Identifier" , identifier ) ; } if ( friendlyName != null ) { request . addPostParam ( "FriendlyName" , friendlyName ) ; } if ( proxyIdentifier != null ) { request . addPostParam ( "ProxyIdentifier" , proxyIdentifier ) ; } if ( proxyIdentifierSid != null ) { request . addPostParam ( "ProxyIdentifierSid" , proxyIdentifierSid ) ; }
public class XCasePartImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setTypeGuard ( JvmTypeReference newTypeGuard ) { } }
// EMF-generated containment setter: detaches the old guard (eInverseRemove),
// attaches the new one (eInverseAdd), performs the basic set, then dispatches the
// accumulated notification chain. When old == new, it still emits a SET
// notification if any adapter requires it. Generated code -- do not hand-edit.
if ( newTypeGuard != typeGuard ) { NotificationChain msgs = null ; if ( typeGuard != null ) msgs = ( ( InternalEObject ) typeGuard ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - XbasePackage . XCASE_PART__TYPE_GUARD , null , msgs ) ; if ( newTypeGuard != null ) msgs = ( ( InternalEObject ) newTypeGuard ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - XbasePackage . XCASE_PART__TYPE_GUARD , null , msgs ) ; msgs = basicSetTypeGuard ( newTypeGuard , msgs ) ; if ( msgs != null ) msgs . dispatch ( ) ; } else if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , XbasePackage . XCASE_PART__TYPE_GUARD , newTypeGuard , newTypeGuard ) ) ;
public class HBaseRequestAdapter { /** * < p > adapt . < / p > * @ param delete a { @ link org . apache . hadoop . hbase . client . Delete } object . * @ return a { @ link RowMutation } object . */ public RowMutation adapt ( Delete delete ) { } }
// Creates a RowMutation for the delete's row key, then delegates to the two-arg
// adapt overload to translate the HBase Delete into mutation entries.
RowMutation rowMutation = newRowMutationModel ( delete . getRow ( ) ) ; adapt ( delete , rowMutation ) ; return rowMutation ;
public class StackTracePrinter { /** * Marked as deprecated because of the erroneous name . Call printAllStackTraces instead . * @ param filter only thread where the name of the thread contains this given { @ code filter } key are printed . If the filter is null , no filtering will be performed . * @ param logger the logger used for printing . * @ param logLevel the level to print . */ @ Deprecated public static void printAllStackTrackes ( final String filter , final Logger logger , final LogLevel logLevel ) { } }
// Misspelled alias kept only for backward compatibility; delegates to the
// correctly named printAllStackTraces.
printAllStackTraces ( filter , logger , logLevel ) ;
public class PortAllocator {
    /**
     * Check if the port is available on the host by attempting to bind a server
     * socket to it. This is inherently racy (the port may be taken between this
     * check and actual use) but it's better than nothing.
     *
     * @param port Port number to check.
     * @return True if the port could be bound and cleanly released, false otherwise.
     */
    private boolean portAvailable(final int port) {
        // try-with-resources guarantees the probe socket is closed. A failure to
        // close is now treated as "not available" instead of being logged and
        // ignored, since a port we cannot release cleanly is not safely usable.
        try (ServerSocket socket = new ServerSocket(port)) {
            return true;
        } catch (IOException ignored) {
            return false;
        }
    }
}
public class RepositoryCache { /** * Creates a new workspace in the repository coupled with external document * store . * @ param name the name of the repository * @ param connectors connectors to the external systems . * @ return workspace cache for the new workspace . */ public WorkspaceCache createExternalWorkspace ( String name , Connectors connectors ) { } }
// The name arrives as "<sourceName>:<workspaceName>"; assumes exactly one ':'
// separator (TODO confirm -- a name without ':' would fail on tokens[1]).
// Registers the workspace name, wraps the connector behind a
// FederatedDocumentStore, derives the workspace root NodeKey from the connector's
// root document id, and caches the new WorkspaceCache under the workspace name.
String [ ] tokens = name . split ( ":" ) ; String sourceName = tokens [ 0 ] ; String workspaceName = tokens [ 1 ] ; this . workspaceNames . add ( workspaceName ) ; refreshRepositoryMetadata ( true ) ; ConcurrentMap < NodeKey , CachedNode > nodeCache = cacheForWorkspace ( ) . asMap ( ) ; ExecutionContext context = context ( ) ; // the name of the external connector is used for source name and workspace name String sourceKey = NodeKey . keyForSourceName ( sourceName ) ; String workspaceKey = NodeKey . keyForWorkspaceName ( workspaceName ) ; // ask external system to determine root identifier . Connector connector = connectors . getConnectorForSourceName ( sourceName ) ; if ( connector == null ) { throw new IllegalArgumentException ( JcrI18n . connectorNotFound . text ( sourceName ) ) ; } FederatedDocumentStore documentStore = new FederatedDocumentStore ( connectors , this . documentStore ( ) . localStore ( ) ) ; String rootId = connector . getRootDocumentId ( ) ; // Compute the root key for this workspace . . . NodeKey rootKey = new NodeKey ( sourceKey , workspaceKey , rootId ) ; // We know that this workspace is not the system workspace , so find it . . . final WorkspaceCache systemWorkspaceCache = workspaceCachesByName . get ( systemWorkspaceName ) ; WorkspaceCache workspaceCache = new WorkspaceCache ( context , getKey ( ) , workspaceName , systemWorkspaceCache , documentStore , translator , rootKey , nodeCache , changeBus , repositoryEnvironment ( ) ) ; workspaceCachesByName . put ( workspaceName , workspaceCache ) ; return workspace ( workspaceName ) ;
public class ServletHttpResponse { public void resetBuffer ( ) { } }
if ( isCommitted ( ) ) throw new IllegalStateException ( "Committed" ) ; ( ( HttpOutputStream ) _httpResponse . getOutputStream ( ) ) . resetBuffer ( ) ; if ( _writer != null ) _writer . reset ( ) ;
public class DistributedMigratorRangeMonitor { /** * Claims migration range tasks that have been queued by the leader and are ready to scan . */ private List < ClaimedTask > claimMigrationRangeTasks ( int max ) { } }
// Claims up to 'max' queued scan-range tasks from the workflow. A task already
// claimed locally is NOT acknowledged -- it is left to expire so it can be
// re-delivered once the previous claim is released. Each newly claimed task gets a
// delayed follow-up check verifying that scanning actually started. Any failure is
// logged and an empty list is returned (best-effort semantics).
try { Date claimTime = new Date ( ) ; List < ScanRangeTask > migrationRangeTasks = _workflow . claimScanRangeTasks ( max , QUEUE_CLAIM_TTL ) ; if ( migrationRangeTasks . isEmpty ( ) ) { return ImmutableList . of ( ) ; } List < ClaimedTask > newlyClaimedTasks = Lists . newArrayListWithCapacity ( migrationRangeTasks . size ( ) ) ; for ( ScanRangeTask task : migrationRangeTasks ) { final ClaimedTask claimedTask = new ClaimedTask ( task , claimTime ) ; // Record that the task is claimed locally boolean alreadyClaimed = _claimedTasks . putIfAbsent ( task . getId ( ) , claimedTask ) != null ; if ( alreadyClaimed ) { _log . warn ( "Workflow returned migration range task that is already claimed: {}" , task ) ; // Do not acknowledge the task , let it expire naturally . Eventually it should come up again // after the previous claim has been released . } else { _log . info ( "Claimed migration range task: {}" , task ) ; newlyClaimedTasks . add ( claimedTask ) ; // Schedule a follow - up to ensure the scanning service assigns it a thread // in a reasonable amount of time . _backgroundService . schedule ( new Runnable ( ) { @ Override public void run ( ) { validateClaimedTaskHasStarted ( claimedTask ) ; } } , CLAIM_START_TIMEOUT . toMillis ( ) , TimeUnit . MILLISECONDS ) ; } } return newlyClaimedTasks ; } catch ( Exception e ) { _log . error ( "Failed to start next available migration range" , e ) ; return ImmutableList . of ( ) ; }
public class ApiOvhTelephony { /** * Add an outplan notification on the billing account * REST : POST / telephony / { billingAccount } / outplanNotification * @ param percentage [ required ] The notification percentage of maximum outplan * @ param block [ required ] The blocking type of the associate lines * @ param notifyEmail [ required ] Override the nichandle email for this notification * @ param billingAccount [ required ] The name of your billingAccount */ public OvhConsumptionThreshold billingAccount_outplanNotification_POST ( String billingAccount , OvhOutplanNotificationBlockEnum block , String notifyEmail , Double percentage ) throws IOException { } }
// Standard OVH client pattern: substitute the billing account into the path
// template, send block/notifyEmail/percentage as the POST body, then unmarshal
// the JSON response into an OvhConsumptionThreshold.
String qPath = "/telephony/{billingAccount}/outplanNotification" ; StringBuilder sb = path ( qPath , billingAccount ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "block" , block ) ; addBody ( o , "notifyEmail" , notifyEmail ) ; addBody ( o , "percentage" , percentage ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhConsumptionThreshold . class ) ;
public class FileOperations { /** * Delete an existing file . * @ param aFile * The file to be deleted . May not be < code > null < / code > . * @ return A non - < code > null < / code > error code . */ @ Nonnull public static FileIOError deleteFile ( @ Nonnull final File aFile ) { } }
// Up-front checks (file existence, writable parent directory) allow reporting a
// specific error code before attempting the delete. Files.delete then maps each
// remaining failure mode to a distinct FileIOError via its exception types;
// SecurityException gets its own dedicated conversion.
ValueEnforcer . notNull ( aFile , "File" ) ; if ( ! FileHelper . existsFile ( aFile ) ) return EFileIOErrorCode . SOURCE_DOES_NOT_EXIST . getAsIOError ( EFileIOOperation . DELETE_FILE , aFile ) ; // Is the parent directory writable ? final File aParentDir = aFile . getParentFile ( ) ; if ( aParentDir != null && ! aParentDir . canWrite ( ) ) return EFileIOErrorCode . SOURCE_PARENT_NOT_WRITABLE . getAsIOError ( EFileIOOperation . DELETE_FILE , aFile ) ; try { Files . delete ( aFile . toPath ( ) ) ; return EFileIOErrorCode . NO_ERROR . getAsIOError ( EFileIOOperation . DELETE_FILE , aFile ) ; } catch ( final NoSuchFileException ex ) { return EFileIOErrorCode . SOURCE_DOES_NOT_EXIST . getAsIOError ( EFileIOOperation . DELETE_FILE , aFile ) ; } catch ( final DirectoryNotEmptyException ex ) { return EFileIOErrorCode . OPERATION_FAILED . getAsIOError ( EFileIOOperation . DELETE_FILE , aFile ) ; } catch ( final IOException ex ) { return EFileIOErrorCode . IO_ERROR . getAsIOError ( EFileIOOperation . DELETE_FILE , aFile ) ; } catch ( final SecurityException ex ) { return EFileIOErrorCode . getSecurityAsIOError ( EFileIOOperation . DELETE_FILE , ex ) ; }
public class BasicChronology { /** * Get the number of weeks in the year . * @ param year the year to use * @ return number of weeks in the year */ int getWeeksInYear ( int year ) { } }
long firstWeekMillis1 = getFirstWeekOfYearMillis ( year ) ; long firstWeekMillis2 = getFirstWeekOfYearMillis ( year + 1 ) ; return ( int ) ( ( firstWeekMillis2 - firstWeekMillis1 ) / DateTimeConstants . MILLIS_PER_WEEK ) ;
public class ContextedRuntimeException { /** * Sets information helpful to a developer in diagnosing and correcting the problem . * For the information to be meaningful , the value passed should have a reasonable * toString ( ) implementation . * Any existing values with the same labels are removed before the new one is added . * Note : This exception is only serializable if the object added as value is serializable . * @ param label a textual label associated with information , { @ code null } not recommended * @ param value information needed to understand exception , may be { @ code null } * @ return { @ code this } , for method chaining , not { @ code null } */ @ Override public ContextedRuntimeException setContextValue ( final String label , final Object value ) { } }
// Delegates to the shared exception context, then returns this for chaining.
exceptionContext . setContextValue ( label , value ) ; return this ;
public class Question { /** * 对问题进行分词 * @ return 分词结果 */ public List < String > getWords ( ) { } }
// Tokenizes the question text (after stripping question marks) and collects the
// surface form of each word.
// NOTE(review): the two replace calls appear identical here; one was presumably
// the full-width question mark (U+FF1F) before an encoding mix-up -- confirm
// against the upstream source before touching these literals.
List < String > result = new ArrayList < > ( ) ; List < Word > words = WordParser . parse ( question . replace ( "?" , "" ) . replace ( "?" , "" ) ) ; for ( Word word : words ) { result . add ( word . getText ( ) ) ; } return result ;
public class BytecodeInjectReactive { /** * Add Method to ConstPool . If method was not in the ConstPool will add and return index , otherwise will return index of already existing entry of constpool */ private static int addMethod ( ConstPool cPool , CtMethod method ) { } }
// addMethodrefInfo deduplicates: it returns the index of an existing constant-pool
// entry when one matches, otherwise it adds a new entry and returns its index.
// addMethodrefInfo is a safe add , if constant already present it return the existing value without adding . return cPool . addMethodrefInfo ( cPool . getThisClassInfo ( ) , method . getName ( ) , method . getSignature ( ) ) ;
public class BatchGetRepositoriesResult { /** * Returns a list of repository names for which information could not be found . * @ param repositoriesNotFound * Returns a list of repository names for which information could not be found . */ public void setRepositoriesNotFound ( java . util . Collection < String > repositoriesNotFound ) { } }
if ( repositoriesNotFound == null ) { this . repositoriesNotFound = null ; return ; } this . repositoriesNotFound = new java . util . ArrayList < String > ( repositoriesNotFound ) ;
public class CollUtil { /** * 去掉集合中的多个元素 * @ param collection 集合 * @ param elesRemoved 被去掉的元素数组 * @ return 原集合 * @ since 4.1.0 */ @ SuppressWarnings ( "unchecked" ) public static < T > Collection < T > removeAny ( Collection < T > collection , T ... elesRemoved ) { } }
// Removes all of the given elements from the collection in place and returns the
// same instance. Wrapping the varargs in a HashSet keeps removeAll at
// O(|collection|) rather than O(|collection| * |elements|).
collection . removeAll ( newHashSet ( elesRemoved ) ) ; return collection ;
public class ConsoleUtil { /** * 从控制台获取命令并且处理 * @ param consumer 处理控制台获取的命令 , 当输入的是exit时系统退出 */ public static void start ( Consumer < String > consumer ) { } }
// Reads commands from the console and hands each line to the consumer (the loop
// lives in work()). Prefers System.console() when the JVM is attached to a real
// terminal, otherwise falls back to a BufferedReader over System.in; both are
// adapted to the CustomReader functional interface via method references. Any
// failure prints the error and terminates the JVM with status 1.
try { Console console = System . console ( ) ; CustomReader reader ; if ( console != null ) { System . out . println ( "Console对象存在,使用Console对象" ) ; reader = console :: readLine ; } else { System . out . println ( "Console对象不存在,使用Reader" ) ; BufferedReader bufferedReader = new BufferedReader ( new InputStreamReader ( System . in ) ) ; reader = bufferedReader :: readLine ; } work ( reader , consumer ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; System . err . println ( "系统异常,即将退出" ) ; System . exit ( 1 ) ; }
import java.util.*;

class CheckForDuplicates {
    /**
     * Checks whether the provided list of integers contains any duplicate values.
     *
     * >>> check_for_duplicates([1, 2, 3, 4, 5])
     * False
     * >>> check_for_duplicates([1, 2, 3, 4, 4])
     * True
     * >>> check_for_duplicates([1, 1, 2, 2, 3, 3, 4, 4, 5])
     * True
     *
     * @param inputArray A list of integers.
     * @return Returns a boolean indicating presence of duplicates in the input list.
     */
    public static boolean checkForDuplicates(List<Integer> inputArray) {
        // Set.add returns false when the element is already present, which lets
        // us stop at the first duplicate instead of materializing the whole set.
        Set<Integer> seen = new HashSet<>();
        for (Integer value : inputArray) {
            if (!seen.add(value)) {
                return true;
            }
        }
        return false;
    }
}
public class StringExpression { /** * Create a { @ code this . indexOf ( str ) } expression * < p > Get the index of the given substring in this String < / p > * @ param str string * @ return this . indexOf ( str ) * @ see java . lang . String # indexOf ( String ) */ public NumberExpression < Integer > indexOf ( Expression < String > str ) { } }
// Builds an INDEX_OF number operation over this string expression's mixin.
return Expressions . numberOperation ( Integer . class , Ops . INDEX_OF , mixin , str ) ;
public class BatchMeterUsageResult { /** * Contains all UsageRecords processed by BatchMeterUsage . These records were either honored by AWS Marketplace * Metering Service or were invalid . * @ param results * Contains all UsageRecords processed by BatchMeterUsage . These records were either honored by AWS * Marketplace Metering Service or were invalid . */ public void setResults ( java . util . Collection < UsageRecordResult > results ) { } }
if ( results == null ) { this . results = null ; return ; } this . results = new java . util . ArrayList < UsageRecordResult > ( results ) ;
public class ClassInfoList { /** * Generate a and save a . dot file , which can be fed into GraphViz for layout and visualization of the class * graph . * Methods , fields and annotations are shown if enabled , via { @ link ClassGraph # enableMethodInfo ( ) } , * { @ link ClassGraph # enableFieldInfo ( ) } and { @ link ClassGraph # enableAnnotationInfo ( ) } . * Only public classes , methods , and fields are shown , unless { @ link ClassGraph # ignoreClassVisibility ( ) } , * { @ link ClassGraph # ignoreMethodVisibility ( ) } , and / or { @ link ClassGraph # ignoreFieldVisibility ( ) } has / have been * called . * @ param file * the file to save the GraphViz . dot file to . * @ throws IOException * if the file could not be saved . * @ throws IllegalArgumentException * if this { @ link ClassInfoList } is empty or { @ link ClassGraph # enableClassInfo ( ) } was not called * before scanning ( since there would be nothing to graph ) . */ public void generateGraphVizDotFile ( final File file ) throws IOException { } }
// Writes the generated .dot content to the file; try-with-resources guarantees the
// writer is closed even if the no-arg generateGraphVizDotFile() throws.
try ( PrintWriter writer = new PrintWriter ( file ) ) { writer . print ( generateGraphVizDotFile ( ) ) ; }
public class SemanticAPI { /** * 提交语音 * @ param accessToken 接口调用凭证 * @ param voiceId 语音唯一标识 * @ param uri 文件格式 只支持mp3,16k , 单声道 , 最大1M * @ return BaseResult * @ since 2.8.22 */ public static BaseResult addvoicetorecofortext ( String accessToken , String voiceId , URI uri ) { } }
// Convenience overload: delegates to the four-arg variant with a null third
// argument (presumably an optional language/format parameter -- confirm against
// the full overload's signature).
return addvoicetorecofortext ( accessToken , voiceId , null , uri ) ;
public class PropertyAccessor { /** * Sets a value of the * < a href = " http : / / docs . oracle . com / javase / tutorial / javabeans / index . html " target = " _ blank " > JavaBeans < / a > property . * Examples : * < pre > * / / import static { @ link org . fest . reflect . core . Reflection # property ( String ) org . fest . reflect . core . Reflection . property } ; * / / Equivalent to " String name = person . getName ( ) " * String name = { @ link org . fest . reflect . core . Reflection # property ( String ) property } ( " name " ) . { @ link org . fest . reflect . beanproperty . PropertyName # ofType ( Class ) ofType } ( String . class ) . { @ link org . fest . reflect . beanproperty . PropertyType # in ( Object ) in } ( person ) . { @ link org . fest . reflect . beanproperty . PropertyAccessor # get ( ) get } ( ) ; * / / Equivalent to " person . setName ( " Yoda " ) " * { @ link org . fest . reflect . core . Reflection # property ( String ) property } ( " name " ) . { @ link org . fest . reflect . beanproperty . PropertyName # ofType ( Class ) ofType } ( String . class ) . { @ link org . fest . reflect . beanproperty . PropertyType # in ( Object ) in } ( person ) . { @ link org . fest . reflect . beanproperty . PropertyAccessor # set ( Object ) set } ( " Yoda " ) ; * / / Equivalent to " List & lt ; String & gt ; powers = jedi . getPowers ( ) " * List & lt ; String & gt ; powers = { @ link org . fest . reflect . core . Reflection # property ( String ) property } ( " powers " ) . { @ link org . fest . reflect . beanproperty . PropertyName # ofType ( org . fest . reflect . reference . TypeRef ) ofType } ( new { @ link org . fest . reflect . reference . TypeRef TypeRef } & lt ; List & lt ; String & gt ; & gt ; ( ) { } ) . { @ link org . fest . reflect . beanproperty . PropertyTypeRef # in ( Object ) in } ( jedi ) . { @ link org . fest . reflect . beanproperty . PropertyAccessor # get ( ) get } ( ) ; * / / Equivalent to " jedi . 
setPowers ( powers ) " * List & lt ; String & gt ; powers = new ArrayList & lt ; String & gt ; ( ) ; * powers . add ( " heal " ) ; * { @ link org . fest . reflect . core . Reflection # property ( String ) property } ( " powers " ) . { @ link org . fest . reflect . beanproperty . PropertyName # ofType ( org . fest . reflect . reference . TypeRef ) ofType } ( new { @ link org . fest . reflect . reference . TypeRef TypeRef } & lt ; List & lt ; String & gt ; & gt ; ( ) { } ) . { @ link org . fest . reflect . beanproperty . PropertyTypeRef # in ( Object ) in } ( jedi ) . { @ link org . fest . reflect . beanproperty . PropertyAccessor # set ( Object ) set } ( powers ) ; * < / pre > * @ param value the value to set . * @ throws ReflectionError if the given value cannot be set . */ public void set ( @ Nullable T value ) { } }
try { descriptor . getWriteMethod ( ) . invoke ( target , value ) ; } catch ( Exception e ) { String format = "Failed to set value %s in property '%s'" ; String msg = String . format ( format , String . valueOf ( value ) , descriptor . getName ( ) ) ; throw new ReflectionError ( msg , e ) ; }
public class MetricsSystem { /** * Converts a simple string to a qualified metric name based on the process type . * @ param name the name of the metric * @ return the metric with instance and id tags */ public static String getMetricName ( String name ) { } }
// Dispatches on the current process type to qualify the metric name; an unknown
// process type is a programming error and fails fast.
switch ( CommonUtils . PROCESS_TYPE . get ( ) ) { case CLIENT : return getClientMetricName ( name ) ; case MASTER : return getMasterMetricName ( name ) ; case PROXY : return getProxyMetricName ( name ) ; case WORKER : return getWorkerMetricName ( name ) ; case JOB_MASTER : return getJobMasterMetricName ( name ) ; case JOB_WORKER : return getJobWorkerMetricName ( name ) ; default : throw new IllegalStateException ( "Unknown process type" ) ; }
public class JavaClasspathParser { /** * Reads and decode an XML classpath string . Returns a two - dimensional array , where the number of elements in the row is fixed to 2 . The first * element is an array of raw classpath entries and the second element is an array of referenced entries that may have been stored by the client * earlier . See { @ link IJavaProject # getReferencedClasspathEntries ( ) } for more details . * @ param projectName * - the name of project containing the . classpath file * @ param projectRootAbsoluteFullPath * - the path to project containing the . classpath file * @ param xmlClasspath * - path to the XML * @ param unknownElements * - map of unknow elements * @ return the set of CLasspath ENtries extracted from the . classpath * @ throws IOException * - exception during parsing of . classpath * @ throws ClasspathEntry . AssertionFailedException * - exception during parsing of . classpath */ @ SuppressWarnings ( "checkstyle:npathcomplexity" ) public static IClasspathEntry [ ] [ ] decodeClasspath ( String projectName , IPath projectRootAbsoluteFullPath , String xmlClasspath , Map < IPath , UnknownXmlElements > unknownElements ) throws IOException , ClasspathEntry . AssertionFailedException { } }
// Parses the .classpath XML document. <classpathentry> elements are collected into
// result[0], with the single K_OUTPUT entry (if present) forced into the last
// slot; <referencedentry> elements are collected into result[1]. A document whose
// root is not <classpath>, or any SAX/parser-config failure, is surfaced as an
// IOException with the standard bad-format message.
final List < IClasspathEntry > paths = new ArrayList < > ( ) ; IClasspathEntry defaultOutput = null ; final Element cpElement ; try ( StringReader reader = new StringReader ( xmlClasspath ) ; ) { final DocumentBuilder parser = DocumentBuilderFactory . newInstance ( ) . newDocumentBuilder ( ) ; cpElement = parser . parse ( new InputSource ( reader ) ) . getDocumentElement ( ) ; } catch ( SAXException e ) { throw new IOException ( Messages . file_badFormat ) ; } catch ( ParserConfigurationException e ) { throw new IOException ( Messages . file_badFormat ) ; } if ( ! cpElement . getNodeName ( ) . equalsIgnoreCase ( "classpath" ) ) { // $ NON - NLS - 1 $ throw new IOException ( Messages . file_badFormat ) ; } NodeList list = cpElement . getElementsByTagName ( ClasspathEntry . TAG_CLASSPATHENTRY ) ; int length = list . getLength ( ) ; for ( int i = 0 ; i < length ; ++ i ) { final Node node = list . item ( i ) ; if ( node . getNodeType ( ) == Node . ELEMENT_NODE ) { final IClasspathEntry entry = elementDecode ( ( Element ) node , projectName , projectRootAbsoluteFullPath , unknownElements ) ; if ( entry != null ) { if ( entry . getContentKind ( ) == ClasspathEntry . K_OUTPUT ) { // separate output defaultOutput = entry ; } else { paths . add ( entry ) ; } } } } final int pathSize = paths . size ( ) ; final IClasspathEntry [ ] [ ] entries = new IClasspathEntry [ 2 ] [ ] ; entries [ 0 ] = new IClasspathEntry [ pathSize + ( defaultOutput == null ? 0 : 1 ) ] ; paths . toArray ( entries [ 0 ] ) ; if ( defaultOutput != null ) { // ensure output is last item entries [ 0 ] [ pathSize ] = defaultOutput ; } paths . clear ( ) ; list = cpElement . getElementsByTagName ( ClasspathEntry . TAG_REFERENCED_ENTRY ) ; length = list . getLength ( ) ; for ( int i = 0 ; i < length ; ++ i ) { final Node node = list . item ( i ) ; if ( node . getNodeType ( ) == Node . 
ELEMENT_NODE ) { final IClasspathEntry entry = elementDecode ( ( Element ) node , projectName , projectRootAbsoluteFullPath , unknownElements ) ; if ( entry != null ) { paths . add ( entry ) ; } } } entries [ 1 ] = new IClasspathEntry [ paths . size ( ) ] ; paths . toArray ( entries [ 1 ] ) ; return entries ;
public class ProxyOverrider { /** * Deactivates all proxy overrides restoring the pre - existing proxy settings if any . */ public void deactivateAll ( ) { } }
// Restores the JVM-wide proxy system properties captured when the overrides were
// activated; a scheme with no saved original has its properties cleared outright.
// The saved non-proxy-host set is re-joined with '|' (or cleared when empty), and
// both saved-state collections are drained so a second call is a no-op.
for ( String scheme : new String [ ] { "http" , "https" } ) { InetSocketAddress originalProxy = originalProxies . remove ( scheme ) ; if ( originalProxy != null ) { System . setProperty ( scheme + ".proxyHost" , originalProxy . getHostName ( ) ) ; System . setProperty ( scheme + ".proxyPort" , Integer . toString ( originalProxy . getPort ( ) ) ) ; } else { System . clearProperty ( scheme + ".proxyHost" ) ; System . clearProperty ( scheme + ".proxyPort" ) ; } } if ( originalNonProxyHosts . isEmpty ( ) ) { System . clearProperty ( "http.nonProxyHosts" ) ; } else { System . setProperty ( "http.nonProxyHosts" , Joiner . on ( '|' ) . join ( originalNonProxyHosts ) ) ; } originalNonProxyHosts . clear ( ) ;
public class Parser { /** * A { @ link Parser } that runs { @ code this } 1 or more times separated by { @ code delim } . * < p > The return values are collected in a { @ link List } . */ public final Parser < List < T > > sepBy1 ( Parser < ? > delim ) { } }
// sepBy1 = one occurrence of this parser, then zero or more (delim this) pairs.
// The first parsed value seeds the list factory, so the resulting list always
// contains at least one element.
final Parser < T > afterFirst = delim . asDelimiter ( ) . next ( this ) ; return next ( ( Function < T , Parser < List < T > > > ) firstValue -> new RepeatAtLeastParser < T > ( afterFirst , 0 , ListFactory . arrayListFactoryWithFirstElement ( firstValue ) ) ) ;
public class SetDirtyOnChangeHandler { /** * Constructor . * @ param field The basefield owner of this listener ( usually null and set on setOwner ( ) ) . * @ param fldTarget The field to set to modified if this field changes . * @ param bIfNewRecord Only set to dirty if the target field ' s record is new . * @ param bIfCurrentRecord Only set to dirty if the target field ' s record is current . */ public void init ( BaseField field , BaseField fldTarget , boolean bIfNewRecord , boolean bIfCurrentRecord ) { } }
// Stores the dirty-target configuration, runs the base init, then restricts the
// listener to user/screen moves only -- programmatic init and read moves are
// deliberately ignored. The flag assignments must stay after super.init, which
// may reset them to defaults.
m_fldTarget = fldTarget ; m_bIfNewRecord = bIfNewRecord ; m_bIfCurrentRecord = bIfCurrentRecord ; super . init ( field ) ; m_bScreenMove = true ; // Only respond to user change m_bInitMove = false ; m_bReadMove = false ;
public class CdnClient { /** * Get detailed information of a domain . * @ param request The request containing all of the options related to the domain . * @ return getDomainConfig of the getDomainConfig operation returned by the service . */ public GetDomainConfigResponse getDomainConfig ( GetDomainConfigRequest request ) { } }
checkNotNull ( request , "The parameter request should NOT be null." ) ; InternalRequest internalRequest = createRequest ( request , HttpMethodName . GET , DOMAIN , request . getDomain ( ) , "config" ) ; return invokeHttpClient ( internalRequest , GetDomainConfigResponse . class ) ;
public class PluginGroup { /** * Stops the { @ link Plugin } s managed by this { @ link PluginGroup } . */ CompletableFuture < Void > stop ( CentralDogmaConfig config , ProjectManager projectManager , CommandExecutor commandExecutor , MeterRegistry meterRegistry ) { } }
// Delegates to the group's start/stop support with a freshly built PluginContext.
return startStop . stop ( new PluginContext ( config , projectManager , commandExecutor , meterRegistry ) ) ;
public class CallCenterApp { /** * Core benchmark code . * Connect . Initialize . Run the loop . Cleanup . Print Results . * @ throws InterruptedException * @ throws IOException * @ throws NoConnectionsException * @ throws ProcCallException */ public void run ( ) throws InterruptedException , NoConnectionsException , IOException , ProcCallException { } }
// Benchmark lifecycle: connect, run a warmup loop (baseline stats are reset
// afterwards so warmup numbers are discarded), run the timed benchmark loop with
// periodic stats printing, then drain any queued events plus outstanding
// transactions, print summaries and close the client. The 1 ms sleep keeps the
// loops from busy-spinning when the transformer has no event ready yet.
System . out . print ( HORIZONTAL_RULE ) ; System . out . println ( " Setup & Initialization" ) ; System . out . println ( HORIZONTAL_RULE ) ; // connect to one or more servers , loop until success connect ( config . servers ) ; System . out . print ( HORIZONTAL_RULE ) ; System . out . println ( " Starting Benchmark" ) ; System . out . println ( HORIZONTAL_RULE ) ; // Run the benchmark loop for the requested warmup time // The throughput may be throttled depending on client configuration System . out . println ( "Warming up..." ) ; final long warmupEndTime = System . currentTimeMillis ( ) + ( 1000l * config . warmup ) ; long now = System . currentTimeMillis ( ) ; while ( warmupEndTime > now ) { CallEvent call = networkTransformer . next ( now ) ; if ( call == null ) { try { Thread . sleep ( 1 ) ; } catch ( InterruptedException e ) { } } else { sendEvent ( call ) ; } now = System . currentTimeMillis ( ) ; } // reset the stats after warmup fullStatsContext . fetchAndResetBaseline ( ) ; periodicStatsContext . fetchAndResetBaseline ( ) ; // print periodic statistics to the console benchmarkStartTS = System . currentTimeMillis ( ) ; schedulePeriodicStats ( ) ; // Run the benchmark loop for the requested duration // The throughput may be throttled depending on client configuration System . out . println ( "\nRunning benchmark..." ) ; final long benchmarkEndTime = System . currentTimeMillis ( ) + ( 1000l * config . duration ) ; now = System . currentTimeMillis ( ) ; while ( benchmarkEndTime > now ) { CallEvent call = networkTransformer . next ( now ) ; if ( call == null ) { try { Thread . sleep ( 1 ) ; } catch ( InterruptedException e ) { } } else { sendEvent ( call ) ; } now = System . currentTimeMillis ( ) ; } // cancel periodic stats printing timer . cancel ( ) ; // drain any messages that are waiting to be sent immediately CallEvent call = null ; while ( ( call = networkTransformer . 
drain ( ) ) != null ) { sendEvent ( call ) ; } // block until all outstanding txns return client . drain ( ) ; // print out some debugging stats callSimulator . printSummary ( ) ; // print the summary results printResults ( ) ; // close down the client connections client . close ( ) ;
public class AbstractManagedType { /** * ( non - Javadoc ) * @ see javax . persistence . metamodel . ManagedType # getMap ( java . lang . String , * java . lang . Class , java . lang . Class ) */ @ Override public < K , V > MapAttribute < ? super X , K , V > getMap ( String paramName , Class < K > keyClazz , Class < V > valueClazz ) { } }
PluralAttribute < X , ? , ? > declaredAttrib = getDeclaredPluralAttribute ( paramName ) ; if ( onCheckMapAttribute ( declaredAttrib , valueClazz ) ) { if ( valueClazz != null && valueClazz . equals ( ( ( MapAttribute < X , K , V > ) declaredAttrib ) . getKeyJavaType ( ) ) ) { return ( MapAttribute < X , K , V > ) declaredAttrib ; } } PluralAttribute < ? super X , ? , ? > superAttrib = getPluralAttriute ( paramName ) ; if ( onCheckMapAttribute ( superAttrib , valueClazz ) ) { if ( valueClazz != null && valueClazz . equals ( ( ( MapAttribute < ? super X , K , V > ) superAttrib ) . getKeyJavaType ( ) ) ) { return ( MapAttribute < ? super X , K , V > ) superAttrib ; } } throw new IllegalArgumentException ( "attribute of the given name and type is not present in the managed MapAttribute type, for name:" + paramName + " , value type:" + valueClazz + "key tpye:" + keyClazz ) ;
public class MicroServiceTemplateSupport { /** * sql */ public Map getSingleInfoService ( String sql ) throws Exception { } }
/* String realSql = sql + " limit 1 " ; List retList = getInfoListAllServiceInnerExBySql ( realSql , null ) ; if ( retList = = null | | retList . size ( ) < = 0 ) { return null ; return ( Map ) retList . get ( 0 ) ; */ // for oracle String realSql = sql ; Map retMap = getInnerDao ( ) . querySingleObjJoinByCondition ( realSql ) ; CheckModelTypeUtil . addMetaCols ( retMap ) ; CheckModelTypeUtil . changeNoStrCols ( retMap ) ; return retMap ;
public class NamespaceRegistryImpl { /** * Registers all the remote commands */ private void initRemoteCommands ( final RepositoryEntry config ) { } }
// Registers the cluster-wide register/unregister namespace commands. Each command
// id embeds the repository name so repositories do not collide; the random 'id'
// generated here travels as args[0] and lets a node skip commands it originated
// itself. Remote failures are logged as warnings, never propagated.
this . id = UUID . randomUUID ( ) . toString ( ) ; registerNamespace = rpcService . registerCommand ( new RemoteCommand ( ) { public String getId ( ) { return "org.exoplatform.services.jcr.impl.core.NamespaceRegistryImpl-registerNamespace-" + config . getName ( ) ; } public Serializable execute ( Serializable [ ] args ) throws Throwable { if ( ! id . equals ( args [ 0 ] ) ) { try { registerNamespace ( ( String ) args [ 1 ] , ( String ) args [ 2 ] , false ) ; } catch ( Exception e ) { LOG . warn ( "Could not register the namespace on other cluster nodes" , e ) ; } } return true ; } } ) ; unregisterNamespace = rpcService . registerCommand ( new RemoteCommand ( ) { public String getId ( ) { return "org.exoplatform.services.jcr.impl.core.NamespaceRegistryImpl-unregisterNamespace-" + config . getName ( ) ; } public Serializable execute ( Serializable [ ] args ) throws Throwable { if ( ! id . equals ( args [ 0 ] ) ) { try { unregisterNamespace ( ( String ) args [ 1 ] , false ) ; } catch ( Exception e ) { LOG . warn ( "Could not unregister the namespace on other cluster nodes" , e ) ; } } return true ; } } ) ;
public class OptimizerNode { protected void prunePlanAlternatives ( List < PlanNode > plans ) { } }
// Prunes the candidate plan list in place. With no open branches a simple
// common-branching prune applies. Otherwise candidates are grouped by the
// sub-plan they chose at each still-unclosed branch point: sorting by the
// identity hash codes of those choices makes equal-choice candidates adjacent,
// then each group is pruned independently so branch-compatible alternatives
// remain available when the branches are joined later.
if ( plans . isEmpty ( ) ) { throw new CompilerException ( "No plan meeting the requirements could be created @ " + this + ". Most likely reason: Too restrictive plan hints." ) ; } // shortcut for the simple case if ( plans . size ( ) == 1 ) { return ; } // we can only compare plan candidates that made equal choices // at the branching points . for each choice at a branching point , // we need to keep the cheapest ( wrt . interesting properties ) . // if we do not keep candidates for each branch choice , we might not // find branch compatible candidates when joining the branches back . // for pruning , we are quasi AFTER the node , so in the presence of // branches , we need form the per - branch - choice groups by the choice // they made at the latest un - joined branching node . Note that this is // different from the check for branch compatibility of candidates , as // this happens on the input sub - plans and hence BEFORE the node ( therefore // it is relevant to find the latest ( partially ) joined branch point . if ( this . openBranches == null || this . openBranches . isEmpty ( ) ) { prunePlanAlternativesWithCommonBranching ( plans ) ; } else { // partition the candidates into groups that made the same sub - plan candidate // choice at the latest unclosed branch point final OptimizerNode [ ] branchDeterminers = new OptimizerNode [ this . openBranches . size ( ) ] ; for ( int i = 0 ; i < branchDeterminers . length ; i ++ ) { branchDeterminers [ i ] = this . openBranches . get ( this . openBranches . size ( ) - 1 - i ) . getBranchingNode ( ) ; } // this sorter sorts by the candidate choice at the branch point Comparator < PlanNode > sorter = new Comparator < PlanNode > ( ) { @ Override public int compare ( PlanNode o1 , PlanNode o2 ) { for ( OptimizerNode branchDeterminer : branchDeterminers ) { PlanNode n1 = o1 . getCandidateAtBranchPoint ( branchDeterminer ) ; PlanNode n2 = o2 . getCandidateAtBranchPoint ( branchDeterminer ) ; int hash1 = System . 
identityHashCode ( n1 ) ; int hash2 = System . identityHashCode ( n2 ) ; if ( hash1 != hash2 ) { return hash1 - hash2 ; } } return 0 ; } } ; Collections . sort ( plans , sorter ) ; List < PlanNode > result = new ArrayList < PlanNode > ( ) ; List < PlanNode > turn = new ArrayList < PlanNode > ( ) ; final PlanNode [ ] determinerChoice = new PlanNode [ branchDeterminers . length ] ; while ( ! plans . isEmpty ( ) ) { // take one as the determiner turn . clear ( ) ; PlanNode determiner = plans . remove ( plans . size ( ) - 1 ) ; turn . add ( determiner ) ; for ( int i = 0 ; i < determinerChoice . length ; i ++ ) { determinerChoice [ i ] = determiner . getCandidateAtBranchPoint ( branchDeterminers [ i ] ) ; } // go backwards through the plans and find all that are equal boolean stillEqual = true ; for ( int k = plans . size ( ) - 1 ; k >= 0 && stillEqual ; k -- ) { PlanNode toCheck = plans . get ( k ) ; for ( int i = 0 ; i < branchDeterminers . length ; i ++ ) { PlanNode checkerChoice = toCheck . getCandidateAtBranchPoint ( branchDeterminers [ i ] ) ; if ( checkerChoice != determinerChoice [ i ] ) { // not the same anymore stillEqual = false ; break ; } } if ( stillEqual ) { // the same plans . remove ( k ) ; turn . add ( toCheck ) ; } } // now that we have only plans with the same branch alternatives , prune ! if ( turn . size ( ) > 1 ) { prunePlanAlternativesWithCommonBranching ( turn ) ; } result . addAll ( turn ) ; } // after all turns are complete plans . clear ( ) ; plans . addAll ( result ) ; }
public class UserAttrs { /** * Set user - defined - attribute * @ param path * @ param attribute user : attribute name . user : can be omitted . * @ param value * @ param options * @ throws IOException */ public static final void setDoubleAttribute ( Path path , String attribute , double value , LinkOption ... options ) throws IOException { } }
attribute = attribute . startsWith ( "user:" ) ? attribute : "user:" + attribute ; Files . setAttribute ( path , attribute , Primitives . writeDouble ( value ) , options ) ;
public class Postconditions { /** * < p > Evaluate the given { @ code predicate } using { @ code value } as input . < / p > * < p > The function throws { @ link PostconditionViolationException } if the * predicate is false . < / p > * @ param value The value * @ param predicate The predicate * @ param describer A describer for the predicate * @ param < T > The type of values * @ return value * @ throws PostconditionViolationException If the predicate is false */ public static < T > T checkPostcondition ( final T value , final Predicate < T > predicate , final Function < T , String > describer ) { } }
final boolean ok ; try { ok = predicate . test ( value ) ; } catch ( final Throwable e ) { throw failed ( e , value , singleViolation ( failedPredicate ( e ) ) ) ; } return innerCheck ( value , ok , describer ) ;
public class AbstractFedoraBinary {
    /**
     * Returns the stored content digest URI whose algorithm matches the binary's
     * configured digest algorithm, or a "missing checksum" placeholder when none
     * matches or a repository error occurs.
     *
     * @see org.fcrepo.kernel.api.models.FedoraBinary#getContentDigest()
     */
    @Override
    public URI getContentDigest() {
        LOGGER.debug("getContentDigest getting digest info");
        try {
            // Determine which digest algorithm to use: prefer the algorithm stored on the
            // description resource, falling back to the system default.
            final String algorithm = hasDescriptionProperty(DEFAULT_DIGEST_ALGORITHM)
                    ? property2values.apply(getDescriptionProperty(DEFAULT_DIGEST_ALGORITHM)).findFirst().get().getString()
                    : ContentDigest.DEFAULT_ALGORITHM;
            // Stored values may carry an xsd:string type suffix; strip it before comparing.
            final String algorithmWithoutStringType = algorithm.replace(FIELD_DELIMITER + XSDstring.getURI(), "");
            if (hasDescriptionProperty(CONTENT_DIGEST)) {
                // Select the stored digest whose URI encodes the wanted algorithm.
                final Optional<Value> digestValue = property2values.apply(getDescriptionProperty(CONTENT_DIGEST))
                        .filter(digest -> {
                            try {
                                final URI digestUri = URI.create(digest.getString());
                                return algorithmWithoutStringType.equalsIgnoreCase(ContentDigest.getAlgorithm(digestUri));
                            } catch (final RepositoryException e) {
                                // A malformed/unreadable digest property is skipped, not fatal.
                                LOGGER.warn("Exception thrown when getting digest property {}, {}", digest, e.getMessage());
                                return false;
                            }
                        }).findFirst();
                // Success: return the matching digest value.
                if (digestValue.isPresent()) {
                    return URI.create(digestValue.get().getString());
                }
            }
            LOGGER.warn("No digest value was found to match the algorithm: {}", algorithmWithoutStringType);
        } catch (final RepositoryException e) {
            // Repository access failed; fall through to the missing-checksum sentinel.
            LOGGER.warn("Could not get content digest: {}", e.getMessage());
        }
        return ContentDigest.missingChecksum();
    }
}
public class VisualizeAssociationScoreApp { /** * Extracts image information and then passes that info onto scorePanel for display . Data is not * recycled to avoid threading issues . */ private void processImage ( ) { } }
final List < Point2D_F64 > leftPts = new ArrayList < > ( ) ; final List < Point2D_F64 > rightPts = new ArrayList < > ( ) ; final List < TupleDesc > leftDesc = new ArrayList < > ( ) ; final List < TupleDesc > rightDesc = new ArrayList < > ( ) ; final ProgressMonitor progressMonitor = new ProgressMonitor ( this , "Compute Feature Information" , "" , 0 , 4 ) ; extractImageFeatures ( progressMonitor , 0 , imageLeft , leftDesc , leftPts ) ; extractImageFeatures ( progressMonitor , 2 , imageRight , rightDesc , rightPts ) ; SwingUtilities . invokeLater ( new Runnable ( ) { public void run ( ) { progressMonitor . close ( ) ; scorePanel . setScorer ( controlPanel . getSelected ( ) ) ; scorePanel . setLocation ( leftPts , rightPts , leftDesc , rightDesc ) ; repaint ( ) ; } } ) ;
public class RSAUtils { /** * 私钥签名 * @ param data * @ param privateKey * @ return * @ throws Exception */ public static String encryptByPrivateKey ( String data , String privateKey ) throws Exception { } }
PKCS8EncodedKeySpec priPKCS8 = new PKCS8EncodedKeySpec ( base64 . decode ( privateKey ) ) ; KeyFactory keyf = KeyFactory . getInstance ( "RSA" ) ; PrivateKey priKey = keyf . generatePrivate ( priPKCS8 ) ; java . security . Signature signature = java . security . Signature . getInstance ( "SHA256WithRSA" ) ; signature . initSign ( priKey ) ; signature . update ( data . getBytes ( "UTf-8" ) ) ; byte [ ] signed = signature . sign ( ) ; return base64 . encodeToString ( signed ) ;
public class UtlProperties {
    /**
     * Parses a comma-delimited string into an ordered set of trimmed tokens,
     * ignoring newline characters in the source.
     *
     * @param pSource comma-delimited source string
     * @return insertion-ordered set of trimmed tokens
     */
    public final LinkedHashSet<String> evalPropsStringsSet(final String pSource) {
        final LinkedHashSet<String> result = new LinkedHashSet<String>();
        // Strip newlines first so a value may be wrapped across lines in the source.
        final String[] tokens = pSource.replace("\n", "").split(",");
        for (final String token : tokens) {
            result.add(token.trim());
        }
        return result;
    }
}
public class StaticCATProducer {
    /**
     * Receives a "send session message" request and forwards it to the internal
     * send method, which sends the message and informs the client of the outcome.
     *
     * @param request                 the incoming request buffer
     * @param conversation            the conversation the request arrived on
     * @param requestNumber           request correlation number
     * @param allocatedFromBufferPool whether {@code request} came from the buffer pool
     * @param partOfExchange          whether the request is part of an exchange
     */
    static void rcvSendSessMsg(CommsServerByteBuffer request, Conversation conversation, int requestNumber, boolean allocatedFromBufferPool, boolean partOfExchange) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "rcvSendSessMsg");
        // Decide whether the send can use an optimized transaction for this conversation.
        final boolean optimizedTx = CommsUtils.requiresOptimizedTransaction(conversation);
        // NOTE(review): partOfExchange is passed before allocatedFromBufferPool here,
        // the reverse of this method's own parameter order — verify against
        // sendSessMsg's signature that this ordering is intended.
        sendSessMsg(request, conversation, requestNumber, partOfExchange, allocatedFromBufferPool, true, optimizedTx);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "rcvSendSessMsg");
    }
}
public class OrderedFuture {
    /**
     * Adds a new ordered future, or returns an already-settled future if this
     * OrderedFuture has completed.
     *
     * <p>Uses a double-checked pattern on {@code complete}: the unsynchronized
     * fast-path check avoids locking once completion is observed, and the check is
     * repeated under the {@code orderedFutures} lock before enqueuing.</p>
     */
    private CompletableFuture<T> orderedFuture() {
        if (!complete) {
            synchronized (orderedFutures) {
                // Re-check under the lock: completion may have raced in between.
                // NOTE(review): assumes `complete` is volatile or only written while
                // holding this same lock — confirm against the field declaration.
                if (!complete) {
                    CompletableFuture<T> future = new CompletableFuture<>();
                    orderedFutures.add(future);
                    return future;
                }
            }
        }
        // Completed: reflect the recorded outcome (result or error) immediately.
        if (error == null) {
            return CompletableFuture.completedFuture(result);
        } else {
            return Futures.exceptionalFuture(error);
        }
    }
}
public class ScheduledInstanceRecurrenceRequest { /** * The days . For a monthly schedule , this is one or more days of the month ( 1-31 ) . For a weekly schedule , this is * one or more days of the week ( 1-7 , where 1 is Sunday ) . You can ' t specify this value with a daily schedule . If the * occurrence is relative to the end of the month , you can specify only a single day . * @ return The days . For a monthly schedule , this is one or more days of the month ( 1-31 ) . For a weekly schedule , * this is one or more days of the week ( 1-7 , where 1 is Sunday ) . You can ' t specify this value with a daily * schedule . If the occurrence is relative to the end of the month , you can specify only a single day . */ public java . util . List < Integer > getOccurrenceDays ( ) { } }
if ( occurrenceDays == null ) { occurrenceDays = new com . amazonaws . internal . SdkInternalList < Integer > ( ) ; } return occurrenceDays ;
public class Ix { /** * Maps each element from this sequence into subsequent Iterable sequences whose elements are * concatenated in order . * Note that flatMap and concatMap operations are the same in the Iterable world . * The result ' s iterator ( ) forwards the call remove ( ) to the current inner Iterator . * @ param < R > the result value type * @ param mapper the function * @ return the new Ix instance * @ throws NullPointerException if mapper is null * @ since 1.0 * @ see # flatMap ( IxFunction ) */ public final < R > Ix < R > concatMap ( IxFunction < ? super T , ? extends Iterable < ? extends R > > mapper ) { } }
return new IxFlattenIterable < T , R > ( this , nullCheck ( mapper , "mapper is null" ) ) ;
public class IterUtil {
    /**
     * Converts an Iterable of map entries into a HashMap.
     *
     * @param <K>       key type
     * @param <V>       value type
     * @param entryIter entries to collect; may be null or empty
     * @return a new HashMap containing all entries (empty when input is null/empty);
     *         later duplicate keys overwrite earlier ones
     */
    public static <K, V> HashMap<K, V> toMap(Iterable<Entry<K, V>> entryIter) {
        final HashMap<K, V> map = new HashMap<K, V>();
        // A plain null guard replaces the isNotEmpty() helper: an empty iterable
        // simply iterates zero times, so the behavior is identical and the method
        // no longer depends on a sibling utility.
        if (entryIter != null) {
            for (Entry<K, V> entry : entryIter) {
                map.put(entry.getKey(), entry.getValue());
            }
        }
        return map;
    }
}
public class SortedProperties { /** * Overriden to be able to write properties sorted by keys to the disk * @ see java . util . Hashtable # keys ( ) */ @ SuppressWarnings ( "unchecked" ) @ Override public synchronized Enumeration < Object > keys ( ) { } }
// sort elements based on detector ( prop key ) names Set < ? > set = keySet ( ) ; return ( Enumeration < Object > ) sortKeys ( ( Set < String > ) set ) ;
public class QuickSelect {
    /**
     * Partially sorts {@code data} so that every element at index &lt;= k is &lt;=
     * every element at index &gt; k, and returns the k-th smallest element. The
     * array is modified in place; only the first {@code maxIndex} elements are
     * considered.
     *
     * <p>Implementation: iterative quickselect with median-of-three pivot
     * selection (the classic partition-based selection algorithm). Average O(n).</p>
     *
     * @param data     the unsorted array (modified in place)
     * @param k        the rank (0-based) of the element to find
     * @param maxIndex only elements below this index are considered
     * @return the k-th smallest element
     */
    public static Comparable select(Comparable[] data, int k, int maxIndex) {
        int i, j, mid;
        int n = maxIndex;
        Comparable a;         // pivot value
        int l = 0;            // left boundary of the active partition
        int ir = n - 1;       // right boundary of the active partition
        Comparable temp;
        for (;;) {
            if (ir <= l + 1) {
                // Active partition has <= 2 elements: order them directly and finish.
                if (ir == l + 1 && data[ir].compareTo(data[l]) < 0) {
                    temp = data[l]; data[l] = data[ir]; data[ir] = temp;
                }
                return data[k];
            } else {
                // Median-of-three: move the middle element next to the left edge,
                // then order data[l], data[l+1], data[ir] so data[l] <= data[l+1] <= data[ir].
                mid = (l + ir) >> 1;
                int lp1 = l + 1;
                temp = data[mid]; data[mid] = data[lp1]; data[lp1] = temp;
                if (data[l].compareTo(data[ir]) > 0) { temp = data[l]; data[l] = data[ir]; data[ir] = temp; }
                if (data[lp1].compareTo(data[ir]) > 0) { temp = data[lp1]; data[lp1] = data[ir]; data[ir] = temp; }
                if (data[l].compareTo(data[lp1]) > 0) { temp = data[lp1]; data[lp1] = data[l]; data[l] = temp; }
                // Partition around the pivot a = data[l+1], scanning inward from both ends.
                i = lp1; j = ir; a = data[lp1];
                for (;;) {
                    do i++; while (data[i].compareTo(a) < 0);
                    do j--; while (data[j].compareTo(a) > 0);
                    if (j < i) break;
                    temp = data[i]; data[i] = data[j]; data[j] = temp;
                }
                // Put the pivot into its final position j.
                data[lp1] = data[j]; data[j] = a;
                // Recurse (iteratively) into whichever side still contains index k.
                if (j >= k) ir = j - 1;
                if (j <= k) l = i;
            }
        }
    }
}
public class DescribeConversionTasksRequest { /** * The conversion task IDs . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setConversionTaskIds ( java . util . Collection ) } or { @ link # withConversionTaskIds ( java . util . Collection ) } if * you want to override the existing values . * @ param conversionTaskIds * The conversion task IDs . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeConversionTasksRequest withConversionTaskIds ( String ... conversionTaskIds ) { } }
if ( this . conversionTaskIds == null ) { setConversionTaskIds ( new com . amazonaws . internal . SdkInternalList < String > ( conversionTaskIds . length ) ) ; } for ( String ele : conversionTaskIds ) { this . conversionTaskIds . add ( ele ) ; } return this ;
public class EnumSetFlagger {
    /**
     * Builds an int bit mask from an EnumSet, setting the bit at each member's
     * ordinal position.
     *
     * @param <E>  enum type
     * @param eSet the set to encode
     * @return bit flags with one bit per set member
     * @throws IllegalArgumentException if any member's ordinal does not fit in an int
     */
    public static <E extends Enum<E>> int getFlag(EnumSet<E> eSet) {
        int flags = 0;
        for (final E member : eSet) {
            final int bit = member.ordinal();
            // An ordinal at or beyond Integer.SIZE would shift out of range.
            if (bit >= Integer.SIZE) {
                throw new IllegalArgumentException(eSet + " contains too many enums for int");
            }
            flags |= 1 << bit;
        }
        return flags;
    }
}
public class DatabaseInformationFull {
    /**
     * ROLE_AUTHORIZATION_DESCRIPTORS system table.
     *
     * <p>Columns: ROLE_NAME, GRANTEE, GRANTOR, IS_GRANTABLE (all not null);
     * primary key is (ROLE_NAME, GRANTEE). One row per direct role grant visible
     * to the current session's grantee: ROLE_NAME is the granted role, GRANTEE the
     * receiving authorization, GRANTOR the granting authorization, and
     * IS_GRANTABLE is YES when the grantee may grant the role onward (here:
     * when the grantee is an admin), NO otherwise.</p>
     *
     * <p>On first call the empty table is created and returned; subsequent calls
     * populate and return it.</p>
     *
     * @return Table
     */
    Table ROLE_AUTHORIZATION_DESCRIPTORS() {
        Table t = sysTables[ROLE_AUTHORIZATION_DESCRIPTORS];
        if (t == null) {
            // First call: define the table structure only.
            t = createBlankTable(sysTableHsqlNames[ROLE_AUTHORIZATION_DESCRIPTORS]);
            addColumn(t, "ROLE_NAME", SQL_IDENTIFIER);      // not null
            addColumn(t, "GRANTEE", SQL_IDENTIFIER);        // not null
            addColumn(t, "GRANTOR", SQL_IDENTIFIER);        // not null
            addColumn(t, "IS_GRANTABLE", YES_OR_NO);        // not null
            // true PK
            HsqlName name = HsqlNameManager.newInfoSchemaObjectName(
                sysTableHsqlNames[ROLE_AUTHORIZATION_DESCRIPTORS].name, false, SchemaObject.INDEX);
            t.createPrimaryKey(name, new int[]{ 0, 1 }, true);
            return t;
        }
        PersistentStore store = database.persistentStoreCollection.getStore(t);
        // Intermediate holders
        String grantorName = SqlInvariants.SYSTEM_AUTHORIZATION_NAME;
        Iterator grantees;
        Grantee granteeObject;
        String granteeName;
        Iterator roles;
        String roleName;      // NOTE(review): declared but never used below
        String isGrantable;
        Object[] row;
        // Column number mappings
        final int role_name = 0;
        final int grantee = 1;
        final int grantor = 2;
        final int is_grantable = 3;
        // Initialization: iterate every grantee visible to the current session.
        grantees = session.getGrantee().visibleGrantees().iterator();
        while (grantees.hasNext()) {
            granteeObject = (Grantee) grantees.next();
            granteeName = granteeObject.getNameString();
            roles = granteeObject.getDirectRoles().iterator();
            // Admins may re-grant their roles.
            isGrantable = granteeObject.isAdmin() ? Tokens.T_YES : Tokens.T_NO; ; // NOTE(review): stray empty statement — harmless
            while (roles.hasNext()) {
                Grantee role = (Grantee) roles.next();
                row = t.getEmptyRowData();
                row[role_name] = role.getNameString();
                row[grantee] = granteeName;
                row[grantor] = grantorName;
                row[is_grantable] = isGrantable;
                t.insertSys(store, row);
            }
        }
        return t;
    }
}
public class DOMUtil { /** * Return the inner text of the first child with the given name . */ public static String getChildText ( Element parent , String childName ) { } }
Element child = getChild ( parent , childName ) ; if ( child == null ) return null ; return getInnerText ( child ) ;
public class JsApiHdrsImpl { /** * Get the Report DiscardMsg field from the message header . * Javadoc description supplied by the SIBusMessage interface . */ @ Override public final Boolean getReportDiscardMsg ( ) { } }
Boolean value = ( Boolean ) getApi ( ) . getField ( JsApiAccess . REPORTDISCARDMSG_VALUE ) ; return ( value == null ) ? Boolean . FALSE : value ;
public class LastaToActionFilter { protected void showBoot ( FwAssistantDirector assistantDirector ) { } }
if ( logger . isInfoEnabled ( ) ) { final FwCoreDirection coreDirection = assistantDirector . assistCoreDirection ( ) ; final String domainTitle = coreDirection . assistDomainTitle ( ) ; final String environmentTitle = coreDirection . assistEnvironmentTitle ( ) ; final String frameworkDebugExp = coreDirection . isFrameworkDebug ( ) ? " *frameworkDebug" : "" ; logger . info ( "_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/" ) ; logger . info ( " the system has been initialized:" ) ; logger . info ( "" ) ; logger . info ( " -> " + domainTitle + " (" + environmentTitle + ")" + frameworkDebugExp ) ; logger . info ( "_/_/_/_/_/_/_/_/_/_/" ) ; }
public class Parser { /** * Method is used to parse String data to the proper Java types . * @ param schema Input schema to parse the String data by . * @ param input Java type specific to the schema supplied . * @ return Java type for the * @ throws DataException Exception is thrown when there is an exception thrown while parsing the input string . * @ throws UnsupportedOperationException Exception is thrown if there is no type parser registered for the schema . * @ throws NullPointerException Exception is thrown if the schema passed is not optional and a null input value is passed . */ public Object parseString ( Schema schema , String input ) { } }
checkSchemaAndInput ( schema , input ) ; if ( null == input ) { return null ; } TypeParser parser = findParser ( schema ) ; try { Object result = parser . parseString ( input , schema ) ; return result ; } catch ( Exception ex ) { String message = String . format ( "Could not parse '%s' to '%s'" , input , parser . expectedClass ( ) . getSimpleName ( ) ) ; throw new DataException ( message , ex ) ; }
public class Status { /** * Method to get the key to the label . * @ return key to the label */ public String getLabelKey ( ) { } }
final StringBuilder keyStr = new StringBuilder ( ) ; return keyStr . append ( getStatusGroup ( ) . getName ( ) ) . append ( "/Key.Status." ) . append ( key ) . toString ( ) ;
public class FaxReader { /** * Make the request to the Twilio API to perform the read . * @ param client TwilioRestClient with which to make the request * @ return Fax ResourceSet */ @ Override @ SuppressWarnings ( "checkstyle:linelength" ) public Page < Fax > firstPage ( final TwilioRestClient client ) { } }
Request request = new Request ( HttpMethod . GET , Domains . FAX . toString ( ) , "/v1/Faxes" , client . getRegion ( ) ) ; addQueryParams ( request ) ; return pageForRequest ( client , request ) ;
public class MergeVisitor { /** * Enables the static methods of the java . lang . String class to be called from the templates . If the key " String " is * already reserved by the user , this method simply returns the input parameter . * @ param userCallables * @ return all the template callables */ public TemplateCallable [ ] addDefaults ( TemplateCallable [ ] userCallables ) { } }
TemplateCallable strFunctionality = new TemplateCallable ( String . class . getSimpleName ( ) , String . class ) ; for ( TemplateCallable u : userCallables ) { if ( u . equals ( strFunctionality ) ) { return userCallables ; } } return GeneralUtils . concat ( userCallables , new TemplateCallable [ ] { strFunctionality } ) ;
public class StatusWatermarkValve {
    /**
     * Feeds a {@link Watermark} into the valve. If the input advances the overall
     * minimum across aligned channels,
     * {@link ValveOutputHandler#handleWatermark(Watermark)} is invoked with the
     * new watermark.
     *
     * @param watermark    the watermark to feed to the valve
     * @param channelIndex the index of the channel the watermark belongs to (0-based)
     */
    public void inputWatermark(Watermark watermark, int channelIndex) {
        // ignore the input watermark if its input channel, or all input channels are idle (i.e. overall the valve is idle).
        if (lastOutputStreamStatus.isActive() && channelStatuses[channelIndex].streamStatus.isActive()) {
            long watermarkMillis = watermark.getTimestamp();
            // if the input watermark's value is less than the last received watermark for its input channel, ignore it also.
            if (watermarkMillis > channelStatuses[channelIndex].watermark) {
                // Record the channel's new high-water mark.
                channelStatuses[channelIndex].watermark = watermarkMillis;
                // previously unaligned input channels are now aligned if its watermark has caught up
                if (!channelStatuses[channelIndex].isWatermarkAligned && watermarkMillis >= lastOutputWatermark) {
                    channelStatuses[channelIndex].isWatermarkAligned = true;
                }
                // now, attempt to find a new min watermark across all aligned channels
                findAndOutputNewMinWatermarkAcrossAlignedChannels();
            }
        }
    }
}
public class Server { /** * Add Web Application . * @ param virtualHost Virtual host name or null * @ param contextPathSpec The context path spec . Which must be of * the form / or / path / * * @ param webApp The Web application directory or WAR file . * @ return The WebApplicationContext * @ exception IOException */ public WebApplicationContext addWebApplication ( String virtualHost , String contextPathSpec , String webApp ) throws IOException { } }
WebApplicationContext appContext = newWebApplicationContext ( webApp ) ; appContext . setContextPath ( contextPathSpec ) ; addContext ( virtualHost , appContext ) ; if ( log . isDebugEnabled ( ) ) log . debug ( "Web Application " + appContext + " added" ) ; return appContext ;
public class PackagesReportFooting { /** * SetupSFields Method . */ public void setupSFields ( ) { } }
this . getRecord ( ClassProject . CLASS_PROJECT_FILE ) . getField ( ClassProject . ID ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; this . getRecord ( ClassProject . CLASS_PROJECT_FILE ) . getField ( ClassProject . NAME ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; this . getRecord ( ClassProject . CLASS_PROJECT_FILE ) . getField ( ClassProject . DESCRIPTION ) . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . ANCHOR_DEFAULT ) , this , ScreenConstants . DEFAULT_DISPLAY ) ;
public class SuspiciousUninitializedArray {
    /**
     * Overrides the visitor to reset per-class detector state before visiting and
     * release it afterwards.
     *
     * @param classContext the context object of the currently parsed class
     */
    @Override
    public void visitClassContext(ClassContext classContext) {
        try {
            // Enum classes are handled specially by the detector.
            isEnum = classContext.getJavaClass().isEnum();
            // Fresh per-class state: opcode stack, uninitialized-register tracking,
            // array alias map, and stored uninitialized-value map.
            stack = new OpcodeStack();
            uninitializedRegs = new BitSet();
            arrayAliases = new HashMap<>();
            storedUVs = new HashMap<>();
            super.visitClassContext(classContext);
        } finally {
            // Release references so state never leaks across classes.
            stack = null;
            uninitializedRegs = null;
            arrayAliases = null;
            storedUVs = null;
        }
    }
}
public class ElasticSearchUtils { /** * Create an ES Index . * @ param indexName * @ return true if the index has been created and false if the index has not been created . * @ throws ElasticsearchException */ public boolean createSingleIndex ( String indexName ) throws ElasticsearchException { } }
CreateIndexRequest indexRequest = new CreateIndexRequest ( indexName ) ; CreateIndexResponse res = this . client . admin ( ) . indices ( ) . create ( indexRequest ) . actionGet ( ) ; return indexExists ( indexName ) ;
public class ChangeObjects { /** * method to remove a current annotation of a PolymerNotation * @ param polymer * PolymerNotation * @ return PolymerNotation with no annotation */ public final static PolymerNotation removeAnnotationOfPolmyer ( PolymerNotation polymer ) { } }
return new PolymerNotation ( polymer . getPolymerID ( ) , polymer . getPolymerElements ( ) , null ) ;
public class snmpmib { /** * Use this API to fetch all the snmpmib resources that are configured on netscaler . */ public static snmpmib get ( nitro_service service , options option ) throws Exception { } }
snmpmib obj = new snmpmib ( ) ; snmpmib [ ] response = ( snmpmib [ ] ) obj . get_resources ( service , option ) ; return response [ 0 ] ;
public class WebSocketPlugin { /** * { @ inheritDoc } */ @ Override public void setApplication ( MultiThreadedApplicationAdapter application ) { } }
log . info ( "WebSocketPlugin application: {}" , application ) ; // get the app scope final IScope appScope = application . getScope ( ) ; // put if not already there managerMap . putIfAbsent ( appScope , new WebSocketScopeManager ( ) ) ; // add the app scope to the manager managerMap . get ( appScope ) . setApplication ( appScope ) ; super . setApplication ( application ) ;
public class AbstractJMXAgent { /** * ( non - Javadoc ) * @ see net . timewalker . ffmq4 . jmx . JMXAgent # register ( javax . management . ObjectName , java . lang . Object ) */ @ Override public final void register ( ObjectName name , Object mBean ) throws JMSException { } }
log . debug ( "Registering object " + name ) ; try { this . mBeanServer . registerMBean ( mBean , name ) ; } catch ( Exception e ) { throw new FFMQException ( "Cannot register MBean" , "JMX_ERROR" , e ) ; }
public class CmsWidgetDialogParameter { /** * Initializes a widget parameter with the given values . < p > * @ param value the initial value of the parameter * @ param defaultValue the default value of the parameter * @ param name the id of the parameter * @ param widget the widget used for this parameter * @ param dialog the dialog this parameter is used on * @ param minOccurs the required minimum numer of occurences of this parameter * @ param maxOccurs the maximum allowed numer of occurences of this parameter * @ param index the index of this parameter in the list */ protected void init ( String value , String defaultValue , String name , I_CmsWidget widget , String dialog , int minOccurs , int maxOccurs , int index ) { } }
if ( defaultValue == null ) { m_defaultValue = "" ; } else { m_defaultValue = defaultValue ; } if ( value == null ) { m_value = m_defaultValue ; } else { m_value = value ; } m_name = name ; m_widget = widget ; if ( maxOccurs < MAX_OCCURENCES ) { m_maxOccurs = maxOccurs ; } else { m_maxOccurs = MAX_OCCURENCES ; } if ( minOccurs >= 0 ) { m_minOccurs = minOccurs ; } else { m_minOccurs = 0 ; } if ( m_minOccurs > m_maxOccurs ) { m_minOccurs = m_maxOccurs ; } m_dialogPage = dialog ; m_error = null ; setindex ( index ) ;