signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class StringColumn { /** * { @ inheritDoc } */
@ Override protected String decodeData ( byte [ ] buffer ) { } } | // buffer is guaranteed non - null and proper length
String ret = new String ( buffer ) ; ret = ret . trim ( ) ; return ret . isEmpty ( ) ? EMPTY : ret ; |
public class OriginationRouteMarshaller { /** * Marshall the given parameter object . */
public void marshall ( OriginationRoute originationRoute , ProtocolMarshaller protocolMarshaller ) { } } | if ( originationRoute == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( originationRoute . getHost ( ) , HOST_BINDING ) ; protocolMarshaller . marshall ( originationRoute . getPort ( ) , PORT_BINDING ) ; protocolMarshaller . marshall ( originationRoute . getProtocol ( ) , PROTOCOL_BINDING ) ; protocolMarshaller . marshall ( originationRoute . getPriority ( ) , PRIORITY_BINDING ) ; protocolMarshaller . marshall ( originationRoute . getWeight ( ) , WEIGHT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class OWLLiteralImpl_CustomFieldSerializer { /** * Serializes the content of the object into the
* { @ link com . google . gwt . user . client . rpc . SerializationStreamWriter } .
* @ param streamWriter the { @ link com . google . gwt . user . client . rpc . SerializationStreamWriter } to write the
* object ' s content to
* @ param instance the object instance to serialize
* @ throws com . google . gwt . user . client . rpc . SerializationException
* if the serialization operation is not
* successful */
@ Override public void serializeInstance ( SerializationStreamWriter streamWriter , OWLLiteralImpl instance ) throws SerializationException { } } | serialize ( streamWriter , instance ) ; |
public class EnvController { /** * list
* @ return */
@ RequestMapping ( value = "/list" , method = RequestMethod . GET ) @ ResponseBody public JsonObjectBase list ( ) { } } | List < EnvListVo > envListVos = envMgr . getVoList ( ) ; return buildListSuccess ( envListVos , envListVos . size ( ) ) ; |
public class DirectoryLoaderAdaptor { /** * Load the value for a specific key */
protected Object load ( final IndexScopedKey key ) { } } | try { return key . accept ( loadVisitor ) ; } catch ( Exception e ) { throw log . exceptionInCacheLoader ( e ) ; } |
public class WordBasedSegment { /** * 原子分词
* @ param sSentence
* @ param start
* @ param end
* @ return
* @ deprecated 应该使用字符数组的版本 */
private static List < AtomNode > atomSegment ( String sSentence , int start , int end ) { } } | if ( end < start ) { throw new RuntimeException ( "start=" + start + " < end=" + end ) ; } List < AtomNode > atomSegment = new ArrayList < AtomNode > ( ) ; int pCur = 0 , nCurType , nNextType ; StringBuilder sb = new StringBuilder ( ) ; char c ; // by zhenyulu :
// TODO : 使用一系列正则表达式将句子中的完整成分 ( 百分比 、 日期 、 电子邮件 、 URL等 ) 预先提取出来
char [ ] charArray = sSentence . substring ( start , end ) . toCharArray ( ) ; int [ ] charTypeArray = new int [ charArray . length ] ; // 生成对应单个汉字的字符类型数组
for ( int i = 0 ; i < charArray . length ; ++ i ) { c = charArray [ i ] ; charTypeArray [ i ] = CharType . get ( c ) ; if ( c == '.' && i < ( charArray . length - 1 ) && CharType . get ( charArray [ i + 1 ] ) == CharType . CT_NUM ) charTypeArray [ i ] = CharType . CT_NUM ; else if ( c == '.' && i < ( charArray . length - 1 ) && charArray [ i + 1 ] >= '0' && charArray [ i + 1 ] <= '9' ) charTypeArray [ i ] = CharType . CT_SINGLE ; else if ( charTypeArray [ i ] == CharType . CT_LETTER ) charTypeArray [ i ] = CharType . CT_SINGLE ; } // 根据字符类型数组中的内容完成原子切割
while ( pCur < charArray . length ) { nCurType = charTypeArray [ pCur ] ; if ( nCurType == CharType . CT_CHINESE || nCurType == CharType . CT_INDEX || nCurType == CharType . CT_DELIMITER || nCurType == CharType . CT_OTHER ) { String single = String . valueOf ( charArray [ pCur ] ) ; if ( single . length ( ) != 0 ) atomSegment . add ( new AtomNode ( single , nCurType ) ) ; pCur ++ ; } // 如果是字符 、 数字或者后面跟随了数字的小数点 “ . ” 则一直取下去 。
else if ( pCur < charArray . length - 1 && ( ( nCurType == CharType . CT_SINGLE ) || nCurType == CharType . CT_NUM ) ) { sb . delete ( 0 , sb . length ( ) ) ; sb . append ( charArray [ pCur ] ) ; boolean reachEnd = true ; while ( pCur < charArray . length - 1 ) { nNextType = charTypeArray [ ++ pCur ] ; if ( nNextType == nCurType ) sb . append ( charArray [ pCur ] ) ; else { reachEnd = false ; break ; } } atomSegment . add ( new AtomNode ( sb . toString ( ) , nCurType ) ) ; if ( reachEnd ) pCur ++ ; } // 对于所有其它情况
else { atomSegment . add ( new AtomNode ( charArray [ pCur ] , nCurType ) ) ; pCur ++ ; } } // logger . trace ( " 原子分词 : " + atomSegment ) ;
return atomSegment ; |
public class BaseDestinationHandler { /** * Initialize non - persistent fields . These fields are common to both MS
* reconstitution of DestinationHandlers and initial creation .
* Feature 174199.2.4
* @ param messageProcessor the message processor instance
* @ param durableSubscriptionsTable the topicspace durable subscriptions
* HashMap from the DestinationManager . Can be null if this
* BaseDestinationHandler is point to point ( local or remote ) .
* @ param transaction the transaction to use for non persistent
* initialization . Can be null , in which case an auto transaction
* will be used . */
void initializeNonPersistent ( MessageProcessor messageProcessor , HashMap < String , Object > durableSubscriptionsTable , TransactionCommon transaction ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "initializeNonPersistent" , new Object [ ] { messageProcessor , durableSubscriptionsTable , transaction } ) ; // Check if we are running in an ND environment . If not we can skip
// some performance intensive WLM work
_singleServer = messageProcessor . isSingleServer ( ) ; if ( isPubSub ( ) ) { _pubSubRealization . initialise ( true , durableSubscriptionsTable ) ; } else { _ptoPRealization . initialise ( ) ; _remoteQueuePoints = new Index ( ) ; createControlAdapter ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "initializeNonPersistent" ) ; |
public class EventRecurrence { /** * Determines whether two integer arrays contain identical elements .
* The native implementation over - allocated the arrays ( and may have stuff left over from
* a previous run ) , so we can ' t just check the arrays - - the separately - maintained count
* field also matters . We assume that a null array will have a count of zero , and that the
* array can hold as many elements as the associated count indicates .
* TODO : replace this with Arrays . equals ( ) when the old parser goes away . */
private static boolean arraysEqual ( int [ ] array1 , int count1 , int [ ] array2 , int count2 ) { } } | if ( count1 != count2 ) { return false ; } for ( int i = 0 ; i < count1 ; i ++ ) { if ( array1 [ i ] != array2 [ i ] ) return false ; } return true ; |
public class BigtableInstanceAdminClient { /** * Asynchronously gets the cluster representation by ID .
* < p > Sample code :
* < pre > { @ code
* ApiFuture < Cluster > clusterFuture = client . getClusterAsync ( " my - instance " , " my - cluster " ) ;
* Cluster cluster = clusterFuture . get ( ) ;
* } < / pre > */
@ SuppressWarnings ( "WeakerAccess" ) public ApiFuture < Cluster > getClusterAsync ( String instanceId , String clusterId ) { } } | String name = NameUtil . formatClusterName ( projectId , instanceId , clusterId ) ; com . google . bigtable . admin . v2 . GetClusterRequest request = com . google . bigtable . admin . v2 . GetClusterRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return ApiFutures . transform ( stub . getClusterCallable ( ) . futureCall ( request ) , new ApiFunction < com . google . bigtable . admin . v2 . Cluster , Cluster > ( ) { @ Override public Cluster apply ( com . google . bigtable . admin . v2 . Cluster proto ) { return Cluster . fromProto ( proto ) ; } } , MoreExecutors . directExecutor ( ) ) ; |
public class SQLExpressions { /** * Truncate the given date expression
* @ param unit date part to truncate to
* @ param expr truncated date */
public static < D extends Comparable > DateExpression < D > datetrunc ( DatePart unit , DateExpression < D > expr ) { } } | return Expressions . dateOperation ( expr . getType ( ) , DATE_TRUNC_OPS . get ( unit ) , expr ) ; |
public class Disk { /** * Creates a snapshot for this disk given the snapshot ' s name .
* @ return a zone operation for snapshot creation
* @ throws ComputeException upon failure */
public Operation createSnapshot ( String snapshot , OperationOption ... options ) { } } | return compute . create ( SnapshotInfo . of ( SnapshotId . of ( snapshot ) , getDiskId ( ) ) , options ) ; |
public class ExtensionResult { /** * Utility method , returning the ( singleton ) object indicating , no result could be computed .
* @ param < S >
* ( hypothesis ) state type
* @ param < I >
* input alphabet type
* @ param < O >
* output alphabet type
* @ return the empty result */
@ SuppressWarnings ( "unchecked" ) public static < S , I , O > ExtensionResult < S , I , O > empty ( ) { } } | return ( ExtensionResult < S , I , O > ) EMPTY ; |
public class LdapCacheService { /** * Factory Methods */
static < R , K > LdapCacheService < R , K > createNoCacheService ( final LdapSearcher < R , K > searcher ) { } } | return new LdapCacheService < R , K > ( searcher , CacheMode . OFF , 0 , false , 0 ) ; |
public class Normalization { /** * Normalize by zero mean unit variance
* @ param frame the data to normalize
* @ return a zero mean unit variance centered
* rdd */
public static DataRowsFacade zeromeanUnitVariance ( DataRowsFacade frame , List < String > skipColumns ) { } } | List < String > columnsList = DataFrames . toList ( frame . get ( ) . columns ( ) ) ; columnsList . removeAll ( skipColumns ) ; String [ ] columnNames = DataFrames . toArray ( columnsList ) ; // first row is std second row is mean , each column in a row is for a particular column
List < Row > stdDevMean = stdDevMeanColumns ( frame , columnNames ) ; for ( int i = 0 ; i < columnNames . length ; i ++ ) { String columnName = columnNames [ i ] ; double std = ( ( Number ) stdDevMean . get ( 0 ) . get ( i ) ) . doubleValue ( ) ; double mean = ( ( Number ) stdDevMean . get ( 1 ) . get ( i ) ) . doubleValue ( ) ; if ( std == 0.0 ) std = 1 ; // All same value - > ( x - x ) / 1 = 0
frame = dataRows ( frame . get ( ) . withColumn ( columnName , frame . get ( ) . col ( columnName ) . minus ( mean ) . divide ( std ) ) ) ; } return frame ; |
public class DigestAuthenticator { public void sendChallenge ( UserRealm realm , HttpRequest request , HttpResponse response , boolean stale ) throws IOException { } } | response . setField ( HttpFields . __WwwAuthenticate , "Digest realm=\"" + realm . getName ( ) + "\", domain=\"" + response . getHttpContext ( ) . getContextPath ( ) + "\", nonce=\"" + newNonce ( request ) + "\", algorithm=MD5, qop=\"auth\"" + ( useStale ? ( " stale=" + stale ) : "" ) ) ; response . sendError ( HttpResponse . __401_Unauthorized ) ; |
public class ClassFileWriter { /** * Generate code to load the given double on stack .
* @ param k the constant */
public void addPush ( double k ) { } } | if ( k == 0.0 ) { // zero
add ( ByteCode . DCONST_0 ) ; if ( 1.0 / k < 0 ) { // Negative zero
add ( ByteCode . DNEG ) ; } } else if ( k == 1.0 || k == - 1.0 ) { add ( ByteCode . DCONST_1 ) ; if ( k < 0 ) { add ( ByteCode . DNEG ) ; } } else { addLoadConstant ( k ) ; } |
public class DomHelper { /** * Create or update an element in the DOM . The id will be generated .
* @ param parent
* the parent group
* @ param name
* the local group name of the element ( should be unique within the group )
* @ param type
* the type of the element ( tag name , e . g . ' image ' )
* @ param style
* The style to apply on the element .
* @ return the created or updated element or null if creation failed */
public Element createOrUpdateElement ( Object parent , String name , String type , Style style ) { } } | return createOrUpdateElement ( parent , name , type , style , true ) ; |
public class DropTargetHelper { /** * Register a new DropPasteWorkerInterface .
* @ param worker The new worker */
public void registerDropPasteWorker ( DropPasteWorkerInterface worker ) { } } | this . dropPasteWorkerSet . add ( worker ) ; defaultDropTarget . setDefaultActions ( defaultDropTarget . getDefaultActions ( ) | worker . getAcceptableActions ( defaultDropTarget . getComponent ( ) ) ) ; |
public class MithraTransactionalPortal { /** * / * returns null if the members of the list are not all participating in the transaction */
private List checkTransactionParticipationAndWaitForOtherTransactions ( List list , MithraTransaction tx ) { } } | if ( list == null ) return null ; List result = list ; if ( this . getTxParticipationMode ( tx ) . mustParticipateInTxOnRead ( ) ) { for ( int i = 0 ; i < list . size ( ) ; i ++ ) { MithraTransactionalObject mto = ( MithraTransactionalObject ) list . get ( i ) ; if ( ! mto . zIsParticipatingInTransaction ( tx ) ) { result = null ; mto . zWaitForExclusiveWriteTx ( tx ) ; } } } return result ; |
public class TargetSession { /** * Sets the target name and retrieves the target ( if it exists ) from the TargetServer
* @ param targetName */
public void setTargetName ( String targetName ) { } } | if ( targetName == null ) target = null ; target = targetServer . getTarget ( targetName ) ; |
public class WriterFactoryImpl { /** * { @ inheritDoc } */
@ Override public AnnotationTypeRequiredMemberWriter getAnnotationTypeRequiredMemberWriter ( AnnotationTypeWriter annotationTypeWriter ) { } } | TypeElement te = annotationTypeWriter . getAnnotationTypeElement ( ) ; return new AnnotationTypeRequiredMemberWriterImpl ( ( SubWriterHolderWriter ) annotationTypeWriter , te ) ; |
public class WaveUtils { /** * Re - writes the data size in the header ( bytes 4-8 ) of the WAVE ( . wav ) input stream . < br >
* It needs to be read in order to calculate the size .
* @ param is the input stream
* @ return A new input stream that includes the data header in the header
* @ throws IOException Signals that an I / O exception has occurred . */
public static InputStream reWriteWaveHeader ( InputStream is ) throws IOException { } } | byte [ ] audioBytes = toByteArray ( is ) ; int filesize = audioBytes . length - WAVE_HEADER_SIZE ; writeInt ( filesize , audioBytes , WAVE_SIZE_POS ) ; writeInt ( filesize - WAVE_HEADER_SIZE , audioBytes , WAVE_METADATA_POS ) ; return new ByteArrayInputStream ( audioBytes ) ; |
public class RichByteBufferPool { /** * Actual creates the proxy which will handle the calls to the real WsByteBuffer .
* @ param actualBuffer
* @ return */
RichByteBufferImpl getFromPool ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getFromPool" ) ; RichByteBufferImpl buffer = ( RichByteBufferImpl ) byteBufferWrapperPool . remove ( ) ; if ( buffer == null ) { buffer = new RichByteBufferImpl ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getFromPool" , buffer ) ; return buffer ; |
public class AnnotationTypeBuilder { /** * Build the annotation type field documentation .
* @ param node the XML element that specifies which components to document
* @ param memberDetailsTree the content tree to which the documentation will be added
* @ throws DocletException if there is a problem building the documentation */
public void buildAnnotationTypeFieldDetails ( XMLNode node , Content memberDetailsTree ) throws DocletException { } } | configuration . getBuilderFactory ( ) . getAnnotationTypeFieldsBuilder ( writer ) . buildChildren ( node , memberDetailsTree ) ; |
public class LocalLockMediator { /** * Acquire the lock specified by { @ code kc } .
* For any particular key - column , whatever value of { @ code requestor } is
* passed to this method must also be passed to the associated later call to
* { @ link # unlock ( KeyColumn , ExpectedValueCheckingTransaction ) } .
* If some requestor { @ code r } calls this method on a KeyColumn { @ code k }
* and this method returns true , then subsequent calls to this method by
* { @ code r } on { @ code l } merely attempt to update the { @ code expiresAt }
* timestamp . This differs from typical lock reentrance : multiple successful
* calls to this method do not require an equal number of calls to
* { @ code # unlock ( ) } . One { @ code # unlock ( ) } call is enough , no matter how
* many times a { @ code requestor } called { @ code lock } beforehand . Note that
* updating the timestamp may fail , in which case the lock is considered to
* have expired and the calling context should assume it no longer holds the
* lock specified by { @ code kc } .
* The number of nanoseconds elapsed since the UNIX Epoch is not readily
* available within the JVM . When reckoning expiration times , this method
* uses the approximation implemented by
* { @ link com . thinkaurelius . titan . diskstorage . util . NanoTime # getApproxNSSinceEpoch ( false ) } .
* The current implementation of this method returns true when given an
* { @ code expiresAt } argument in the past . Future implementations may return
* false instead .
* @ param kc lock identifier
* @ param requestor the object locking { @ code kc }
* @ param expires instant at which this lock will automatically expire
* @ return true if the lock is acquired , false if it was not acquired */
public boolean lock ( KeyColumn kc , T requestor , Timepoint expires ) { } } | assert null != kc ; assert null != requestor ; AuditRecord < T > audit = new AuditRecord < > ( requestor , expires ) ; AuditRecord < T > inmap = locks . putIfAbsent ( kc , audit ) ; boolean success = false ; if ( null == inmap ) { // Uncontended lock succeeded
if ( log . isTraceEnabled ( ) ) { log . trace ( "New local lock created: {} namespace={} txn={}" , kc , name , requestor ) ; } success = true ; } else if ( inmap . equals ( audit ) ) { // requestor has already locked kc ; update expiresAt
success = locks . replace ( kc , inmap , audit ) ; if ( log . isTraceEnabled ( ) ) { if ( success ) { log . trace ( "Updated local lock expiration: {} namespace={} txn={} oldexp={} newexp={}" , kc , name , requestor , inmap . expires , audit . expires ) ; } else { log . trace ( "Failed to update local lock expiration: {} namespace={} txn={} oldexp={} newexp={}" , kc , name , requestor , inmap . expires , audit . expires ) ; } } } else if ( 0 > inmap . expires . compareTo ( times . getTime ( ) ) ) { // the recorded lock has expired ; replace it
success = locks . replace ( kc , inmap , audit ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( "Discarding expired lock: {} namespace={} txn={} expired={}" , kc , name , inmap . holder , inmap . expires ) ; } } else { // we lost to a valid lock
if ( log . isTraceEnabled ( ) ) { log . trace ( "Local lock failed: {} namespace={} txn={} (already owned by {})" , kc , name , requestor , inmap ) ; } } if ( success ) { expiryQueue . add ( new ExpirableKeyColumn ( kc , expires ) ) ; } return success ; |
public class SpanUtil { /** * Converts from an Accumulo Key to a Fluo RowColumn
* @ param key Key
* @ return RowColumn */
public static RowColumn toRowColumn ( Key key ) { } } | if ( key == null ) { return RowColumn . EMPTY ; } if ( ( key . getRow ( ) == null ) || key . getRow ( ) . getLength ( ) == 0 ) { return RowColumn . EMPTY ; } Bytes row = ByteUtil . toBytes ( key . getRow ( ) ) ; if ( ( key . getColumnFamily ( ) == null ) || key . getColumnFamily ( ) . getLength ( ) == 0 ) { return new RowColumn ( row ) ; } Bytes cf = ByteUtil . toBytes ( key . getColumnFamily ( ) ) ; if ( ( key . getColumnQualifier ( ) == null ) || key . getColumnQualifier ( ) . getLength ( ) == 0 ) { return new RowColumn ( row , new Column ( cf ) ) ; } Bytes cq = ByteUtil . toBytes ( key . getColumnQualifier ( ) ) ; if ( ( key . getColumnVisibility ( ) == null ) || key . getColumnVisibility ( ) . getLength ( ) == 0 ) { return new RowColumn ( row , new Column ( cf , cq ) ) ; } Bytes cv = ByteUtil . toBytes ( key . getColumnVisibility ( ) ) ; return new RowColumn ( row , new Column ( cf , cq , cv ) ) ; |
public class CmsDriverManager { /** * Returns < code > true < / code > if a user is member of the given group . < p >
* @ param dbc the current database context
* @ param username the name of the user to check
* @ param groupname the name of the group to check
* @ param readRoles if to read roles or groups
* @ return < code > true < / code > , if the user is in the group , < code > false < / code > otherwise
* @ throws CmsException if something goes wrong */
public boolean userInGroup ( CmsDbContext dbc , String username , String groupname , boolean readRoles ) throws CmsException { } } | List < CmsGroup > groups = getGroupsOfUser ( dbc , username , readRoles ) ; for ( int i = 0 ; i < groups . size ( ) ; i ++ ) { CmsGroup group = groups . get ( i ) ; if ( groupname . equals ( group . getName ( ) ) || groupname . substring ( 1 ) . equals ( group . getName ( ) ) ) { return true ; } } return false ; |
public class NewOnChangeHandler { /** * The Field has Changed .
* Do an addNew ( ) on the target record .
* @ param bDisplayOption If true , display the change .
* @ param iMoveMode The type of move being done ( init / read / screen ) .
* @ return The error code ( or NORMAL _ RETURN if okay ) .
* Field Changed , do an addNew on this record . */
public int fieldChanged ( boolean bDisplayOption , int iMoveMode ) { } } | try { m_recTarget . addNew ( ) ; } catch ( DBException e ) { return e . getErrorCode ( ) ; } return DBConstants . NORMAL_RETURN ; |
public class CmsSourceEditor { /** * Returns the syntax highlighting type for the currently edited resource . < p >
* @ param resource the resource to edit
* @ return the syntax highlighting type */
public CodeMirrorLanguage getHighlightMode ( CmsResource resource ) { } } | if ( resource != null ) { // determine resource type
int type = resource . getTypeId ( ) ; if ( CmsResourceTypeJsp . isJspTypeId ( type ) ) { // JSP file
return CodeMirrorLanguage . JSP ; } if ( CmsResourceTypeXmlContent . isXmlContent ( resource ) || CmsResourceTypeXmlPage . isXmlPage ( resource ) ) { // XML content file or XML page file
return CodeMirrorLanguage . XML ; } // all other files will be matched according to their suffix
int dotIndex = resource . getName ( ) . lastIndexOf ( '.' ) ; if ( dotIndex != - 1 ) { String suffix = resource . getName ( ) . substring ( dotIndex + 1 ) . toLowerCase ( ) ; for ( CodeMirrorLanguage lang : CodeMirrorLanguage . values ( ) ) { if ( lang . isSupportedFileType ( suffix ) ) { return lang ; } } } } // return HTML type as default
return CodeMirrorLanguage . HTML ; |
public class Project { /** * A collection of regression plans that belong to this project .
* @ param filter Criteria to filter on . Project ( s ) will be set automatically .
* If null , all related regression items in the project are returned .
* @ param includeSubprojects Specifies whether to include items from sub - project or not .
* This only adds open subprojects .
* @ return RegressionPlan . */
public Collection < RegressionPlan > getRegressionPlans ( RegressionPlanFilter filter , boolean includeSubprojects ) { } } | filter = ( filter != null ) ? filter : new RegressionPlanFilter ( ) ; filter . project . clear ( ) ; if ( includeSubprojects ) { for ( Project project : getThisAndAllChildProjects ( ) ) { filter . project . add ( project ) ; } } else { filter . project . add ( this ) ; } return getInstance ( ) . get ( ) . regressionPlans ( filter ) ; |
public class XLinkUtils { /** * Naive model to view assignment .
* The current model is choosen for the first occurence of the view . */
private static Map < String , ModelDescription > assigneModelsToViews ( Map < ModelDescription , XLinkConnectorView [ ] > modelsToViews ) { } } | HashMap < String , ModelDescription > viewsToModels = new HashMap < String , ModelDescription > ( ) ; for ( ModelDescription modelInfo : modelsToViews . keySet ( ) ) { List < XLinkConnectorView > currentViewList = Arrays . asList ( modelsToViews . get ( modelInfo ) ) ; for ( XLinkConnectorView view : currentViewList ) { if ( ! viewsToModels . containsKey ( view . getViewId ( ) ) ) { viewsToModels . put ( view . getViewId ( ) , modelInfo ) ; } } } return viewsToModels ; |
public class DescribeStacksResult { /** * An array of < code > Stack < / code > objects that describe the stacks .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setStacks ( java . util . Collection ) } or { @ link # withStacks ( java . util . Collection ) } if you want to override the
* existing values .
* @ param stacks
* An array of < code > Stack < / code > objects that describe the stacks .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeStacksResult withStacks ( Stack ... stacks ) { } } | if ( this . stacks == null ) { setStacks ( new com . amazonaws . internal . SdkInternalList < Stack > ( stacks . length ) ) ; } for ( Stack ele : stacks ) { this . stacks . add ( ele ) ; } return this ; |
public class SraReader { /** * Read a run set from the specified URL .
* @ param url URL , must not be null
* @ return a run set read from the specified URL
* @ throws IOException if an I / O error occurs */
public static RunSet readRunSet ( final URL url ) throws IOException { } } | checkNotNull ( url ) ; try ( BufferedReader reader = Resources . asCharSource ( url , Charsets . UTF_8 ) . openBufferedStream ( ) ) { return readRunSet ( reader ) ; } |
public class MethodUtils { /** * Gets the wrapper object class for the given primitive type class .
* For example , passing < code > boolean . class < / code > returns < code > Boolean . class < / code >
* @ param primitiveType the primitive type class for which a match is to be found
* @ return the wrapper type associated with the given primitive
* or null if no match is found */
public static Class < ? > getPrimitiveWrapper ( Class < ? > primitiveType ) { } } | // does anyone know a better strategy than comparing names ?
if ( boolean . class . equals ( primitiveType ) ) { return Boolean . class ; } else if ( float . class . equals ( primitiveType ) ) { return Float . class ; } else if ( long . class . equals ( primitiveType ) ) { return Long . class ; } else if ( int . class . equals ( primitiveType ) ) { return Integer . class ; } else if ( short . class . equals ( primitiveType ) ) { return Short . class ; } else if ( byte . class . equals ( primitiveType ) ) { return Byte . class ; } else if ( double . class . equals ( primitiveType ) ) { return Double . class ; } else if ( char . class . equals ( primitiveType ) ) { return Character . class ; } else { return null ; } |
public class JSSEHelper { /** * This method creates an SSLContext for use by an SSL application or component .
* Precedence logic will determine which parameters are used for creating the
* SSLContext . The selection precedence rules are :
* < ol >
* < li > Direct - The sslAliasName parameter , when specified , will be used to choose
* the alias directly from the SSL configurations . < / li >
* < li > Dynamic - The remoteHost / remotePort String ( s ) will contain the target
* host , or host and port . A SSL configuration to be use for an outbound connection
* will be selected based on the host or host and port configured . < / li >
* < / ol >
* When Java 2 Security is enabled , access to call this method requires
* WebSphereRuntimePermission " getSSLConfig " to be granted .
* @ param sslAliasName - Used in direct selection . The alias name of a
* specific SSL configuration ( optional ) . You can pass in " null " here .
* If sslAliasName is provided but does not exist it will check
* connection information for a match . Then look for a default if no
* match with the connection information .
* @ param connectionInfo - This refers to the remote connection information . The
* current properties known by the runtime include :
* Example OUTBOUND case ( endpoint refers more to protocol used since
* outbound names are not well - known ) :
* < ul >
* < li > com . ibm . ssl . remoteHost = " hostname . ibm . com " < / li >
* < li > com . ibm . ssl . remotePort = " 9809 " < / li >
* < li > com . ibm . ssl . direction = " outbound " < / li >
* < / ul > < / p >
* Example INBOUND case ( endpoint name matches serverindex endpoint ) :
* < code >
* com . ibm . ssl . direction = " inbound "
* < / code > < / p >
* It ' s highly recommended to supply these properties when possible .
* @ param listener - This is used to notify the
* caller of this API that the SSL configuration changed in the runtime .
* It ' s up to the caller to decide if they want to call this API again
* to get the new SSLContext for the configuration . Passing in NULL
* indicates no notification is desired . See the
* com . ibm . websphere . ssl . SSLConfigChangeListener interface for more
* information .
* @ return SSLContext
* @ throws com . ibm . websphere . ssl . SSLException
* @ ibm - api */
public SSLContext getSSLContext ( String sslAliasName , Map < String , Object > connectionInfo , SSLConfigChangeListener listener ) throws SSLException { } } | return getSSLContext ( sslAliasName , connectionInfo , listener , true ) ; |
public class GoogleMapShapeConverter { /** * Transform the bounding box in the feature projection to web mercator
* @ param boundingBox bounding box in feature projection
* @ return bounding box in web mercator */
public BoundingBox boundingBoxToWebMercator ( BoundingBox boundingBox ) { } } | if ( projection == null ) { throw new GeoPackageException ( "Shape Converter projection is null" ) ; } return boundingBox . transform ( toWebMercator ) ; |
public class ISOUPTree { /** * Method for updating ( training ) the model using a new instance */
public void trainOnInstanceImpl ( MultiLabelInstance inst ) { } } | if ( inst . weight ( ) > 0 ) { checkRoot ( ) ; double [ ] prediction = treeRoot . getPrediction ( inst ) ; double [ ] normalError = getNormalizedError ( inst , prediction ) ; // normalError . scaleValues ( inst . weight ( ) ) ;
processInstance ( inst , treeRoot , prediction , normalError , true , false ) ; examplesSeen += inst . weight ( ) ; for ( int i = 0 ; i < inst . numberOutputTargets ( ) ; i ++ ) { sumOfValues . addToValue ( i , inst . weight ( ) * inst . valueOutputAttribute ( i ) ) ; sumOfSquares . addToValue ( i , inst . weight ( ) * inst . valueOutputAttribute ( i ) * inst . valueOutputAttribute ( i ) ) ; } for ( int i = 0 ; i < inst . numInputAttributes ( ) ; i ++ ) { sumOfAttrValues . addToValue ( i , inst . weight ( ) * inst . valueInputAttribute ( i ) ) ; sumOfAttrSquares . addToValue ( i , inst . weight ( ) * inst . valueInputAttribute ( i ) * inst . valueInputAttribute ( i ) ) ; } } |
public class BinaryPropertyProcessor { /** * Deserialize this object from a POF stream .
* @ param reader POF reader to use
* @ throws IOException if an error occurs during deserialization */
public void readExternal ( PofReader reader ) throws IOException { } } | super . readExternal ( reader ) ; navigator = ( PofNavigator ) reader . readObject ( 0 ) ; |
public class JmxMBeans { /** * region refreshing jmx */
private void handleJmxRefreshables ( List < MBeanWrapper > mbeanWrappers , AttributeNodeForPojo rootNode ) { } } | for ( MBeanWrapper mbeanWrapper : mbeanWrappers ) { Eventloop eventloop = mbeanWrapper . getEventloop ( ) ; List < JmxRefreshable > currentRefreshables = rootNode . getAllRefreshables ( mbeanWrapper . getMBean ( ) ) ; if ( ! eventloopToJmxRefreshables . containsKey ( eventloop ) ) { eventloopToJmxRefreshables . put ( eventloop , currentRefreshables ) ; eventloop . execute ( createRefreshTask ( eventloop , null , 0 ) ) ; } else { List < JmxRefreshable > previousRefreshables = eventloopToJmxRefreshables . get ( eventloop ) ; List < JmxRefreshable > allRefreshables = new ArrayList < > ( previousRefreshables ) ; allRefreshables . addAll ( currentRefreshables ) ; eventloopToJmxRefreshables . put ( eventloop , allRefreshables ) ; } refreshableStatsCounts . put ( eventloop , eventloopToJmxRefreshables . get ( eventloop ) . size ( ) ) ; } |
public class StackSizeSimulator { /** * Initializes the runtime stack sizes with the given instructions . This has to be called before { @ link StackSizeSimulator # simulateStatementBackwards }
* @ param instructions The instructions to simulate */
void buildStackSizes ( final List < Instruction > instructions ) { } } | stackSizes = new ArrayList < > ( ) ; int stackSize = 0 ; for ( Instruction instruction : instructions ) { final int previousStackSize = stackSize ; stackSize += instruction . getStackSizeDifference ( ) ; if ( isStackCleared ( instruction ) ) stackSize = 0 ; if ( stackSize < 0 ) { throw new IllegalStateException ( "Runtime stack under-flow occurred." ) ; } stackSizes . add ( Pair . of ( previousStackSize , stackSize ) ) ; } |
public class ChainDataImpl { /** * Enables external entities to be notified of chain events described in
* ChainEventListener interface .
* @ param listener */
public final void addChainEventListener ( ChainEventListener listener ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "addChainEventListener: " + listener ) ; } if ( null != listener ) { this . chainEventListeners . add ( listener ) ; } |
public class MultiMessage {
    /**
     * Decodes the messages using the provided {@link TempFileContext temporary file context}.
     *
     * Wire format: "&lt;count&gt;&lt;DELIM&gt;" followed by, per message,
     * "&lt;typeChar&gt;&lt;length&gt;&lt;DELIM&gt;&lt;payload&gt;". The whole
     * input must be consumed exactly.
     *
     * @param encodedMessages the encoded multi-message string
     * @param tempFileContext context used by the individual message decoders
     * @return an immutable MultiMessage holding the decoded messages
     * @throws IOException if a message decoder fails
     * @throws IllegalArgumentException if the encoding is malformed
     */
    public static MultiMessage decode(String encodedMessages, TempFileContext tempFileContext) throws IOException {
        if (encodedMessages.isEmpty()) return EMPTY_MULTI_MESSAGE;
        int pos = encodedMessages.indexOf(DELIMITER);
        if (pos == -1) throw new IllegalArgumentException("Delimiter not found");
        // pos++ skips the delimiter after parsing the message count
        final int size = Integer.parseInt(encodedMessages.substring(0, pos++));
        List<Message> decodedMessages = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            MessageType type = MessageType.getFromTypeChar(encodedMessages.charAt(pos++));
            int nextPos = encodedMessages.indexOf(DELIMITER, pos);
            if (nextPos == -1) throw new IllegalArgumentException("Delimiter not found");
            // nextPos++ moves past the delimiter; capacity is the payload length in chars
            final int capacity = Integer.parseInt(encodedMessages.substring(pos, nextPos++));
            pos = nextPos + capacity;
            decodedMessages.add(type.decode(encodedMessages.substring(nextPos, pos), tempFileContext));
        }
        // The entire input must have been consumed
        if (pos != encodedMessages.length()) throw new IllegalArgumentException("pos != encodedMessages.length()");
        return new MultiMessage(Collections.unmodifiableList(decodedMessages));
    }
}
public class AutoElasticsearch {
    /**
     * {@inheritDoc}
     *
     * Delegates to the three-argument overload with a null third argument.
     *
     * @deprecated This is available for any storm cluster that operates against
     *             the older method of credential renewal
     */
    @Override
    public void renew(Map<String, String> credentials, Map topologyConf) {
        renew(credentials, topologyConf, null);
    }
}
public class DevicePoolCompatibilityResult { /** * Information about the compatibility .
* @ param incompatibilityMessages
* Information about the compatibility . */
public void setIncompatibilityMessages ( java . util . Collection < IncompatibilityMessage > incompatibilityMessages ) { } } | if ( incompatibilityMessages == null ) { this . incompatibilityMessages = null ; return ; } this . incompatibilityMessages = new java . util . ArrayList < IncompatibilityMessage > ( incompatibilityMessages ) ; |
public class DRL5Lexer {
    /**
     * $ANTLR start "EOL"
     *
     * Matches an end-of-line token: "\r\n" (guarded by a syntactic predicate),
     * a lone "\r", or "\n".
     *
     * NOTE(review): generated ANTLR code — do not hand-edit; regenerate from
     * DRL5Lexer.g instead. When LA(1)=='\r' and LA(2)!='\n', alt2 remains 3
     * and match('\n') will fail/backtrack rather than taking the '\r' branch —
     * presumably intended by the grammar; confirm against DRL5Lexer.g.
     */
    public final void mEOL() throws RecognitionException {
        try {
            // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:80:6: ( ( ( '\\r\\n' )=> '\\r\\n' | '\\r' | '\\n' ) )
            // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:81:12: ( ( '\\r\\n' )=> '\\r\\n' | '\\r' | '\\n' )
            {
                // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:81:12: ( ( '\\r\\n' )=> '\\r\\n' | '\\r' | '\\n' )
                int alt2 = 3;
                int LA2_0 = input.LA(1);
                if ((LA2_0 == '\r')) {
                    int LA2_1 = input.LA(2);
                    if ((LA2_1 == '\n') && (synpred1_DRL5Lexer())) {
                        alt2 = 1;
                    }
                } else if ((LA2_0 == '\n')) {
                    alt2 = 3;
                } else {
                    // No viable alternative: fail silently while backtracking,
                    // otherwise report.
                    if (state.backtracking > 0) {
                        state.failed = true;
                        return;
                    }
                    NoViableAltException nvae = new NoViableAltException("", 2, 0, input);
                    throw nvae;
                }
                switch (alt2) {
                    case 1:
                        // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:81:20: ( '\\r\\n' )=> '\\r\\n'
                    {
                        match("\r\n");
                        if (state.failed) return;
                    }
                    break;
                    case 2:
                        // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:82:25: '\\r'
                    {
                        match('\r');
                        if (state.failed) return;
                    }
                    break;
                    case 3:
                        // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:83:25: '\\n'
                    {
                        match('\n');
                        if (state.failed) return;
                    }
                    break;
                }
            }
        } finally {
            // do for sure before leaving
        }
    }
}
public class EmptyResourceBundlePostProcessor { /** * ( non - Javadoc )
* @ see net . jawr . web . resource . bundle . postprocess .
* AbstractChainedResourceBundlePostProcessor # doPostProcessBundle ( net . jawr .
* web . resource . bundle . postprocess . BundleProcessingStatus ,
* java . lang . StringBuffer ) */
@ Override protected StringBuffer doPostProcessBundle ( BundleProcessingStatus status , StringBuffer bundleData ) throws IOException { } } | if ( status . getProcessingType ( ) . equals ( BundleProcessingStatus . FILE_PROCESSING_TYPE ) && status . getJawrConfig ( ) . getResourceType ( ) . equals ( JawrConstant . JS_TYPE ) ) { if ( bundleData . toString ( ) . trim ( ) . endsWith ( ")" ) ) { bundleData . append ( ";" ) ; } } return bundleData ; |
public class TopicAdminClient {
    /**
     * Gets the configuration of a topic.
     *
     * <p>Sample code:
     * <pre><code>
     * try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
     *   ProjectTopicName topic = ProjectTopicName.of("[PROJECT]", "[TOPIC]");
     *   Topic response = topicAdminClient.getTopic(topic.toString());
     * </code></pre>
     *
     * @param topic The name of the topic to get. Format is `projects/{project}/topics/{topic}`.
     * @return the topic configuration returned by the service
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    public final Topic getTopic(String topic) {
        // Wrap the plain name in a request and delegate to the request-based overload.
        GetTopicRequest request = GetTopicRequest.newBuilder().setTopic(topic).build();
        return getTopic(request);
    }
}
public class ClassNameResolver { /** * 增加类名过滤器
* @ param classNameFilter 类名过滤器t */
public void addFilter ( ClassNameFilter classNameFilter ) { } } | writeLock . lock ( ) ; try { if ( filters == null ) { filters = new ArrayList < ClassNameFilter > ( ) ; } filters . add ( classNameFilter ) ; // 从小到大 , 先进优先
Collections . sort ( filters , new Comparator < ClassNameFilter > ( ) { public int compare ( ClassNameFilter o1 , ClassNameFilter o2 ) { return o1 . order ( ) - o2 . order ( ) ; } } ) ; } finally { writeLock . unlock ( ) ; } |
public class JsonWriter {
    /**
     * Closes the current scope by appending any necessary whitespace and the
     * given bracket.
     *
     * @param empty        the scope constant for an empty container of this kind
     * @param nonempty     the scope constant for a non-empty container of this kind
     * @param closeBracket the bracket text to write ("]" or "}")
     * @return this writer, for chaining
     * @throws IOException if the underlying writer fails
     * @throws IllegalStateException if the current scope does not match, or a
     *         name was written without a value
     */
    private JsonWriter close(int empty, int nonempty, String closeBracket) throws IOException {
        int context = peek();
        // The scope being closed must match the requested container kind.
        if (context != nonempty && context != empty) {
            throw new IllegalStateException("Nesting problem.");
        }
        // A property name without a value may not be left dangling.
        if (deferredName != null) {
            throw new IllegalStateException("Dangling name: " + deferredName);
        }
        stackSize--;
        // Only non-empty containers get a newline before the closing bracket.
        if (context == nonempty) {
            newline();
        }
        out.write(closeBracket);
        return this;
    }
}
public class DescriptorInstantiator { /** * Creates a { @ link Descriptor } instance from the specified implementation class name , also using the specified name
* @ param implClass
* @ param descriptorName
* @ return
* @ throws IllegalArgumentException
* If either argument is not specified */
static Descriptor createFromImplModelType ( final Class < ? extends Descriptor > implClass , String descriptorName ) throws IllegalArgumentException { } } | // Precondition checks
if ( implClass == null ) { throw new IllegalArgumentException ( "implClass must be specified" ) ; } if ( descriptorName == null || descriptorName . length ( ) == 0 ) { throw new IllegalArgumentException ( "descriptorName must be specified" ) ; } // Get the constructor to use in making the new instance
final Constructor < ? extends Descriptor > ctor ; try { ctor = implClass . getConstructor ( String . class ) ; } catch ( final NoSuchMethodException nsme ) { throw new RuntimeException ( implClass + " must contain a constructor with a single String argument" ) ; } // Create a new descriptor instance using the backing model
final Descriptor descriptor ; try { descriptor = ctor . newInstance ( descriptorName ) ; } // Handle all construction errors equally
catch ( final Exception e ) { throw new RuntimeException ( "Could not create new descriptor instance" , e ) ; } // Return
return descriptor ; |
public class SymbolType {
    /**
     * Builds a symbol for a type variable from a list of upper bounds.
     *
     * @param typeVariable the name of the variable
     * @param upperBounds the upper bounds of the type variable
     * @return a SymbolType that represents a variable (for generics)
     */
    public static SymbolType typeVariableOf(final String typeVariable, List<SymbolType> upperBounds) {
        // Delegates to the (bounds, name) constructor.
        return new SymbolType(upperBounds, typeVariable);
    }
}
public class Job {
    /**
     * Invoke this method from within the {@code run} method of a <b>generator
     * job</b> in order to specify a job node in the generated child job graph.
     * This version of the method is for child jobs that take zero arguments.
     *
     * @param <T> The return type of the child job being specified
     * @param jobInstance A user-written job object
     * @param settings Optional one or more {@code JobSetting}
     * @return a {@code FutureValue} representing an empty value slot that will be
     *         filled by the output of {@code jobInstance} when it finalizes. This
     *         may be passed in to further invocations of {@code futureCall()} in
     *         order to specify a data dependency.
     */
    public <T> FutureValue<T> futureCall(Job0<T> jobInstance, JobSetting... settings) {
        // Zero-argument convenience overload; the unchecked variant does the work.
        return futureCallUnchecked(settings, jobInstance);
    }
}
public class NumberExpression { /** * Create a { @ code this between from and to } expression
* < p > Is equivalent to { @ code from < = this < = to } < / p >
* @ param < A >
* @ param from inclusive start of range
* @ param to inclusive end of range
* @ return this between from and to */
public final < A extends Number & Comparable < ? > > BooleanExpression between ( @ Nullable A from , @ Nullable A to ) { } } | if ( from == null ) { if ( to != null ) { return loe ( to ) ; } else { throw new IllegalArgumentException ( "Either from or to needs to be non-null" ) ; } } else if ( to == null ) { return goe ( from ) ; } else { return between ( ConstantImpl . create ( cast ( from ) ) , ConstantImpl . create ( cast ( to ) ) ) ; } |
public class BaseDestinationDefinitionImpl {
    /**
     * Returns the description of this destination definition.
     *
     * @return the value of {@code _description}
     * @see com.ibm.ws.sib.admin.BaseDestinationDefinition#getDescription()
     */
    public String getDescription() {
        // Entry/exit trace only; no state is read or modified besides _description.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "getDescription", this);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getDescription", _description);
        return _description;
    }
}
public class ConversionManager {
    /**
     * Converts from String based on isAssignableFrom or instanceof semantics
     * of the registered converters' declared types. The first converter whose
     * declared Class is assignable to the requested type, or whose TypeVariable
     * conversion reports success, wins.
     *
     * @param rawString the raw string value to convert
     * @param type      the requested target type
     * @return ConversionStatus&lt;T&gt; whether a converter is found and the converted value
     */
    protected ConversionStatus convertCompatible(String rawString, Class<?> type) {
        ConversionStatus status = new ConversionStatus();
        for (PriorityConverter con : converters.getAll()) {
            Type key = con.getType();
            if (key instanceof Class) {
                Class<?> clazz = (Class<?>) key;
                if (type.isAssignableFrom(clazz)) {
                    Object converted = convert(rawString, key);
                    // NOTE(review): this branch only calls setConverted and breaks,
                    // unlike the TypeVariable branch which gates on isConverterFound —
                    // presumably setConverted also marks the status as found; confirm
                    // against ConversionStatus.
                    status.setConverted(converted);
                    break;
                }
            } else if (key instanceof TypeVariable) {
                TypeVariable<?> typeVariable = (TypeVariable<?>) key;
                status = convertGenericClazz(rawString, type, typeVariable);
                if (status.isConverterFound()) {
                    break;
                }
            }
        }
        return status;
    }
}
public class CPDefinitionSpecificationOptionValueUtil {
    /**
     * Returns the first cp definition specification option value in the ordered
     * set where uuid = &#63; and companyId = &#63;.
     *
     * @param uuid the uuid
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching cp definition specification option value, or
     *         <code>null</code> if a matching cp definition specification option
     *         value could not be found
     */
    public static CPDefinitionSpecificationOptionValue fetchByUuid_C_First(String uuid, long companyId,
            OrderByComparator<CPDefinitionSpecificationOptionValue> orderByComparator) {
        // Static facade over the persistence implementation.
        return getPersistence().fetchByUuid_C_First(uuid, companyId, orderByComparator);
    }
}
public class Transform1D {
    /**
     * Transform the specified 1.5D point.
     * If the point is on a segment from the Transform1D path, it will be transformed
     * to follow the path. If the point is not on a segment from the Transform1D path,
     * the curviline and shift transformations will be simply added.
     *
     * @param point the point to transform.
     * @param appliedTranslation is set by this function with the really applied distances. The values are always positives.
     * @return the index of the segment on which the point lies in the segment path;
     *         or <code>-1</code> if the segment of the point is not the first segment in the path.
     */
    @Pure
    @SuppressWarnings({"checkstyle:cyclomaticcomplexity", "checkstyle:nestedifdepth"})
    public int transform(Point1D<?, ?, S> point, Tuple2D<?> appliedTranslation) {
        assert point != null : AssertMessages.notNullParameter(0);
        assert point.getSegment() != null;
        int idx;
        double distanceToSegmentEnd;
        Direction1D direction;
        S previousSegment;
        double shift = point.getY();
        double curviline = point.getCurvilineCoordinate();
        S segment = point.getSegment();
        double distance = 0;
        // Ensure the path is valid: fall back to a singleton path made of the
        // point's own segment when no path was configured.
        final List<S> rpath;
        if (this.path == null || this.path.isEmpty()) {
            rpath = Collections.singletonList(segment);
        } else {
            rpath = this.path;
        }
        // Test if the point is located on the first segment of the path.
        if (!segment.equals(rpath.get(0))) {
            return -1;
        }
        // Change the shift: its sign depends on the first segment's direction.
        if (this.firstSegmentDirection.isSegmentDirection()) {
            shift += this.shiftTranslation;
        } else {
            shift -= this.shiftTranslation;
        }
        double distanceToMove = this.curvilineTranslation;
        if (distanceToMove >= 0.) {
            // Move forward, along the path, hopping to the next path segment
            // whenever the remaining distance exceeds the current segment.
            direction = this.firstSegmentDirection;
            idx = 0;
            do {
                if (direction.isSegmentDirection()) {
                    distanceToSegmentEnd = segment.getLength() - curviline;
                    if (distanceToSegmentEnd < distanceToMove) {
                        distance += distanceToSegmentEnd;
                        if ((idx + 1) < rpath.size()) {
                            previousSegment = segment;
                            ++idx;
                            segment = rpath.get(idx);
                            distanceToMove -= distanceToSegmentEnd;
                            // Entering the next segment from its end flips the
                            // traversal direction and mirrors the shift.
                            if (segment.isLastPointConnectedTo(previousSegment) || previousSegment.equals(segment)) {
                                shift = -shift;
                                curviline = segment.getLength();
                                direction = Direction1D.REVERTED_DIRECTION;
                            } else {
                                curviline = 0.;
                            }
                        } else {
                            // End of path: clamp at the end of the last segment.
                            curviline = segment.getLength();
                            distanceToMove = 0.;
                        }
                    } else {
                        curviline += distanceToMove;
                        distance += distanceToMove;
                        distanceToMove = 0.;
                    }
                } else {
                    // Traversing the segment against its natural direction.
                    distanceToSegmentEnd = curviline;
                    if (distanceToSegmentEnd < distanceToMove) {
                        distance += distanceToSegmentEnd;
                        if ((idx + 1) < rpath.size()) {
                            ++idx;
                            previousSegment = segment;
                            segment = rpath.get(idx);
                            distanceToMove -= distanceToSegmentEnd;
                            if (segment.isFirstPointConnectedTo(previousSegment) || previousSegment.equals(segment)) {
                                shift = -shift;
                                curviline = 0.;
                                direction = Direction1D.SEGMENT_DIRECTION;
                            } else {
                                curviline = segment.getLength();
                            }
                        } else {
                            distanceToMove = 0.;
                            curviline = 0.;
                        }
                    } else {
                        curviline -= distanceToMove;
                        distance += distanceToMove;
                        distanceToMove = 0.;
                    }
                }
            } while (distanceToMove > 0.);
        } else {
            // Move backward. This operation can be applied only on
            // the current segment (never crosses segment boundaries).
            idx = 0;
            if (this.firstSegmentDirection.isSegmentDirection()) {
                // Caution: distanceToMove is negative
                distance = Math.min(curviline, -distanceToMove);
                curviline -= distance;
                if (curviline < 0.) {
                    curviline = 0.;
                }
            } else {
                // Caution: distanceToMove is negative
                distance = Math.min(segment.getLength() - curviline, -distanceToMove);
                curviline += distance;
                if (curviline > segment.getLength()) {
                    curviline = segment.getLength();
                }
            }
        }
        if (appliedTranslation != null) {
            appliedTranslation.set(distance, Math.abs(this.shiftTranslation));
        }
        point.set(segment, curviline, shift);
        return idx;
    }
}
public class GitlabAPI {
    /**
     * Get a list of tags in a specific project.
     *
     * @param projectId id (or path) of the project
     * @return all tags of the project, fetched across pages
     */
    public List<GitlabTag> getTags(Serializable projectId) {
        // Build ".../projects/{id}/repository/tags?per_page=..." style URL and page through.
        String tailUrl = GitlabProject.URL + "/" + sanitizeProjectId(projectId) + GitlabTag.URL + PARAM_MAX_ITEMS_PER_PAGE;
        return retrieve().getAll(tailUrl, GitlabTag[].class);
    }
}
public class CommerceDiscountUsageEntryPersistenceImpl {
    /**
     * Returns the commerce discount usage entries before and after the current
     * commerce discount usage entry in the ordered set where groupId = &#63;.
     *
     * @param commerceDiscountUsageEntryId the primary key of the current commerce discount usage entry
     * @param groupId the group ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next commerce discount usage entry
     * @throws NoSuchDiscountUsageEntryException if a commerce discount usage entry with the primary key could not be found
     */
    @Override
    public CommerceDiscountUsageEntry[] findByGroupId_PrevAndNext(long commerceDiscountUsageEntryId, long groupId,
            OrderByComparator<CommerceDiscountUsageEntry> orderByComparator) throws NoSuchDiscountUsageEntryException {
        // Throws if the anchor entity does not exist.
        CommerceDiscountUsageEntry commerceDiscountUsageEntry = findByPrimaryKey(commerceDiscountUsageEntryId);
        Session session = null;
        try {
            session = openSession();
            // array = { previous, current, next }
            CommerceDiscountUsageEntry[] array = new CommerceDiscountUsageEntryImpl[3];
            array[0] = getByGroupId_PrevAndNext(session, commerceDiscountUsageEntry, groupId, orderByComparator, true);
            array[1] = commerceDiscountUsageEntry;
            array[2] = getByGroupId_PrevAndNext(session, commerceDiscountUsageEntry, groupId, orderByComparator, false);
            return array;
        } catch (Exception e) {
            throw processException(e);
        } finally {
            closeSession(session);
        }
    }
}
public class Calendar { /** * Implementing binary search for zone transtion detection , used by { @ link # getPreviousZoneTransitionTime ( TimeZone , long , long ) }
* @ param tz The time zone .
* @ param upperOffset The zone offset at < code > upper < / code >
* @ param upper The upper bound , inclusive .
* @ param lower The lower bound , exclusive .
* @ return The time of the previous zone transition , or null if not available . */
private static Long findPreviousZoneTransitionTime ( TimeZone tz , int upperOffset , long upper , long lower ) { } } | boolean onUnitTime = false ; long mid = 0 ; for ( int unit : FIND_ZONE_TRANSITION_TIME_UNITS ) { long lunits = lower / unit ; long uunits = upper / unit ; if ( uunits > lunits ) { mid = ( ( lunits + uunits + 1 ) >>> 1 ) * unit ; onUnitTime = true ; break ; } } int midOffset ; if ( ! onUnitTime ) { mid = ( upper + lower ) >>> 1 ; } if ( onUnitTime ) { if ( mid != upper ) { midOffset = tz . getOffset ( mid ) ; if ( midOffset != upperOffset ) { return findPreviousZoneTransitionTime ( tz , upperOffset , upper , mid ) ; } upper = mid ; } // check mid - 1
mid -- ; } else { mid = ( upper + lower ) >>> 1 ; } if ( mid == lower ) { return Long . valueOf ( upper ) ; } midOffset = tz . getOffset ( mid ) ; if ( midOffset != upperOffset ) { if ( onUnitTime ) { return Long . valueOf ( upper ) ; } return findPreviousZoneTransitionTime ( tz , upperOffset , upper , mid ) ; } return findPreviousZoneTransitionTime ( tz , upperOffset , mid , lower ) ; |
public class PhaseOneOptions { /** * { @ inheritDoc } */
@ Override protected void processSetting ( String name , String value ) { } } | super . processSetting ( name , value ) ; if ( INPUT_PATH_DESC . equals ( name ) ) inputPath = value ; |
public class Tree { /** * Appends the specified byte array to the end of this List ( or adds the
* value to this Set - it depends on the type of this node ) . Sample code :
* < br >
* < br >
* byte [ ] b1 = QFiles . readFile ( " path / to / file1 " ) ; < br >
* byte [ ] b2 = QFiles . readFile ( " path / to / file2 " ) ; < br >
* < br >
* Tree node = new Tree ( ) ; < br >
* node . put ( " path . to . array " ) . add ( b1 , true ) . add ( b2 , true ) ;
* @ param value
* new value to be added to this list ( or set )
* @ param asBase64String
* store the byte array as BASE64 String
* @ return this ( the List or Set ) node */
public Tree add ( byte [ ] value , boolean asBase64String ) { } } | if ( asBase64String ) { return addObjectInternal ( BASE64 . encode ( value ) ) ; } return addObjectInternal ( value ) ; |
public class AbstractObjectStore {
    /**
     * Resets this store to its initial, empty state: the sequence number is
     * rewound, allocation is re-enabled, and the in-memory token map is
     * replaced.
     *
     * @throws ObjectManagerException on object-manager failure
     * @see com.ibm.ws.objectManager.ObjectStore#clear()
     */
    protected synchronized void clear() throws ObjectManagerException {
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled()) trace.entry(this, cclass, "clear");
        // All ManagedObjects are new; sequence numbers up to initialSequenceNumber are reserved.
        sequenceNumber = initialSequenceNumber;
        allocationAllowed = true;
        inMemoryTokens = new WeakValueConcurrentHashMap(concurrency);
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled()) trace.exit(this, cclass, "clear");
    }
}
public class PathQueryParser { /** * Recursive BFS tree walker
* @ param node current node */
private void treeWalker ( Node node , int level , List < PathQueryMatcher > queryMatchers , List < Node > collector ) { } } | MatchType matchType = queryMatchers . get ( level ) . match ( level , node ) ; if ( matchType == MatchType . NOT_A_MATCH ) { // no reason to scan deeper
// noinspection UnnecessaryReturnStatement
return ; } else if ( matchType == MatchType . NODE_MATCH ) { // we have a match
if ( level == queryMatchers . size ( ) - 1 ) { // full path match
collector . add ( node ) ; } else if ( node instanceof Element ) { // scan deeper
Element element = ( Element ) node ; List < Node > childElements = element . getChildElements ( ) ; for ( Node childElement : childElements ) { treeWalker ( childElement , level + 1 , queryMatchers , collector ) ; } } } else { throw new PathQueryException ( "Unknown MatchType: " + matchType ) ; } |
public class Tr { /** * private : If we already have ( or can get ) the allTraceComponentsLock
* process the new trace components */
private static void processNewTraceComponents ( ) { } } | if ( allTraceComponentsLock . tryLock ( ) ) { try { TraceComponent tc = newTracecomponents . poll ( ) ; while ( tc != null ) { allTraceComponents . add ( tc ) ; tc = newTracecomponents . poll ( ) ; } } finally { allTraceComponentsLock . unlock ( ) ; } } |
public class AbstractContainerHelper { /** * Indicates whether there is an error which has been propogated from the action to the render phase .
* @ return true if there is a propogated escape , false otherwise . */
private boolean havePropogatedError ( ) { } } | Request req = getRequest ( ) ; return req != null && req . getAttribute ( ACTION_ERROR_KEY ) != null ; |
public class AbstractWebPageForm {
    /**
     * Create toolbar for viewing an existing object. Contains the back button and
     * the edit button.
     *
     * @param aWPEC
     *        The web page execution context. Never <code>null</code>.
     * @param bCanGoBack
     *        <code>true</code> to enable back button
     * @param aSelectedObject
     *        The selected object
     * @return Never <code>null</code>.
     */
    @Nonnull
    @OverrideOnDemand
    protected TOOLBAR_TYPE createViewToolbar(@Nonnull final WPECTYPE aWPEC, final boolean bCanGoBack, @Nonnull final DATATYPE aSelectedObject) {
        final Locale aDisplayLocale = aWPEC.getDisplayLocale();
        final TOOLBAR_TYPE aToolbar = createNewViewToolbar(aWPEC);
        if (bCanGoBack) {
            // Back to list
            aToolbar.addButtonBack(aDisplayLocale);
        }
        if (isActionAllowed(aWPEC, EWebPageFormAction.EDIT, aSelectedObject)) {
            // Edit object — only when editing this object is permitted
            aToolbar.addButtonEdit(aDisplayLocale, createEditURL(aWPEC, aSelectedObject));
        }
        // Callback for subclasses to customize the toolbar
        modifyViewToolbar(aWPEC, aSelectedObject, aToolbar);
        return aToolbar;
    }
}
public class CmsEditSiteForm { /** * Checks if given Ou has resources matching to currently set parent folder . < p >
* @ param ou to check
* @ return true if ou is ok for parent folder */
private boolean ouIsOK ( CmsOrganizationalUnit ou ) { } } | try { for ( CmsResource res : OpenCms . getOrgUnitManager ( ) . getResourcesForOrganizationalUnit ( m_clonedCms , ou . getName ( ) ) ) { if ( m_simpleFieldParentFolderName . getValue ( ) . startsWith ( res . getRootPath ( ) ) ) { return true ; } } } catch ( CmsException e ) { LOG . error ( "Unable to read Resources for Org Unit" , e ) ; } return false ; |
public class sslfips {
    /**
     * Use this API to update sslfips.
     *
     * Copies the updatable fields into a fresh resource and issues the update
     * against the given nitro service.
     *
     * @param client   the service to run the update on
     * @param resource the sslfips settings to apply
     * @return the response of the update operation
     * @throws Exception if the update fails
     */
    public static base_response update(nitro_service client, sslfips resource) throws Exception {
        sslfips updateresource = new sslfips();
        updateresource.inithsm = resource.inithsm;
        updateresource.sopassword = resource.sopassword;
        updateresource.oldsopassword = resource.oldsopassword;
        updateresource.userpassword = resource.userpassword;
        updateresource.hsmlabel = resource.hsmlabel;
        return updateresource.update_resource(client);
    }
}
public class LineageEventBuilder { /** * Create a { @ link LineageEventBuilder } from a { @ link GobblinEventBuilder } . An inverse function
* to { @ link LineageEventBuilder # build ( ) } */
public static LineageEventBuilder fromEvent ( GobblinTrackingEvent event ) { } } | Map < String , String > metadata = event . getMetadata ( ) ; LineageEventBuilder lineageEvent = new LineageEventBuilder ( event . getName ( ) ) ; metadata . forEach ( ( key , value ) -> { switch ( key ) { case SOURCE : lineageEvent . setSource ( Descriptor . fromJson ( value ) ) ; break ; case DESTINATION : lineageEvent . setDestination ( Descriptor . fromJson ( value ) ) ; break ; default : lineageEvent . addMetadata ( key , value ) ; break ; } } ) ; return lineageEvent ; |
public class AuthTokenManager { /** * Implementation of PBKDF2WithHmacSHA1 */
private static boolean validateToken ( String originalToken , String storedToken ) throws NoSuchAlgorithmException , InvalidKeySpecException { } } | String [ ] parts = storedToken . split ( ":" ) ; int iterations = Integer . parseInt ( parts [ 0 ] ) ; byte [ ] salt = fromHex ( parts [ 1 ] ) ; byte [ ] hash = fromHex ( parts [ 2 ] ) ; PBEKeySpec spec = new PBEKeySpec ( originalToken . toCharArray ( ) , salt , iterations , hash . length * 8 ) ; SecretKeyFactory skf = SecretKeyFactory . getInstance ( "PBKDF2WithHmacSHA1" ) ; byte [ ] testHash = skf . generateSecret ( spec ) . getEncoded ( ) ; int diff = hash . length ^ testHash . length ; for ( int i = 0 ; i < hash . length && i < testHash . length ; i ++ ) { diff |= hash [ i ] ^ testHash [ i ] ; } return diff == 0 ; |
public class RouteBuilder { /** * Sets the targeted action method of the resulting route .
* @ param controller the controller object , must not be { @ literal null } .
* @ param method the method name , must not be { @ literal null } .
* @ return the current route builder */
public Route to ( Controller controller , String method ) { } } | Preconditions . checkNotNull ( controller ) ; Preconditions . checkNotNull ( method ) ; this . controller = controller ; try { this . controllerMethod = verifyThatControllerAndMethodExists ( controller . getClass ( ) , method ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( ERROR_CTRL + method + ERROR_IN + controller . getClass ( ) + "`, or the method is invalid" , e ) ; } return _build ( ) ; |
public class ReadCommittedStrategy {
    /**
     * Acquire a read lock on Object obj for Transaction tx.
     *
     * Protocol: when no writer holds the object, optimistically register as a
     * reader, then re-check for a writer that may have slipped in; on conflict
     * the reader entry is removed and the acquisition is retried recursively.
     * A transaction that itself holds the write lock may always read.
     *
     * @param tx the transaction requesting the lock
     * @param obj the Object to be locked
     * @return true if successful, else false
     */
    public boolean readLock(TransactionImpl tx, Object obj) {
        LockEntry writer = getWriter(obj);
        if (writer == null) {
            addReader(tx, obj);
            // if there has been a successful write locking in the meantime,
            // back out and try again
            if (getWriter(obj) == null)
                return true;
            else {
                removeReader(tx, obj);
                return readLock(tx, obj);
            }
        }
        if (writer.isOwnedBy(tx)) {
            return true; // If I'm the writer, I can read.
        } else {
            return false;
        }
    }
}
public class Permutations { /** * Generates the next permutation ( algorithm from Rosen p . 284)
* @ return an array containing the next permutation */
public Integer [ ] nextPermutation ( ) { } } | if ( permLeft . equals ( permTotal ) ) { permLeft = permLeft . subtract ( BigInteger . ONE ) ; return position ; } // Find largest index j with a [ j ] < a [ j + 1]
int j = position . length - 2 ; while ( position [ j ] > position [ j + 1 ] ) { j -- ; } // Find index k such that a [ k ] is smallest integer
// greater than a [ j ] to the right of a [ j ]
int k = position . length - 1 ; while ( position [ j ] > position [ k ] ) { k -- ; } // Interchange a [ j ] and a [ k ]
int temp ; temp = position [ k ] ; position [ k ] = position [ j ] ; position [ j ] = temp ; // Put tail end of permutation after j - th position in increasing order
int r = position . length - 1 ; int s = j + 1 ; while ( r > s ) { temp = position [ s ] ; position [ s ] = position [ r ] ; position [ r ] = temp ; r -- ; s ++ ; } permLeft = permLeft . subtract ( BigInteger . ONE ) ; return position ; |
public class ImportVolumeRequest {
    /**
     * This method is intended for internal use only. Returns the marshaled
     * request configured with additional parameters to enable operation dry-run.
     *
     * @return the marshaled request with the DryRun parameter set to true
     */
    @Override
    public Request<ImportVolumeRequest> getDryRunRequest() {
        Request<ImportVolumeRequest> request = new ImportVolumeRequestMarshaller().marshall(this);
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }
}
public class FloatSubject {
    /**
     * Ensures that the given tolerance is a non-negative finite value, i.e. not
     * {@code Float.NaN}, {@code Float.POSITIVE_INFINITY}, or negative, including
     * {@code -0.0f}.
     *
     * The check order determines which message is reported: NaN first (NaN
     * fails every comparison), then sign, then the -0.0f bit pattern (which
     * compares equal to 0.0f and so passes the {@code >=} check), then infinity.
     */
    static void checkTolerance(float tolerance) {
        checkArgument(!Float.isNaN(tolerance), "tolerance cannot be NaN");
        checkArgument(tolerance >= 0.0f, "tolerance (%s) cannot be negative", tolerance);
        // -0.0f satisfies >= 0.0f, so it must be detected via its bit pattern.
        checkArgument(floatToIntBits(tolerance) != NEG_ZERO_BITS, "tolerance (%s) cannot be negative", tolerance);
        checkArgument(tolerance != Float.POSITIVE_INFINITY, "tolerance cannot be POSITIVE_INFINITY");
    }
}
public class LuceneHelper { /** * depends on document size , but > 0.01 should be a good threshold
* @ return a score that reflects if needle subsumes haystack */
public static float subsumes ( String haystack , final String needle ) { } } | if ( needle == null || needle . length ( ) < 0 || haystack == null || haystack . length ( ) < 0 ) return 0f ; try { // index the cleaned text
RAMDirectory idx = new RAMDirectory ( ) ; IndexWriterConfig iwc = new IndexWriterConfig ( LUCENE_41 , anal ) ; IndexWriter writer = new IndexWriter ( idx , iwc ) ; // index
Document doc = new Document ( ) ; doc . add ( new TextField ( CONTENT_FIELD , haystack , Store . YES ) ) ; writer . addDocument ( doc ) ; writer . close ( ) ; // search
IndexReader reader = open ( idx ) ; Query query = parser . parse ( getNGram ( needle , 3 ) ) ; TopDocs results = new IndexSearcher ( reader ) . search ( query , 1 ) ; float score = 0 ; for ( ScoreDoc hit : results . scoreDocs ) { score += hit . score ; } reader . close ( ) ; idx . close ( ) ; return score ; } catch ( Exception e ) { LOG . warn ( "could not computeSimilarity for " + needle , e ) ; } return 0f ; |
public class CmsImageCacheHolder { /** * Clones a CmsObject . < p >
* @ param cms the CmsObject to be cloned .
* @ return a clones CmsObject
* @ throws CmsException if something goes wrong */
private CmsObject getClonedCmsObject ( CmsObject cms ) throws CmsException { } } | CmsObject clonedCms = OpenCms . initCmsObject ( cms ) ; // only online images get caches
clonedCms . getRequestContext ( ) . setCurrentProject ( clonedCms . readProject ( CmsProject . ONLINE_PROJECT_ID ) ) ; // paths are always root path
clonedCms . getRequestContext ( ) . setSiteRoot ( "" ) ; return clonedCms ; |
public class TSDB { /** * Gets the entire given row from the data table . */
final Deferred < ArrayList < KeyValue > > get ( final byte [ ] key ) { } } | return client . get ( new GetRequest ( table , key , FAMILY ) ) ; |
public class SarlBehaviorUnitBuilderImpl { /** * Create the block of code .
* @ return the block builder . */
public IBlockExpressionBuilder getExpression ( ) { } } | IBlockExpressionBuilder block = this . blockExpressionProvider . get ( ) ; block . eInit ( getTypeResolutionContext ( ) ) ; XBlockExpression expr = block . getXBlockExpression ( ) ; this . sarlBehaviorUnit . setExpression ( expr ) ; return block ; |
public class TableLocation { /** * Change case of parameters to make it more user - friendly .
* @ param identifier Table , Catalog , Schema , or column name
* @ param isH2Database True if H2 , False if PostGreSQL , null if unknown
* @ return Upper or lower case version of identifier */
public static String capsIdentifier ( String identifier , Boolean isH2Database ) { } } | if ( isH2Database != null ) { if ( isH2Database ) { return identifier . toUpperCase ( ) ; } else { return identifier . toLowerCase ( ) ; } } else { return identifier ; } |
public class Scroller { /** * Call this when you want to know the new location . If it returns true ,
* the animation is not yet finished . loc will be altered to provide the
* new location . */
public boolean computeScrollOffset ( ) { } } | if ( mFinished ) { return false ; } int timePassed = ( int ) ( AnimationUtils . currentAnimationTimeMillis ( ) - mStartTime ) ; if ( timePassed < mDuration ) { switch ( mMode ) { case SCROLL_MODE : float x = timePassed * mDurationReciprocal ; if ( mInterpolator == null ) x = viscousFluid ( x ) ; else x = mInterpolator . getInterpolation ( x ) ; mCurrX = mStartX + Math . round ( x * mDeltaX ) ; mCurrY = mStartY + Math . round ( x * mDeltaY ) ; break ; case FLING_MODE : final float t = ( float ) timePassed / mDuration ; final int index = ( int ) ( NB_SAMPLES * t ) ; final float tInf = ( float ) index / NB_SAMPLES ; final float tSup = ( float ) ( index + 1 ) / NB_SAMPLES ; final float dInf = SPLINE [ index ] ; final float dSup = SPLINE [ index + 1 ] ; final float distanceCoef = dInf + ( t - tInf ) / ( tSup - tInf ) * ( dSup - dInf ) ; mCurrX = mStartX + Math . round ( distanceCoef * ( mFinalX - mStartX ) ) ; // Pin to mMinX < = mCurrX < = mMaxX
mCurrX = Math . min ( mCurrX , mMaxX ) ; mCurrX = Math . max ( mCurrX , mMinX ) ; mCurrY = mStartY + Math . round ( distanceCoef * ( mFinalY - mStartY ) ) ; // Pin to mMinY < = mCurrY < = mMaxY
mCurrY = Math . min ( mCurrY , mMaxY ) ; mCurrY = Math . max ( mCurrY , mMinY ) ; if ( mCurrX == mFinalX && mCurrY == mFinalY ) { mFinished = true ; } break ; } } else { mCurrX = mFinalX ; mCurrY = mFinalY ; mFinished = true ; } return true ; |
public class DescribeJobFlowsRequest { /** * Return only job flows whose state is contained in this list .
* @ param jobFlowStates
* Return only job flows whose state is contained in this list .
* @ see JobFlowExecutionState */
public void setJobFlowStates ( java . util . Collection < String > jobFlowStates ) { } } | if ( jobFlowStates == null ) { this . jobFlowStates = null ; return ; } this . jobFlowStates = new com . amazonaws . internal . SdkInternalList < String > ( jobFlowStates ) ; |
public class TreeRenderer { /** * Write out the images that create the leading indentation for the given node .
* @ param writer the appender where the node indentation images are appended
* @ param node the node to render
* @ param level the level or depth of the node within the tree
* @ param state the set of tree properties that are used to render the tree markup */
protected void renderIndentation ( AbstractRenderAppender writer , TreeElement node , int level , InheritableState state ) { } } | InternalStringBuilder img = new InternalStringBuilder ( 32 ) ; // Create the appropriate number of indents
// These are either the spacer . gif if the parent is the last in the line or the
// vertical line gif if the parent is not the last child .
_imgState . clear ( ) ; _imgState . registerAttribute ( AbstractHtmlState . ATTR_GENERAL , WIDTH , "16px" ) ; _imgState . registerAttribute ( AbstractHtmlState . ATTR_GENERAL , BORDER , "0" ) ; _imgState . registerAttribute ( AbstractHtmlState . ATTR_GENERAL , ALT , "" , false ) ; for ( int i = 0 ; i < level ; i ++ ) { int levels = level - i ; TreeElement parent = node ; for ( int j = 1 ; j <= levels ; j ++ ) { parent = parent . getParent ( ) ; } img . setLength ( 0 ) ; img . append ( state . getImageRoot ( ) ) ; img . append ( '/' ) ; if ( parent . isLast ( ) ) { renderSpacerPrefix ( writer , node ) ; img . append ( state . getImageSpacer ( ) ) ; _imgState . style = null ; } else { renderVerticalLinePrefix ( writer , node ) ; img . append ( state . getVerticalLineImage ( ) ) ; _imgState . style = "vertical-align:bottom;" ; } _imgState . src = img . toString ( ) ; _imageRenderer . doStartTag ( writer , _imgState ) ; _imageRenderer . doEndTag ( writer ) ; if ( parent . isLast ( ) ) { renderSpacerSuffix ( writer , node ) ; } else { renderVerticalLineSuffix ( writer , node ) ; } } |
public class UserBroker { /** * Returns the contents of an Excel row in the form of a String array .
* @ see com . ibm . ccd . common . parsing . Parser # splitLine ( ) */
public User addUser ( Set < User > users ) throws Exception { } } | if ( m_iCurrentRow == m_iNbRows ) return null ; Row row = m_sheet . getRow ( m_iCurrentRow ) ; if ( row == null ) { return null ; } else { // extract fields from excel sheet
String userName = extractCellData ( row , 0 ) ; String tweetBody = extractCellData ( row , 1 ) ; String videoId = extractCellData ( row , 2 ) ; String videoName = extractCellData ( row , 3 ) ; User user = null ; boolean present = false ; // iterate over fields .
for ( Iterator iterator = users . iterator ( ) ; iterator . hasNext ( ) ; ) { user = ( User ) iterator . next ( ) ; if ( user . getFirstName ( ) . equals ( userName ) ) { present = true ; break ; } } if ( ! present ) { user = addUser ( userName ) ; } Tweets tweet = addTweets ( user , tweetBody , new Date ( ) , m_iCurrentRow + "" ) ; assignVideosToTweet ( tweet , videoId , videoName ) ; m_iCurrentRow ++ ; return user ; } |
public class Parser { /** * Parse a new - expression , or if next token isn ' t { @ link Token # NEW } ,
* a primary expression .
* @ param allowCallSyntax passed down to { @ link # memberExprTail } */
private AstNode memberExpr ( boolean allowCallSyntax ) throws IOException { } } | int tt = peekToken ( ) , lineno = ts . lineno ; AstNode pn ; if ( tt != Token . NEW ) { pn = primaryExpr ( ) ; } else { consumeToken ( ) ; int pos = ts . tokenBeg ; NewExpression nx = new NewExpression ( pos ) ; AstNode target = memberExpr ( false ) ; int end = getNodeEnd ( target ) ; nx . setTarget ( target ) ; int lp = - 1 ; if ( matchToken ( Token . LP , true ) ) { lp = ts . tokenBeg ; List < AstNode > args = argumentList ( ) ; if ( args != null && args . size ( ) > ARGC_LIMIT ) reportError ( "msg.too.many.constructor.args" ) ; int rp = ts . tokenBeg ; end = ts . tokenEnd ; if ( args != null ) nx . setArguments ( args ) ; nx . setParens ( lp - pos , rp - pos ) ; } // Experimental syntax : allow an object literal to follow a new
// expression , which will mean a kind of anonymous class built with
// the JavaAdapter . the object literal will be passed as an
// additional argument to the constructor .
if ( matchToken ( Token . LC , true ) ) { ObjectLiteral initializer = objectLiteral ( ) ; end = getNodeEnd ( initializer ) ; nx . setInitializer ( initializer ) ; } nx . setLength ( end - pos ) ; pn = nx ; } pn . setLineno ( lineno ) ; AstNode tail = memberExprTail ( allowCallSyntax , pn ) ; return tail ; |
public class MinioClient { /** * Removes an object from a bucket .
* < / p > < b > Example : < / b > < br >
* < pre > { @ code minioClient . removeObject ( " my - bucketname " , " my - objectname " ) ; } < / pre >
* @ param bucketName Bucket name .
* @ param objectName Object name in the bucket .
* @ throws InvalidBucketNameException upon invalid bucket name is given
* @ throws NoSuchAlgorithmException
* upon requested algorithm was not found during signature calculation
* @ throws InsufficientDataException upon getting EOFException while reading given
* InputStream even before reading given length
* @ throws IOException upon connection error
* @ throws InvalidKeyException
* upon an invalid access key or secret key
* @ throws NoResponseException upon no response from server
* @ throws XmlPullParserException upon parsing response xml
* @ throws ErrorResponseException upon unsuccessful execution
* @ throws InternalException upon internal library error
* @ throws InvalidArgumentException upon invalid value is passed to a method . */
public void removeObject ( String bucketName , String objectName ) throws InvalidBucketNameException , NoSuchAlgorithmException , InsufficientDataException , IOException , InvalidKeyException , NoResponseException , XmlPullParserException , ErrorResponseException , InternalException , InvalidArgumentException { } } | if ( ( bucketName == null ) || ( bucketName . isEmpty ( ) ) ) { throw new InvalidArgumentException ( "bucket name cannot be empty" ) ; } if ( ( objectName == null ) || ( objectName . isEmpty ( ) ) ) { throw new InvalidArgumentException ( "object name cannot be empty" ) ; } executeDelete ( bucketName , objectName , null ) ; |
public class EscapableResourceIdSplitter { /** * Joins a list of elements into a resource id , escaping
* special characters if required .
* See { @ link # splitIdIntoElements ( String ) } for details .
* @ param elements a list of elements
* @ return the resource id */
@ Override public String joinElementsToId ( List < String > elements ) { } } | Preconditions . checkNotNull ( elements , "elements argument" ) ; StringBuilder sb = new StringBuilder ( ) ; for ( String el : elements ) { // Skip null elements
if ( el == null ) { continue ; } // Trim the elements and skip any empty ones
String trimmedEl = el . trim ( ) ; if ( trimmedEl . length ( ) < 1 ) { continue ; } trimmedEl = MATCH_BACKSLASH . matcher ( trimmedEl ) . replaceAll ( "\\\\\\\\" ) ; trimmedEl = MATCH_COLON . matcher ( trimmedEl ) . replaceAll ( "\\\\:" ) ; if ( sb . length ( ) > 0 ) { sb . append ( SEPARATOR ) ; } sb . append ( trimmedEl ) ; } return sb . toString ( ) ; |
public class AbsObAxis { /** * Atomizes an operand according to the rules specified in the XPath
* specification .
* @ param mOperand
* the operand to atomize
* @ return the atomized operand . ( always an atomic value ) */
private AtomicValue atomize ( final AbsAxis mOperand ) { } } | int type = getNode ( ) . getTypeKey ( ) ; AtomicValue atom ; if ( XPATH_10_COMP ) { atom = new AtomicValue ( ( ( ITreeValData ) getNode ( ) ) . getRawValue ( ) , getNode ( ) . getTypeKey ( ) ) ; } else { // unatomicType is cast to double
if ( type == NamePageHash . generateHashForString ( "xs:untypedAtomic" ) ) { type = NamePageHash . generateHashForString ( "xs:double" ) ; // TODO : throw error , of cast fails
} atom = new AtomicValue ( ( ( ITreeValData ) getNode ( ) ) . getRawValue ( ) , getNode ( ) . getTypeKey ( ) ) ; } // if ( ! XPATH _ 10 _ COMP & & operand . hasNext ( ) ) {
// throw new XPathError ( ErrorType . XPTY0004 ) ;
return atom ; |
public class JcsegServerConfig { /** * initialize it from the specified config file
* @ param configFile
* @ throws IOException */
public void resetFromFile ( String configFile ) throws IOException { } } | IStringBuffer isb = new IStringBuffer ( ) ; String line = null ; BufferedReader reader = new BufferedReader ( new FileReader ( configFile ) ) ; while ( ( line = reader . readLine ( ) ) != null ) { line = line . trim ( ) ; if ( line . equals ( "" ) ) continue ; if ( line . charAt ( 0 ) == '#' ) continue ; isb . append ( line ) . append ( '\n' ) ; line = null ; // let gc do its work
} globalConfig = new JSONObject ( isb . toString ( ) ) ; // let gc do its work
isb = null ; reader . close ( ) ; reader = null ; |
public class AbstractTurtleOBDAVisitor { /** * in case of more than two terms need to be concatted */
private ImmutableTerm getNestedConcat ( String str ) { } } | List < ImmutableTerm > terms ; terms = addToTermsList ( str ) ; if ( terms . size ( ) == 1 ) { return terms . get ( 0 ) ; } ImmutableFunctionalTerm f = termFactory . getImmutableFunctionalTerm ( ExpressionOperation . CONCAT , terms . get ( 0 ) , terms . get ( 1 ) ) ; for ( int j = 2 ; j < terms . size ( ) ; j ++ ) { f = termFactory . getImmutableFunctionalTerm ( ExpressionOperation . CONCAT , f , terms . get ( j ) ) ; } return f ; |
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public IfcDuctFittingTypeEnum createIfcDuctFittingTypeEnumFromString ( EDataType eDataType , String initialValue ) { } } | IfcDuctFittingTypeEnum result = IfcDuctFittingTypeEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class Gauge { /** * Defines the type of knob that will be used in the radial
* gauges . The values are STANDARD , PLAIN , METAL and FLAT .
* @ param TYPE */
public void setKnobType ( final KnobType TYPE ) { } } | if ( null == knobType ) { _knobType = null == TYPE ? KnobType . STANDARD : TYPE ; fireUpdateEvent ( RESIZE_EVENT ) ; } else { knobType . set ( TYPE ) ; } |
public class CShareableResource { /** * { @ inheritDoc }
* @ param i the model to use to inspect the VMs .
* @ return the set of VMs that cannot have their associated { @ link Preserve } constraint satisfy with regards
* to a possible { @ link Overbook } and single - node { @ link ResourceCapacity } constraint . */
@ Override public Set < VM > getMisPlacedVMs ( Instance i ) { } } | for ( SatConstraint c : i . getSatConstraints ( ) ) { if ( ! ( c instanceof ResourceRelated && ( ( ResourceRelated ) c ) . getResource ( ) . equals ( rc . getResourceIdentifier ( ) ) ) ) { continue ; } if ( c instanceof Preserve ) { VM v = c . getInvolvedVMs ( ) . iterator ( ) . next ( ) ; wantedAmount . put ( v , consumption ( v , ( ( Preserve ) c ) . getAmount ( ) ) ) ; } else if ( c instanceof Overbook ) { Node n = c . getInvolvedNodes ( ) . iterator ( ) . next ( ) ; wantedRatios . put ( n , ratio ( n , ( ( Overbook ) c ) . getRatio ( ) ) ) ; } else if ( c instanceof ResourceCapacity && c . getInvolvedNodes ( ) . size ( ) == 1 ) { Node n = c . getInvolvedNodes ( ) . iterator ( ) . next ( ) ; wantedCapacity . put ( n , capacity ( n , ( ( ResourceCapacity ) c ) . getAmount ( ) ) ) ; } } Mapping m = i . getModel ( ) . getMapping ( ) ; Set < VM > candidates = new HashSet < > ( ) ; for ( Node n : m . getOnlineNodes ( ) ) { if ( overloaded ( m , n ) ) { candidates . addAll ( m . getRunningVMs ( n ) ) ; } } return candidates ; |
public class CmsProperty { /** * Replaces the given delimiter character with the replacement string . < p >
* @ param value the string that is scanned
* @ param delimiter the delimiter character to replace
* @ param delimiterReplacement the replacement string for the delimiter character
* @ return the substituted string */
private String replaceDelimiter ( String value , char delimiter , String delimiterReplacement ) { } } | return CmsStringUtil . substitute ( value , String . valueOf ( delimiter ) , delimiterReplacement ) ; |
public class BaseDestinationDefinitionImpl { /** * ( non - Javadoc )
* @ see com . ibm . ws . sib . admin . BaseDestinationDefinition # getUUID ( ) */
public SIBUuid12 getUUID ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getUUID" , this . _uuid ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . exit ( tc , "getUUID" ) ; } return _uuid ; |
public class CmsExplorerTypeAccess { /** * Creates the access control list from the temporary map . < p >
* @ param resourceType the name of the resource type
* @ throws CmsException if something goes wrong */
public void createAccessControlList ( String resourceType ) throws CmsException { } } | if ( OpenCms . getRunLevel ( ) < OpenCms . RUNLEVEL_2_INITIALIZING ) { // we don ' t need this for simple test cases
return ; } if ( m_permissionsCache == null ) { m_permissionsCache = CmsMemoryMonitor . createLRUCacheMap ( 2048 ) ; OpenCms . getMemoryMonitor ( ) . register ( this . getClass ( ) . getName ( ) + "." + resourceType , m_permissionsCache ) ; } else { m_permissionsCache . clear ( ) ; } m_accessControlList = new CmsAccessControlList ( ) ; Iterator < String > i = m_accessControl . keySet ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { String key = i . next ( ) ; if ( ! PRINCIPAL_DEFAULT . equals ( key ) ) { String value = m_accessControl . get ( key ) ; // get the principal name from the principal String
String principal = key . substring ( key . indexOf ( '.' ) + 1 , key . length ( ) ) ; // create an OpenCms user context with " Guest " permissions
CmsObject cms = OpenCms . initCmsObject ( OpenCms . getDefaultUsers ( ) . getUserGuest ( ) ) ; CmsUUID principalId = null ; if ( key . startsWith ( I_CmsPrincipal . PRINCIPAL_GROUP ) ) { // read the group
principal = OpenCms . getImportExportManager ( ) . translateGroup ( principal ) ; try { principalId = cms . readGroup ( principal ) . getId ( ) ; } catch ( CmsException e ) { if ( LOG . isErrorEnabled ( ) ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } } } else if ( key . startsWith ( I_CmsPrincipal . PRINCIPAL_USER ) ) { // read the user
principal = OpenCms . getImportExportManager ( ) . translateUser ( principal ) ; try { principalId = cms . readUser ( principal ) . getId ( ) ; } catch ( CmsException e ) { if ( LOG . isErrorEnabled ( ) ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } } } else { // read the role with role name
CmsRole role = CmsRole . valueOfRoleName ( principal ) ; if ( role == null ) { // try to read the role in the old fashion with group name
role = CmsRole . valueOfGroupName ( principal ) ; } principalId = role . getId ( ) ; } if ( principalId != null ) { // create a new entry for the principal
CmsAccessControlEntry entry = new CmsAccessControlEntry ( null , principalId , value ) ; m_accessControlList . add ( entry ) ; } } } |
public class ModifyCacheClusterRequest { /** * The list of Availability Zones where the new Memcached cache nodes are created .
* This parameter is only valid when < code > NumCacheNodes < / code > in the request is greater than the sum of the number
* of active cache nodes and the number of cache nodes pending creation ( which may be zero ) . The number of
* Availability Zones supplied in this list must match the cache nodes being added in this request .
* This option is only supported on Memcached clusters .
* Scenarios :
* < ul >
* < li >
* < b > Scenario 1 : < / b > You have 3 active nodes and wish to add 2 nodes . Specify < code > NumCacheNodes = 5 < / code > ( 3 + 2)
* and optionally specify two Availability Zones for the two new nodes .
* < / li >
* < li >
* < b > Scenario 2 : < / b > You have 3 active nodes and 2 nodes pending creation ( from the scenario 1 call ) and want to
* add 1 more node . Specify < code > NumCacheNodes = 6 < / code > ( ( 3 + 2 ) + 1 ) and optionally specify an Availability Zone
* for the new node .
* < / li >
* < li >
* < b > Scenario 3 : < / b > You want to cancel all pending operations . Specify < code > NumCacheNodes = 3 < / code > to cancel all
* pending operations .
* < / li >
* < / ul >
* The Availability Zone placement of nodes pending creation cannot be modified . If you wish to cancel any nodes
* pending creation , add 0 nodes by setting < code > NumCacheNodes < / code > to the number of current nodes .
* If < code > cross - az < / code > is specified , existing Memcached nodes remain in their current Availability Zone . Only
* newly created nodes can be located in different Availability Zones . For guidance on how to move existing
* Memcached nodes to different Availability Zones , see the < b > Availability Zone Considerations < / b > section of < a
* href = " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / mem - ug / CacheNode . Memcached . html " > Cache Node
* Considerations for Memcached < / a > .
* < b > Impact of new add / remove requests upon pending requests < / b >
* < ul >
* < li >
* Scenario - 1
* < ul >
* < li >
* Pending Action : Delete
* < / li >
* < li >
* New Request : Delete
* < / li >
* < li >
* Result : The new delete , pending or immediate , replaces the pending delete .
* < / li >
* < / ul >
* < / li >
* < li >
* Scenario - 2
* < ul >
* < li >
* Pending Action : Delete
* < / li >
* < li >
* New Request : Create
* < / li >
* < li >
* Result : The new create , pending or immediate , replaces the pending delete .
* < / li >
* < / ul >
* < / li >
* < li >
* Scenario - 3
* < ul >
* < li >
* Pending Action : Create
* < / li >
* < li >
* New Request : Delete
* < / li >
* < li >
* Result : The new delete , pending or immediate , replaces the pending create .
* < / li >
* < / ul >
* < / li >
* < li >
* Scenario - 4
* < ul >
* < li >
* Pending Action : Create
* < / li >
* < li >
* New Request : Create
* < / li >
* < li >
* Result : The new create is added to the pending create .
* < important >
* < b > Important : < / b > If the new create request is < b > Apply Immediately - Yes < / b > , all creates are performed
* immediately . If the new create request is < b > Apply Immediately - No < / b > , all creates are pending .
* < / important > < / li >
* < / ul >
* < / li >
* < / ul >
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setNewAvailabilityZones ( java . util . Collection ) } or { @ link # withNewAvailabilityZones ( java . util . Collection ) }
* if you want to override the existing values .
* @ param newAvailabilityZones
* The list of Availability Zones where the new Memcached cache nodes are created . < / p >
* This parameter is only valid when < code > NumCacheNodes < / code > in the request is greater than the sum of the
* number of active cache nodes and the number of cache nodes pending creation ( which may be zero ) . The
* number of Availability Zones supplied in this list must match the cache nodes being added in this request .
* This option is only supported on Memcached clusters .
* Scenarios :
* < ul >
* < li >
* < b > Scenario 1 : < / b > You have 3 active nodes and wish to add 2 nodes . Specify < code > NumCacheNodes = 5 < / code >
* ( 3 + 2 ) and optionally specify two Availability Zones for the two new nodes .
* < / li >
* < li >
* < b > Scenario 2 : < / b > You have 3 active nodes and 2 nodes pending creation ( from the scenario 1 call ) and
* want to add 1 more node . Specify < code > NumCacheNodes = 6 < / code > ( ( 3 + 2 ) + 1 ) and optionally specify an
* Availability Zone for the new node .
* < / li >
* < li >
* < b > Scenario 3 : < / b > You want to cancel all pending operations . Specify < code > NumCacheNodes = 3 < / code > to
* cancel all pending operations .
* < / li >
* < / ul >
* The Availability Zone placement of nodes pending creation cannot be modified . If you wish to cancel any
* nodes pending creation , add 0 nodes by setting < code > NumCacheNodes < / code > to the number of current nodes .
* If < code > cross - az < / code > is specified , existing Memcached nodes remain in their current Availability Zone .
* Only newly created nodes can be located in different Availability Zones . For guidance on how to move
* existing Memcached nodes to different Availability Zones , see the < b > Availability Zone Considerations < / b >
* section of < a
* href = " http : / / docs . aws . amazon . com / AmazonElastiCache / latest / mem - ug / CacheNode . Memcached . html " > Cache Node
* Considerations for Memcached < / a > .
* < b > Impact of new add / remove requests upon pending requests < / b >
* < ul >
* < li >
* Scenario - 1
* < ul >
* < li >
* Pending Action : Delete
* < / li >
* < li >
* New Request : Delete
* < / li >
* < li >
* Result : The new delete , pending or immediate , replaces the pending delete .
* < / li >
* < / ul >
* < / li >
* < li >
* Scenario - 2
* < ul >
* < li >
* Pending Action : Delete
* < / li >
* < li >
* New Request : Create
* < / li >
* < li >
* Result : The new create , pending or immediate , replaces the pending delete .
* < / li >
* < / ul >
* < / li >
* < li >
* Scenario - 3
* < ul >
* < li >
* Pending Action : Create
* < / li >
* < li >
* New Request : Delete
* < / li >
* < li >
* Result : The new delete , pending or immediate , replaces the pending create .
* < / li >
* < / ul >
* < / li >
* < li >
* Scenario - 4
* < ul >
* < li >
* Pending Action : Create
* < / li >
* < li >
* New Request : Create
* < / li >
* < li >
* Result : The new create is added to the pending create .
* < important >
* < b > Important : < / b > If the new create request is < b > Apply Immediately - Yes < / b > , all creates are performed
* immediately . If the new create request is < b > Apply Immediately - No < / b > , all creates are pending .
* < / important > < / li >
* < / ul >
* < / li >
* @ return Returns a reference to this object so that method calls can be chained together . */
public ModifyCacheClusterRequest withNewAvailabilityZones ( String ... newAvailabilityZones ) { } } | if ( this . newAvailabilityZones == null ) { setNewAvailabilityZones ( new com . amazonaws . internal . SdkInternalList < String > ( newAvailabilityZones . length ) ) ; } for ( String ele : newAvailabilityZones ) { this . newAvailabilityZones . add ( ele ) ; } return this ; |
public class Generics { /** * Finds the type parameter for the given class which is assignable to the bound class .
* @ param klass a parameterized class
* @ param bound the type bound
* @ param < T > the type bound
* @ return the class ' s type parameter */
public static < T > Class < T > getTypeParameter ( Class < ? > klass , Class < ? super T > bound ) { } } | Type t = requireNonNull ( klass ) ; while ( t instanceof Class < ? > ) { t = ( ( Class < ? > ) t ) . getGenericSuperclass ( ) ; } /* This is not guaranteed to work for all cases with convoluted piping
* of type parameters : but it can at least resolve straight - forward
* extension with single type parameter ( as per [ Issue - 89 ] ) .
* And when it fails to do that , will indicate with specific exception . */
if ( t instanceof ParameterizedType ) { // should typically have one of type parameters ( first one ) that matches :
for ( Type param : ( ( ParameterizedType ) t ) . getActualTypeArguments ( ) ) { if ( param instanceof Class < ? > ) { final Class < T > cls = determineClass ( bound , param ) ; if ( cls != null ) { return cls ; } } else if ( param instanceof TypeVariable ) { for ( Type paramBound : ( ( TypeVariable < ? > ) param ) . getBounds ( ) ) { if ( paramBound instanceof Class < ? > ) { final Class < T > cls = determineClass ( bound , paramBound ) ; if ( cls != null ) { return cls ; } } } } else if ( param instanceof ParameterizedType ) { final Type rawType = ( ( ParameterizedType ) param ) . getRawType ( ) ; if ( rawType instanceof Class < ? > ) { final Class < T > cls = determineClass ( bound , rawType ) ; if ( cls != null ) { return cls ; } } } } } throw new IllegalStateException ( "Cannot figure out type parameterization for " + klass . getName ( ) ) ; |
public class WSelectToggleRenderer { /** * Paints the given WSelectToggle .
* @ param component the WSelectToggle to paint .
* @ param renderContext the RenderContext to paint to . */
@ Override public void doRender ( final WComponent component , final WebXmlRenderContext renderContext ) { } } | WSelectToggle toggle = ( WSelectToggle ) component ; XmlStringBuilder xml = renderContext . getWriter ( ) ; xml . appendTagOpen ( "ui:selecttoggle" ) ; xml . appendAttribute ( "id" , component . getId ( ) ) ; xml . appendOptionalAttribute ( "class" , component . getHtmlClass ( ) ) ; xml . appendOptionalAttribute ( "track" , component . isTracking ( ) , "true" ) ; State state = toggle . getState ( ) ; if ( State . ALL . equals ( state ) ) { xml . appendAttribute ( "selected" , "all" ) ; } else if ( State . NONE . equals ( state ) ) { xml . appendAttribute ( "selected" , "none" ) ; } else { xml . appendAttribute ( "selected" , "some" ) ; } xml . appendOptionalAttribute ( "disabled" , toggle . isDisabled ( ) , "true" ) ; xml . appendAttribute ( "target" , toggle . getTarget ( ) . getId ( ) ) ; xml . appendAttribute ( "renderAs" , toggle . isRenderAsText ( ) ? "text" : "control" ) ; xml . appendOptionalAttribute ( "roundTrip" , ! toggle . isClientSide ( ) , "true" ) ; xml . appendEnd ( ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.