signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class JobClient { /** * 取消任务 */
public Response cancelJob ( String taskId , String taskTrackerNodeGroup ) { } }
|
checkStart ( ) ; final Response response = new Response ( ) ; Assert . hasText ( taskId , "taskId can not be empty" ) ; Assert . hasText ( taskTrackerNodeGroup , "taskTrackerNodeGroup can not be empty" ) ; JobCancelRequest request = CommandBodyWrapper . wrapper ( appContext , new JobCancelRequest ( ) ) ; request . setTaskId ( taskId ) ; request . setTaskTrackerNodeGroup ( taskTrackerNodeGroup ) ; RemotingCommand requestCommand = RemotingCommand . createRequestCommand ( JobProtos . RequestCode . CANCEL_JOB . code ( ) , request ) ; try { RemotingCommand remotingResponse = remotingClient . invokeSync ( requestCommand ) ; if ( JobProtos . ResponseCode . JOB_CANCEL_SUCCESS . code ( ) == remotingResponse . getCode ( ) ) { LOGGER . info ( "Cancel job success taskId={}, taskTrackerNodeGroup={} " , taskId , taskTrackerNodeGroup ) ; response . setSuccess ( true ) ; return response ; } response . setSuccess ( false ) ; response . setCode ( JobProtos . ResponseCode . valueOf ( remotingResponse . getCode ( ) ) . name ( ) ) ; response . setMsg ( remotingResponse . getRemark ( ) ) ; LOGGER . warn ( "Cancel job failed: taskId={}, taskTrackerNodeGroup={}, msg={}" , taskId , taskTrackerNodeGroup , remotingResponse . getRemark ( ) ) ; return response ; } catch ( JobTrackerNotFoundException e ) { response . setSuccess ( false ) ; response . setCode ( ResponseCode . JOB_TRACKER_NOT_FOUND ) ; response . setMsg ( "Can not found JobTracker node!" ) ; return response ; }
|
public class CipherUtils { /** * converts a byte [ ] that originally was created using { @ link PrivateKey # getEncoded ( ) } back to the corresponding
* instance .
* Example : CipherUtils . deserializePrivateKey ( data , " RSA " ) */
public static PrivateKey deserializePrivateKey ( byte [ ] keyData , String algorithm ) { } }
|
LOGGER . trace ( "deserialize private key from data using algorithm \"{}\"" , algorithm ) ; PKCS8EncodedKeySpec privSpec = new PKCS8EncodedKeySpec ( keyData ) ; try { KeyFactory keyFactory = KeyFactory . getInstance ( algorithm ) ; return keyFactory . generatePrivate ( privSpec ) ; } catch ( GeneralSecurityException e ) { throw new IllegalArgumentException ( "provided data could not be converted to a PrivateKey for algorithm " + algorithm , e ) ; }
|
public class Assets { /** * Asynchronously loads and returns the image at the specified URL . The width and height of the
* image will be the supplied { @ code width } and { @ code height } until the image is loaded .
* < em > Note : < / em > on non - HTML platforms , this spawns a new thread for each loaded image . Thus ,
* attempts to load large numbers of remote images simultaneously may result in poor performance . */
public Image getRemoteImage ( String url , int width , int height ) { } }
|
Exception error = new Exception ( "Remote image loading not yet supported: " + url + "@" + width + "x" + height ) ; ImageImpl image = createImage ( false , width , height , url ) ; image . fail ( error ) ; return image ;
|
public class GetUsagePlanKeysResult { /** * The current page of elements from this collection .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setItems ( java . util . Collection ) } or { @ link # withItems ( java . util . Collection ) } if you want to override the
* existing values .
* @ param items
* The current page of elements from this collection .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetUsagePlanKeysResult withItems ( UsagePlanKey ... items ) { } }
|
if ( this . items == null ) { setItems ( new java . util . ArrayList < UsagePlanKey > ( items . length ) ) ; } for ( UsagePlanKey ele : items ) { this . items . add ( ele ) ; } return this ;
|
public class MCWrapper { /** * This method is used for marking a connection to destroy .
* The connection state does not matter . The connection still
* can be useable . When the connection is returned to the
* free pool , this connection will be cleaned up and destroyed .
* This method may be called when total connection count is being
* decreased . */
@ Override public void setDestroyConnectionOnReturn ( ) { } }
|
final boolean isTracingEnabled = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTracingEnabled && tc . isEntryEnabled ( ) ) { Tr . entry ( this , tc , "setDestroyConnectionOnReturn" ) ; } -- fatalErrorValue ; if ( isTracingEnabled && tc . isEntryEnabled ( ) ) { Tr . exit ( this , tc , "setDestroyConnectionOnReturn" , fatalErrorValue ) ; }
|
public class Solo { /** * Clicks the specified item index and returns an ArrayList of the TextView objects that
* the item index is displaying . Will use the first RecyclerView it finds .
* @ param itemIndex the item index to click
* @ return an { @ code ArrayList } of the { @ link TextView } objects located in the item index */
public ArrayList < TextView > clickInRecyclerView ( int itemIndex ) { } }
|
if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "clickInRecyclerView(" + itemIndex + ")" ) ; } return clicker . clickInRecyclerView ( itemIndex ) ;
|
public class NGAExtensions { /** * Delete all NGA extensions including custom extension tables for the
* GeoPackage
* @ param geoPackage
* GeoPackage */
public static void deleteExtensions ( GeoPackageCore geoPackage ) { } }
|
deleteGeometryIndexExtension ( geoPackage ) ; deleteFeatureTileLinkExtension ( geoPackage ) ; deleteTileScalingExtension ( geoPackage ) ; deletePropertiesExtension ( geoPackage ) ; deleteFeatureStyleExtension ( geoPackage ) ; deleteContentsIdExtension ( geoPackage ) ; // Delete future extension tables here
|
public class GenericLogicDiscoverer { /** * Discover registered operations that consume all the types of input provided . That is , all those that have as input
* the types provided . All the input types should be matched to different inputs .
* @ param inputTypes the types of input to be consumed
* @ return a Set containing all the matching operations . If there are no solutions , the Set should be empty , not null . */
@ Override public Map < URI , MatchResult > findOperationsConsumingAll ( Set < URI > inputTypes ) { } }
|
return findOperationsConsumingAll ( inputTypes , LogicConceptMatchType . Plugin ) ;
|
public class AbstractHasTopLevelRules { /** * Get a list of all top - level rules that are support rules ( implementing
* { @ link CSSSupportsRule } ) .
* @ return A copy of all contained < code > @ supports < / code > rules . Never
* < code > null < / code > . */
@ Nonnull @ ReturnsMutableCopy public ICommonsList < CSSSupportsRule > getAllSupportsRules ( ) { } }
|
return m_aRules . getAllMapped ( r -> r instanceof CSSSupportsRule , r -> ( CSSSupportsRule ) r ) ;
|
public class CollectionUtils { /** * Adapts the { @ link Enumeration } into an instance of the { @ link Iterable } interface .
* @ param < T > Class type of the elements in the { @ link Enumeration } .
* @ param enumeration { @ link Enumeration } to adapt into an { @ link Iterable } .
* @ return an { @ link Iterable } implementation backed by the { @ link Enumeration } .
* @ see # asIterator ( java . util . Enumeration )
* @ see java . util . Enumeration
* @ see java . lang . Iterable */
@ NullSafe public static < T > Iterable < T > asIterable ( Enumeration < T > enumeration ) { } }
|
return ( ) -> asIterator ( enumeration ) ;
|
public class AgentManifestReader { /** * Read all the specific manifest files and return the set of packages containing type query beans . */
private AgentManifestReader readManifests ( ClassLoader classLoader , String path ) throws IOException { } }
|
Enumeration < URL > resources = classLoader . getResources ( path ) ; while ( resources . hasMoreElements ( ) ) { URL url = resources . nextElement ( ) ; try { addResource ( url . openStream ( ) ) ; } catch ( IOException e ) { System . err . println ( "Error reading manifest resources " + url ) ; e . printStackTrace ( ) ; } } return this ;
|
public class DefaultXMLReader { /** * / * ( non - Javadoc )
* @ see tuwien . auto . calimero . xml . XMLReader # close ( ) */
public void close ( ) throws KNXMLException { } }
|
if ( closeReader ) try { r . close ( ) ; } catch ( final IOException e ) { throw new KNXMLException ( e . getMessage ( ) ) ; }
|
public class KTypeHashSet { /** * / * # if ( $ TemplateOptions . KTypeGeneric ) */
@ SafeVarargs /* # end */
public final int addAll ( KType ... elements ) { } }
|
ensureCapacity ( elements . length ) ; int count = 0 ; for ( KType e : elements ) { if ( add ( e ) ) { count ++ ; } } return count ;
|
public class PdfDocument { /** * Returns the bottomvalue of a < CODE > Table < / CODE > if it were added to this document .
* @ paramtablethe table that may or may not be added to this document
* @ returna bottom value */
float bottom ( Table table ) { } }
|
// constructing a PdfTable
PdfTable tmp = new PdfTable ( table , indentLeft ( ) , indentRight ( ) , indentTop ( ) - currentHeight ) ; return tmp . getBottom ( ) ;
|
public class CmsLocaleGroup { /** * Gets the list of all resources of this group ( primary and secondary ) . < p >
* @ return the list of all resources of this group */
public List < CmsResource > getAllResources ( ) { } }
|
List < CmsResource > result = Lists . newArrayList ( ) ; result . add ( m_primaryResource ) ; for ( CmsResource res : getSecondaryResources ( ) ) { result . add ( res ) ; } return result ;
|
public class HiveRegistrationUnitComparator { /** * Compare all parameters . */
@ SuppressWarnings ( "unchecked" ) public T compareAll ( ) { } }
|
this . compareInputFormat ( ) . compareOutputFormat ( ) . compareIsCompressed ( ) . compareIsStoredAsSubDirs ( ) . compareNumBuckets ( ) . compareBucketCols ( ) . compareRawLocation ( ) . compareParameters ( ) ; return ( T ) this ;
|
public class SpatialReferenceSystemDao { /** * { @ inheritDoc } */
@ Override public SpatialReferenceSystem queryForId ( Long id ) throws SQLException { } }
|
SpatialReferenceSystem srs = super . queryForId ( id ) ; setDefinition_12_063 ( srs ) ; return srs ;
|
public class CFCImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setRetired1 ( Integer newRetired1 ) { } }
|
Integer oldRetired1 = retired1 ; retired1 = newRetired1 ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . CFC__RETIRED1 , oldRetired1 , retired1 ) ) ;
|
public class JsonBuilder { /** * Add a number to the object .
* @ param key key
* @ param n value
* @ return the builder */
public @ Nonnull JsonBuilder put ( String key , Number n ) { } }
|
object . put ( key , primitive ( n ) ) ; return this ;
|
public class FxFlowableTransformers { /** * Performs an action on FX thread on onCompleted with the provided emission count
* @ param onComplete
* @ param < T > */
public static < T > FlowableTransformer < T , T > doOnCompleteCountFx ( Consumer < Integer > onComplete ) { } }
|
return obs -> obs . compose ( doOnCompleteCount ( i -> runOnFx ( i , onComplete ) ) ) ;
|
public class GitHub { /** * Get matching paths found in given base directory
* @ param includes
* @ param excludes
* @ param baseDir
* @ return non - null but possibly empty array of string paths relative to the
* base directory */
public static String [ ] getMatchingPaths ( final String [ ] includes , final String [ ] excludes , final String baseDir ) { } }
|
DirectoryScanner scanner = new DirectoryScanner ( ) ; scanner . setBasedir ( baseDir ) ; if ( includes != null && includes . length > 0 ) { scanner . setIncludes ( includes ) ; } if ( excludes != null && excludes . length > 0 ) { scanner . setExcludes ( excludes ) ; } scanner . scan ( ) ; return scanner . getIncludedFiles ( ) ;
|
public class TitlePaneIconifyButtonPainter { /** * Paint the background pressed state .
* @ param g the Graphics2D context to paint with .
* @ param c the component .
* @ param width the width of the component .
* @ param height the height of the component . */
private void paintBackgroundPressed ( Graphics2D g , JComponent c , int width , int height ) { } }
|
paintBackground ( g , c , width , height , pressed ) ;
|
public class DataSinkNode { @ Override public List < PlanNode > getAlternativePlans ( CostEstimator estimator ) { } }
|
// check if we have a cached version
if ( this . cachedPlans != null ) { return this . cachedPlans ; } // calculate alternative sub - plans for predecessor
List < ? extends PlanNode > subPlans = getPredecessorNode ( ) . getAlternativePlans ( estimator ) ; List < PlanNode > outputPlans = new ArrayList < PlanNode > ( ) ; final int parallelism = getParallelism ( ) ; final int inDop = getPredecessorNode ( ) . getParallelism ( ) ; final ExecutionMode executionMode = this . input . getDataExchangeMode ( ) ; final boolean dopChange = parallelism != inDop ; final boolean breakPipeline = this . input . isBreakingPipeline ( ) ; InterestingProperties ips = this . input . getInterestingProperties ( ) ; for ( PlanNode p : subPlans ) { for ( RequestedGlobalProperties gp : ips . getGlobalProperties ( ) ) { for ( RequestedLocalProperties lp : ips . getLocalProperties ( ) ) { Channel c = new Channel ( p ) ; gp . parameterizeChannel ( c , dopChange , executionMode , breakPipeline ) ; lp . parameterizeChannel ( c ) ; c . setRequiredLocalProps ( lp ) ; c . setRequiredGlobalProps ( gp ) ; // no need to check whether the created properties meet what we need in case
// of ordering or global ordering , because the only interesting properties we have
// are what we require
outputPlans . add ( new SinkPlanNode ( this , "DataSink (" + this . getOperator ( ) . getName ( ) + ")" , c ) ) ; } } } // cost and prune the plans
for ( PlanNode node : outputPlans ) { estimator . costOperator ( node ) ; } prunePlanAlternatives ( outputPlans ) ; this . cachedPlans = outputPlans ; return outputPlans ;
|
public class SDBaseOps { /** * Reshape the input variable to the specified ( fixed ) shape . The output variable will have the same values as the
* input , but with the specified shape . < br >
* Note that prod ( shape ) must match length ( input ) = = prod ( input . shape )
* @ param name Output variable name
* @ param x Input variable
* @ param shape New shape for variable
* @ return Output variable
* @ see # reshape ( SDVariable , SDVariable ) */
public SDVariable reshape ( String name , SDVariable x , long ... shape ) { } }
|
SDVariable result = f ( ) . reshape ( x , shape ) ; return updateVariableNameAndReference ( result , name ) ;
|
public class LinkedPredict { /** * 合并重复标签 , 有问题 ( 未排序 ) */
public void mergeDuplicate ( ) { } }
|
for ( int i = 0 ; i < labels . size ( ) ; i ++ ) for ( int j = i + 1 ; j < labels . size ( ) ; j ++ ) { T tagi = labels . get ( i ) ; T tagj = labels . get ( j ) ; if ( tagi . equals ( tagj ) ) { scores . set ( i , scores . get ( i ) + scores . get ( j ) ) ; labels . remove ( j ) ; scores . remove ( j ) ; j -- ; } }
|
public class QuickHull3D { /** * Triangulates any non - triangular hull faces . In some cases , due to
* precision issues , the resulting triangles may be very thin or small , and
* hence appear to be non - convex ( this same limitation is present in < a
* href = http : / / www . qhull . org > qhull < / a > ) . */
public void triangulate ( ) { } }
|
double minArea = 1000 * charLength * DOUBLE_PREC ; newFaces . clear ( ) ; for ( Iterator it = faces . iterator ( ) ; it . hasNext ( ) ; ) { Face face = ( Face ) it . next ( ) ; if ( face . mark == Face . VISIBLE ) { face . triangulate ( newFaces , minArea ) ; // splitFace ( face ) ;
} } for ( Face face = newFaces . first ( ) ; face != null ; face = face . next ) { faces . add ( face ) ; }
|
public class Query { /** * < pre >
* { array : < array > , contains : < op > , values : [ values ] }
* < / pre > */
public static Query arrayContains ( String array , ArrOp op , Literal ... values ) { } }
|
Query q = new Query ( false ) ; q . add ( "array" , array ) . add ( "contains" , op . toString ( ) ) . add ( "values" , Literal . toJson ( values ) ) ; return q ;
|
public class CloudMe { /** * Recursive method that parses folders XML and builds CMFolder structure .
* @ param element
* @ param cmFolder */
private void scanFolderLevel ( Element element , CMFolder cmFolder ) { } }
|
NodeList nodeList = element . getChildNodes ( ) ; for ( int i = 0 ; i < nodeList . getLength ( ) ; i ++ ) { Node currentNode = nodeList . item ( i ) ; if ( currentNode . getNodeType ( ) != Node . ELEMENT_NODE ) { continue ; } Element currentElement = ( Element ) currentNode ; if ( ! currentElement . getLocalName ( ) . equals ( "folder" ) ) { continue ; } // calls this method for all the children which is Element
CMFolder childFolder = cmFolder . addChild ( currentElement . getAttribute ( "id" ) , currentElement . getAttribute ( "name" ) ) ; scanFolderLevel ( currentElement , childFolder ) ; }
|
public class ClientBeanDefinitionParser { /** * Managing common properties for TransportClient */
static BeanDefinitionBuilder startClientBuilder ( Class beanClass , String properties , boolean forceMapping , boolean forceTemplate , boolean mergeMapping , boolean mergeSettings , boolean autoscan , String classpathRoot , String mappings , String aliases , String templates , String async , String taskExecutor ) { } }
|
BeanDefinitionBuilder nodeFactory = BeanDefinitionBuilder . rootBeanDefinition ( beanClass ) ; if ( properties != null && properties . length ( ) > 0 ) { nodeFactory . addPropertyReference ( "properties" , properties ) ; } nodeFactory . addPropertyValue ( "forceMapping" , forceMapping ) ; nodeFactory . addPropertyValue ( "forceTemplate" , forceTemplate ) ; nodeFactory . addPropertyValue ( "mergeMapping" , mergeMapping ) ; nodeFactory . addPropertyValue ( "mergeSettings" , mergeSettings ) ; nodeFactory . addPropertyValue ( "autoscan" , autoscan ) ; if ( classpathRoot != null && classpathRoot . length ( ) > 0 ) { nodeFactory . addPropertyValue ( "classpathRoot" , classpathRoot ) ; } if ( mappings != null && mappings . length ( ) > 0 ) { nodeFactory . addPropertyValue ( "mappings" , mappings ) ; } if ( aliases != null && aliases . length ( ) > 0 ) { nodeFactory . addPropertyValue ( "aliases" , aliases ) ; } if ( templates != null && templates . length ( ) > 0 ) { nodeFactory . addPropertyValue ( "templates" , templates ) ; } if ( async != null && async . length ( ) > 0 ) { nodeFactory . addPropertyValue ( "async" , async ) ; } if ( taskExecutor != null && taskExecutor . length ( ) > 0 ) { nodeFactory . addPropertyReference ( "taskExecutor" , taskExecutor ) ; } return nodeFactory ;
|
public class Reflecter { /** * Returns a new Reflecter instance
* @ param target
* @ return */
@ SuppressWarnings ( "unchecked" ) public static < T > Reflecter < T > from ( T target ) { } }
|
return Decisions . isClass ( ) . apply ( target ) ? ( ( ( Class < T > ) target ) . isArray ( ) ? ( Reflecter < T > ) from ( ObjectArrays . newArray ( ( ( Class < T > ) target ) . getComponentType ( ) , 0 ) ) : from ( ( Class < T > ) target ) ) : new Reflecter < T > ( target ) ;
|
public class SDEFmethods { /** * Another method to force an input string into a fixed width field
* and set it on the right with the left side filled with space ' ' characters .
* @ param input input string
* @ param width required width
* @ return formatted string */
public static String rset ( String input , int width ) { } }
|
String result ; // result to return
StringBuilder pad = new StringBuilder ( ) ; if ( input == null ) { for ( int i = 0 ; i < width - 1 ; i ++ ) { pad . append ( ' ' ) ; // put blanks into buffer
} result = " " + pad ; // one short to use + overload
} else { if ( input . length ( ) >= width ) { result = input . substring ( 0 , width ) ; // when input is too long , truncate
} else { int padLength = width - input . length ( ) ; // number of blanks to add
for ( int i = 0 ; i < padLength ; i ++ ) { pad . append ( ' ' ) ; // actually put blanks into buffer
} result = pad + input ; // concatenate
} } return result ;
|
public class PushApplicationEndpoint { /** * Count Push Applications
* @ param pushApplicationID id of { @ link PushApplication }
* @ return count number for each { @ link org . jboss . aerogear . unifiedpush . api . VariantType } */
@ GET @ Path ( "/{pushAppID}/count" ) @ Produces ( MediaType . APPLICATION_JSON ) public Response countInstallations ( @ PathParam ( "pushAppID" ) String pushApplicationID ) { } }
|
logger . trace ( "counting devices by type for push application '{}'" , pushApplicationID ) ; Map < String , Long > result = pushAppService . countInstallationsByType ( pushApplicationID ) ; return Response . ok ( result ) . build ( ) ;
|
public class ToStringStyle { /** * < p > Append to the < code > toString < / code > an < code > Object < / code >
* value that has been detected to participate in a cycle . This
* implementation will print the standard string value of the value . < / p >
* @ param buffer the < code > StringBuffer < / code > to populate
* @ param fieldName the field name , typically not used as already appended
* @ param value the value to add to the < code > toString < / code > ,
* not < code > null < / code >
* @ since 2.2 */
protected void appendCyclicObject ( final StringBuffer buffer , final String fieldName , final Object value ) { } }
|
ObjectUtils . identityToString ( buffer , value ) ;
|
public class TransformedMirage { /** * documentation inherited from interface Mirage */
public void paint ( Graphics2D gfx , int x , int y ) { } }
|
AffineTransform otrans = gfx . getTransform ( ) ; gfx . translate ( x , y ) ; gfx . transform ( _transform ) ; _base . paint ( gfx , 0 , 0 ) ; gfx . setTransform ( otrans ) ;
|
public class CATBifurcatedConsumer { /** * This method will unlock a set of locked messages that have been delivered to
* us ( the server ) which we have then passed on to the client .
* @ param requestNumber The request number that replies should be sent with .
* @ param msgIds The array of message id ' s that should be unlocked .
* @ param reply Whether this will demand a reply . */
@ Override public void unlockSet ( int requestNumber , SIMessageHandle [ ] msgHandles , boolean reply ) // f199593 , F219476.2
{ } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "unlockSet" , new Object [ ] { requestNumber , msgHandles , reply } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { SibTr . debug ( this , tc , "Request to unlock " + msgHandles . length + " message(s)" ) ; if ( reply ) SibTr . debug ( this , tc , "The client is expecting a reply" ) ; } try { bifSession . unlockSet ( msgHandles ) ; if ( reply ) { try { getConversation ( ) . send ( poolManager . allocate ( ) , JFapChannelConstants . SEG_UNLOCK_SET_R , requestNumber , JFapChannelConstants . PRIORITY_MEDIUM , true , ThrottlingPolicy . BLOCK_THREAD , null ) ; } catch ( SIException e ) { FFDCFilter . processException ( e , CLASS_NAME + ".unlockSet" , CommsConstants . CATBIFCONSUMER_UNLOCKSET_01 , this ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , e . getMessage ( ) , e ) ; SibTr . error ( tc , "COMMUNICATION_ERROR_SICO2033" , e ) ; } } } catch ( SIException e ) { // No FFDC code needed
// Only FFDC if we haven ' t received a meTerminated event .
if ( ! ( ( ConversationState ) getConversation ( ) . getAttachment ( ) ) . hasMETerminated ( ) ) { FFDCFilter . processException ( e , CLASS_NAME + ".unlockSet" , CommsConstants . CATBIFCONSUMER_UNLOCKSET_02 , this ) ; } if ( reply ) { StaticCATHelper . sendExceptionToClient ( e , CommsConstants . CATBIFCONSUMER_UNLOCKSET_02 , getConversation ( ) , requestNumber ) ; } else { SibTr . error ( tc , "UNABLE_TO_UNLOCK_MSGS_SICO2032" , e ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "unlockSet" ) ;
|
public class Cookies { /** * Wait for the cookies to become available , cache them and subsequently return the
* cached map of cookies . */
public Map < CharSequence , Set < Cookie > > getCachedCookies ( ) { } }
|
if ( ! STATE . compareAndSet ( this , NOT_READ , READING ) ) { for ( ; ; ) { if ( state == READ ) { return cachedCookies ; } } } List < String > allCookieHeaders = nettyHeaders . getAll ( cookiesHeaderName ) ; Map < String , Set < Cookie > > cookies = new HashMap < > ( ) ; for ( String aCookieHeader : allCookieHeaders ) { Set < Cookie > decode ; if ( isClientChannel ) { final Cookie c = ( ( ClientCookieDecoder ) decoder ) . decode ( aCookieHeader ) ; Set < Cookie > existingCookiesOfName = cookies . get ( c . name ( ) ) ; if ( null == existingCookiesOfName ) { existingCookiesOfName = new HashSet < > ( ) ; cookies . put ( c . name ( ) , existingCookiesOfName ) ; } existingCookiesOfName . add ( c ) ; } else { decode = ( ( ServerCookieDecoder ) decoder ) . decode ( aCookieHeader ) ; for ( Cookie cookie : decode ) { Set < Cookie > existingCookiesOfName = cookies . get ( cookie . name ( ) ) ; if ( null == existingCookiesOfName ) { existingCookiesOfName = new HashSet < > ( ) ; cookies . put ( cookie . name ( ) , existingCookiesOfName ) ; } existingCookiesOfName . add ( cookie ) ; } } } cachedCookies = Collections . unmodifiableMap ( cookies ) ; state = READ ; return cachedCookies ;
|
public class NettyServer { /** * 初始化handler适配包装
* @ param init
* @ return */
protected ChannelHandler initLogHandlerAdapter ( ChannelHandler init ) { } }
|
ChannelHandler handler = new ShareableChannelInboundHandler ( ) { @ Override public void channelRegistered ( ChannelHandlerContext ctx ) throws Exception { Channel ch = ctx . channel ( ) ; manageChannel ( ch ) ; LogLevel level = config . getChannelLevel ( ) ; if ( level != null ) { // 单个链路的日志记录器
ch . pipeline ( ) . addLast ( new LoggerHandler ( level ) ) ; } ch . pipeline ( ) . addLast ( init ) ; ctx . pipeline ( ) . remove ( this ) ; // 移除当前handler
ctx . fireChannelRegistered ( ) ; // 从当前handler往后抛出事件
} } ; // ChannelHandler handler = new ChannelInitializer < Channel > ( ) {
// @ Override
// protected void initChannel ( Channel ch ) throws Exception {
// channelGroup . add ( ch ) ;
// LogLevel level = config . getLevel ( ) ;
// if ( level ! = null )
// ch . pipeline ( ) . addLast ( new LoggerHandler ( config . getLevel ( ) ) ) ;
// ch . pipeline ( ) . addLast ( init ) ;
return handler ;
|
public class AwtExtensions { /** * Gets the root parent from the given Component Object .
* @ param component
* The Component to find the root parent .
* @ return ' s the root parent . */
public static Component getRootParent ( Component component ) { } }
|
while ( null != component . getParent ( ) ) { component = component . getParent ( ) ; } return component ;
|
public class DataMediaPairAction { /** * 添加DataMediaPair
* @ param channelInfo
* @ param channelParameterInfo
* @ throws Exception */
public void doAdd ( @ Param ( "submitKey" ) String submitKey , @ FormGroup ( "dataMediaPairInfo" ) Group dataMediaPairInfo , @ FormField ( name = "formDataMediaPairError" , group = "dataMediaPairInfo" ) CustomErrors err , Navigator nav ) throws Exception { } }
|
DataMediaPair dataMediaPair = new DataMediaPair ( ) ; DataMedia sourceDataMedia = new DataMedia ( ) ; DataMedia targetDataMedia = new DataMedia ( ) ; dataMediaPairInfo . setProperties ( dataMediaPair ) ; // filter解析
ExtensionDataType filterType = ExtensionDataType . valueOf ( dataMediaPairInfo . getField ( "filterType" ) . getStringValue ( ) ) ; ExtensionData filterData = new ExtensionData ( ) ; filterData . setExtensionDataType ( filterType ) ; if ( filterType . isClazz ( ) ) { filterData . setClazzPath ( dataMediaPairInfo . getField ( "filterText" ) . getStringValue ( ) ) ; } else if ( filterType . isSource ( ) ) { filterData . setSourceText ( dataMediaPairInfo . getField ( "filterText" ) . getStringValue ( ) ) ; } dataMediaPair . setFilterData ( filterData ) ; // fileresovler解析
ExtensionDataType resolverType = ExtensionDataType . valueOf ( dataMediaPairInfo . getField ( "resolverType" ) . getStringValue ( ) ) ; ExtensionData resolverData = new ExtensionData ( ) ; resolverData . setExtensionDataType ( resolverType ) ; if ( resolverType . isClazz ( ) ) { resolverData . setClazzPath ( dataMediaPairInfo . getField ( "resolverText" ) . getStringValue ( ) ) ; } else if ( resolverType . isSource ( ) ) { resolverData . setSourceText ( dataMediaPairInfo . getField ( "resolverText" ) . getStringValue ( ) ) ; } dataMediaPair . setResolverData ( resolverData ) ; sourceDataMedia . setId ( dataMediaPairInfo . getField ( "sourceDataMediaId" ) . getLongValue ( ) ) ; dataMediaPair . setSource ( sourceDataMedia ) ; targetDataMedia . setId ( dataMediaPairInfo . getField ( "targetDataMediaId" ) . getLongValue ( ) ) ; dataMediaPair . setTarget ( targetDataMedia ) ; Long id = 0L ; try { id = dataMediaPairService . createAndReturnId ( dataMediaPair ) ; } catch ( RepeatConfigureException rce ) { err . setMessage ( "invalidDataMediaPair" ) ; return ; } if ( submitKey . equals ( "保存" ) ) { nav . redirectToLocation ( "dataMediaPairList.htm?pipelineId=" + dataMediaPair . getPipelineId ( ) ) ; } else if ( submitKey . equals ( "下一步" ) ) { nav . redirectToLocation ( "addColumnPair.htm?dataMediaPairId=" + id + "&pipelineId=" + dataMediaPair . getPipelineId ( ) + "&dataMediaPairId=" + id + "&sourceMediaId=" + sourceDataMedia . getId ( ) + "&targetMediaId=" + targetDataMedia . getId ( ) ) ; }
|
public class BuildUtils { /** * Attaches a node into the network . If a node already exists that could
* substitute , it is used instead .
* @ param context
* The current build context
* @ param candidate
* The node to attach .
* @ return the actual attached node that may be the one given as parameter
* or eventually one that was already in the cache if sharing is enabled */
public < T extends BaseNode > T attachNode ( BuildContext context , T candidate ) { } }
|
BaseNode node = null ; RuleBasePartitionId partition = null ; if ( candidate . getType ( ) == NodeTypeEnums . EntryPointNode ) { // entry point nodes are always shared
node = context . getKnowledgeBase ( ) . getRete ( ) . getEntryPointNode ( ( ( EntryPointNode ) candidate ) . getEntryPoint ( ) ) ; // all EntryPointNodes belong to the main partition
partition = RuleBasePartitionId . MAIN_PARTITION ; } else if ( candidate . getType ( ) == NodeTypeEnums . ObjectTypeNode ) { // object type nodes are always shared
Map < ObjectType , ObjectTypeNode > map = context . getKnowledgeBase ( ) . getRete ( ) . getObjectTypeNodes ( context . getCurrentEntryPoint ( ) ) ; if ( map != null ) { ObjectTypeNode otn = map . get ( ( ( ObjectTypeNode ) candidate ) . getObjectType ( ) ) ; if ( otn != null ) { // adjusting expiration offset
otn . mergeExpirationOffset ( ( ObjectTypeNode ) candidate ) ; node = otn ; } } // all ObjectTypeNodes belong to the main partition
partition = RuleBasePartitionId . MAIN_PARTITION ; } else if ( isSharingEnabledForNode ( context , candidate ) ) { if ( ( context . getTupleSource ( ) != null ) && NodeTypeEnums . isLeftTupleSink ( candidate ) ) { node = context . getTupleSource ( ) . getSinkPropagator ( ) . getMatchingNode ( candidate ) ; } else if ( ( context . getObjectSource ( ) != null ) && NodeTypeEnums . isObjectSink ( candidate ) ) { node = context . getObjectSource ( ) . getObjectSinkPropagator ( ) . getMatchingNode ( candidate ) ; } else { throw new RuntimeException ( "This is a bug on node sharing verification. Please report to development team." ) ; } } if ( node != null && ! areNodesCompatibleForSharing ( context , node , candidate ) ) { node = null ; } if ( node == null ) { // only attach ( ) if it is a new node
node = candidate ; // new node , so it must be labeled
if ( partition == null ) { // if it does not has a predefined label
if ( context . getPartitionId ( ) == null ) { // if no label in current context , create one
context . setPartitionId ( context . getKnowledgeBase ( ) . createNewPartitionId ( ) ) ; } partition = context . getPartitionId ( ) ; } // set node whit the actual partition label
node . setPartitionId ( context , partition ) ; node . attach ( context ) ; // adds the node to the context list to track all added nodes
context . getNodes ( ) . add ( node ) ; } else { // shared node found
mergeNodes ( node , candidate ) ; // undo previous id assignment
context . releaseId ( candidate ) ; if ( partition == null && context . getPartitionId ( ) == null ) { partition = node . getPartitionId ( ) ; // if no label in current context , create one
context . setPartitionId ( partition ) ; } } node . addAssociation ( context , context . getRule ( ) ) ; return ( T ) node ;
|
public class SelectBase { /** * Adds a header to the top of the menu ; includes
* a close button by default . < br >
* < br >
* No header by default .
* @ param header */
public void setHeader ( final String header ) { } }
|
// Store or clear the menu's "header" attribute depending on whether a value was supplied.
if (header == null) {
    attrMixin.removeAttribute(HEADER);
} else {
    attrMixin.setAttribute(HEADER, header);
}
|
public class LocalResponseHandler { /** * { @ inheritDoc } */
@ Override public void handle ( Response response ) { } }
|
if ( LOG . equals ( response . getAction ( ) ) ) { logger . warn ( "Response executed for user:" + response . getUser ( ) . getUsername ( ) + ", Action: Increased Logging" ) ; } else if ( LOGOUT . equals ( response . getAction ( ) ) ) { logger . warn ( "Response executed for user <{}>, " + "Action: Logging out malicious account, delegating to configured user manager <{}>" , response . getUser ( ) . getUsername ( ) , userManager . getClass ( ) . getName ( ) ) ; userManager . logout ( response . getUser ( ) ) ; } else if ( DISABLE_USER . equals ( response . getAction ( ) ) ) { logger . warn ( "Response executed for user <{}>, " + "Action: Disabling malicious account, delegating to configured user manager <{}>" , response . getUser ( ) . getUsername ( ) , userManager . getClass ( ) . getName ( ) ) ; userManager . disable ( response . getUser ( ) ) ; } else if ( DISABLE_COMPONENT_FOR_SPECIFIC_USER . equals ( response . getAction ( ) ) ) { logger . warn ( "Response executed for user:" + response . getUser ( ) . getUsername ( ) + ", Action: Disabling Component for Specific User" ) ; // TODO : fill in real code for disabling component for specific user
} else if ( DISABLE_COMPONENT_FOR_ALL_USERS . equals ( response . getAction ( ) ) ) { logger . warn ( "Response executed for user:" + response . getUser ( ) . getUsername ( ) + ", Action: Disabling Component for All Users" ) ; // TODO : fill in real code for disabling component for all users
} else { throw new IllegalArgumentException ( "There has been a request for an action " + "that is not supported by this response handler. The requested action is: " + response . getAction ( ) ) ; }
|
public class MathExpressions { /** * Create a { @ code acos ( num ) } expression
* < p > Returns the principal value of the arc cosine of num , expressed in radians . < / p >
* @ param num numeric expression
* @ return acos ( num ) */
public static < A extends Number & Comparable < ? > > NumberExpression < Double > acos ( Expression < A > num ) { } }
|
return Expressions . numberOperation ( Double . class , Ops . MathOps . ACOS , num ) ;
|
public class CmsListSearchAction { /** * Returns a sublist of the given items , that match the given filter string . < p >
* @ param items the items to filter
* @ param filter the string to filter
* @ return the filtered sublist */
public List < CmsListItem > filter ( List < CmsListItem > items , String filter ) { } }
|
// A blank filter matches everything: return the input list unchanged.
if (CmsStringUtil.isEmptyOrWhitespaceOnly(filter)) {
    return items;
}
// Normalize the criteria once when matching is case-insensitive.
String filterCriteria = filter;
if (m_caseInSensitive) {
    filterCriteria = filter.toLowerCase();
}
List<CmsListItem> res = new ArrayList<CmsListItem>();
for (CmsListItem item : items) {
    // Guard against duplicate entries in the input list being added twice.
    if (res.contains(item)) {
        continue;
    }
    // An item matches if any configured column's value contains the criteria.
    for (CmsListColumnDefinition col : m_columns) {
        if (item.get(col.getId()) == null) {
            continue;
        }
        String columnValue = item.get(col.getId()).toString();
        if (m_caseInSensitive) {
            columnValue = columnValue.toLowerCase();
        }
        if (columnValue.contains(filterCriteria)) {
            res.add(item);
            break;
        }
    }
}
return res;
|
public class KeyChainGroup { /** * < p > Returns a fresh address for a given { @ link KeyChain . KeyPurpose } and of a given
* { @ link Script . ScriptType } . < / p >
* < p > This method is meant for when you really need a fallback address . Normally , you should be
* using { @ link # freshAddress ( KeyChain . KeyPurpose ) } or
* { @ link # currentAddress ( KeyChain . KeyPurpose ) } . < / p > */
public Address freshAddress ( KeyChain . KeyPurpose purpose , Script . ScriptType outputScriptType , long keyRotationTimeSecs ) { } }
|
// Resolve the active key chain for the requested script type, then derive a fresh
// key for the given purpose and wrap it as an address.
final DeterministicKeyChain activeChain = getActiveKeyChain(outputScriptType, keyRotationTimeSecs);
final Address freshAddress = Address.fromKey(params, activeChain.getKey(purpose), outputScriptType);
return freshAddress;
|
public class MethodTimeHandlerImpl { /** * Gets the total elapsed time across all modules .
* @ param performanceVOList
* @ return */
protected double getAllTotalTime ( List < MethodDto > performanceVOList , String threadName ) { } }
|
double totalTime = 0;
// When a thread name is supplied, prefer the entry-based total.
// NOTE(review): threadName is checked but never forwarded to
// getAllTotalTimeByEntry — confirm this is intended.
if (threadName != null && !threadName.isEmpty()) {
    totalTime = this.getAllTotalTimeByEntry(performanceVOList);
    if (totalTime > -1) {
        return totalTime;
    }
    // Entry-based lookup returned the negative sentinel; fall back to summing all modules.
    totalTime = 0;
}
// Sum the per-module totals.
for (MethodDto performanceVO : performanceVOList) {
    totalTime += performanceVO.getAllTime();
}
return totalTime;
|
public class DockerCLI { /** * If the server advertises CLI endpoint , returns its location . */
protected CliPort getCliTcpPort ( String jenkinsAddr ) throws IOException { } }
|
// Parse and validate the Jenkins address.
final URL url = new URL(jenkinsAddr);
final String host = url.getHost();
if (host == null || host.isEmpty()) {
    throw new IOException("Invalid URL: " + jenkinsAddr);
}
// Probe the server, preserving the original cause on connection failure.
final URLConnection connection = url.openConnection();
try {
    connection.connect();
} catch (IOException e) {
    throw (IOException) new IOException("Failed to connect to " + jenkinsAddr).initCause(e);
}
// Prefer the advertised CLI host header; fall back to the (possibly redirected) URL host.
String cliHost = connection.getHeaderField("X-Jenkins-CLI-Host");
if (cliHost == null) {
    cliHost = connection.getURL().getHost();
}
final String identity = connection.getHeaderField("X-Instance-Identity");
flushURLConnection(connection);
return new CliPort(new InetSocketAddress(cliHost, exposedPort), identity, 2);
|
public class NetworkConnectionServiceMessageCodec { /** * Encodes a network connection service message to bytes .
* @ param obj a message
* @ return bytes */
@ Override public byte [ ] encode ( final NetworkConnectionServiceMessage obj ) { } }
|
// Look up the codec registered for this connection factory and cache whether it streams.
final Codec codec = connFactoryMap.get(obj.getConnectionFactoryId()).getCodec();
Boolean streaming = isStreamingCodecMap.get(codec);
if (streaming == null) {
    streaming = codec instanceof StreamingCodec;
    isStreamingCodecMap.putIfAbsent(codec, streaming);
}
try (final ByteArrayOutputStream byteOut = new ByteArrayOutputStream()) {
    try (final DataOutputStream dataOut = new DataOutputStream(byteOut)) {
        // Header: factory id, source id, destination id, record count.
        dataOut.writeUTF(obj.getConnectionFactoryId());
        dataOut.writeUTF(obj.getSrcId().toString());
        dataOut.writeUTF(obj.getDestId().toString());
        dataOut.writeInt(obj.getData().size());
        if (streaming) {
            // Streaming codecs write each record directly onto the stream.
            for (final Object record : obj.getData()) {
                ((StreamingCodec) codec).encodeToStream(record, dataOut);
            }
        } else {
            // Non-streaming codecs yield byte arrays; length-prefix each record.
            final Iterable records = obj.getData();
            for (final Object record : records) {
                final byte[] encoded = codec.encode(record);
                dataOut.writeInt(encoded.length);
                dataOut.write(encoded);
            }
        }
        return byteOut.toByteArray();
    }
} catch (final IOException e) {
    throw new RuntimeException("IOException", e);
}
|
public class InternalSimpleAntlrParser { /** * InternalSimpleAntlr . g : 1229:1 : entryRuleReferenceOrLiteral returns [ EObject current = null ] : iv _ ruleReferenceOrLiteral = ruleReferenceOrLiteral EOF ; */
public final EObject entryRuleReferenceOrLiteral ( ) throws RecognitionException { } }
|
// ANTLR-generated entry rule: parses one complete ReferenceOrLiteral followed by EOF and
// returns the resulting model object (or the partial result when recognition fails while
// backtracking). Do not hand-edit the recognition logic.
EObject current = null ; EObject iv_ruleReferenceOrLiteral = null ; try { // InternalSimpleAntlr . g : 1230:2 : ( iv _ ruleReferenceOrLiteral = ruleReferenceOrLiteral EOF )
// InternalSimpleAntlr . g : 1231:2 : iv _ ruleReferenceOrLiteral = ruleReferenceOrLiteral EOF
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getReferenceOrLiteralRule ( ) ) ; } pushFollow ( FOLLOW_1 ) ; iv_ruleReferenceOrLiteral = ruleReferenceOrLiteral ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = iv_ruleReferenceOrLiteral ; } match ( input , EOF , FOLLOW_2 ) ; if ( state . failed ) return current ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
|
public class FileUtils { /** * Writes a map from symbols to file absolute paths to a file . Each line has a mapping with the key and value
* separated by a single tab . The file will have a trailing newline . */
public static void writeSymbolToFileMap ( Map < Symbol , File > symbolToFileMap , CharSink sink ) throws IOException { } }
|
// Delegates to the entry-based writer over the map's entry set; the tab-separated
// line format described in the Javadoc is produced by writeSymbolToFileEntries.
writeSymbolToFileEntries ( symbolToFileMap . entrySet ( ) , sink ) ;
|
public class BaseTransformerSource { /** * / * ( non - Javadoc )
* @ see org . apereo . portal . rendering . xslt . TransformerSource # getCacheKey ( javax . servlet . http . HttpServletRequest , javax . servlet . http . HttpServletResponse ) */
@ Override public final CacheKey getCacheKey ( HttpServletRequest request , HttpServletResponse response ) { } }
|
// Resolve the stylesheet used for this request and derive a cache key from it.
final Resource stylesheet = this.getStylesheetResource(request);
final Serializable transformerKey;
try {
    transformerKey = this.xmlUtilities.getStylesheetCacheKey(stylesheet);
} catch (TransformerConfigurationException e) {
    throw new RuntimeException("Failed to get Transformer for stylesheet: " + stylesheet, e);
} catch (IOException e) {
    throw new RuntimeException("Failed to load stylesheet: " + stylesheet, e);
}
// Combine the stylesheet description with its transformer-level key.
return CacheKey.build(stylesheet.getDescription(), transformerKey);
|
public class Streams { /** * Perform a forEach operation over the Stream capturing any elements and errors in the supplied consumers ,
* < pre >
* @ { code
* Subscription next = Streams . forEach ( Stream . of ( ( ) - > 1 , ( ) - > 2 , ( ) - > { throw new RuntimeException ( ) } , ( ) - > 4)
* . map ( Supplier : : getValue ) , System . out : : println , e - > e . printStackTrace ( ) ) ;
* System . out . println ( " processed ! " ) ;
* / / prints
* RuntimeException Stack Trace on System . err
* processed !
* < / pre >
* @ param stream - the Stream to consume data from
* @ param consumerElement To accept incoming elements from the Stream
* @ param consumerError To accept incoming processing errors from the Stream */
public static < T , X extends Throwable > void forEach ( final Stream < T > stream , final Consumer < ? super T > consumerElement , final Consumer < ? super Throwable > consumerError ) { } }
|
// Build the error-aware forEach task pair and immediately run its runnable half.
val forEachTask = FutureStreamUtils.forEachWithError(stream, consumerElement, consumerError);
forEachTask._2().run();
|
public class HttpMessage { /** * ZAP : Added getParamNames */
public String [ ] getParamNames ( ) { } }
|
// Collect parameter names from the URL query string, plus form parameters for POST requests.
Vector<String> names = new Vector<String>();
for (String name : this.getParamNameSet(HtmlParameter.Type.url)) {
    if (name != null) {
        names.add(name);
    }
}
if (getRequestHeader().getMethod().equalsIgnoreCase(HttpRequestHeader.POST)) {
    // Get the param names from the POST body.
    for (String name : this.getParamNameSet(HtmlParameter.Type.form)) {
        if (name != null) {
            names.add(name);
        }
    }
}
// Copy into the String[] return type expected by callers.
return names.toArray(new String[names.size()]);
|
public class BuildsInner { /** * Gets a link to download the build logs .
* @ param resourceGroupName The name of the resource group to which the container registry belongs .
* @ param registryName The name of the container registry .
* @ param buildId The build ID .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the BuildGetLogResultInner object */
public Observable < ServiceResponse < BuildGetLogResultInner > > getLogLinkWithServiceResponseAsync ( String resourceGroupName , String registryName , String buildId ) { } }
|
// Azure SDK generated method: validates required parameters, then calls the service
// endpoint with a pinned apiVersion and maps the raw Retrofit response into a typed
// ServiceResponse, surfacing any deserialization failure through the Observable.
if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( registryName == null ) { throw new IllegalArgumentException ( "Parameter registryName is required and cannot be null." ) ; } if ( buildId == null ) { throw new IllegalArgumentException ( "Parameter buildId is required and cannot be null." ) ; } final String apiVersion = "2018-02-01-preview" ; return service . getLogLink ( this . client . subscriptionId ( ) , resourceGroupName , registryName , buildId , apiVersion , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < BuildGetLogResultInner > > > ( ) { @ Override public Observable < ServiceResponse < BuildGetLogResultInner > > call ( Response < ResponseBody > response ) { try { ServiceResponse < BuildGetLogResultInner > clientResponse = getLogLinkDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
|
public class CommonOps_DDF5 { /** * Extracts the row from the matrix a .
* @ param a Input matrix
* @ param row Which row is to be extracted
* @ param out output . Storage for the extracted row . If null then a new vector will be returned .
* @ return The extracted row . */
public static DMatrix5 extractRow ( DMatrix5x5 a , int row , DMatrix5 out ) { } }
|
// Validate the row index up front; out-of-range rows are rejected with the same message.
if (row < 0 || row >= 5) {
    throw new IllegalArgumentException("Out of bounds row. row = " + row);
}
// Reuse the caller-provided storage or allocate a fresh vector.
DMatrix5 result = (out == null) ? new DMatrix5() : out;
switch (row) {
    case 0:
        result.a1 = a.a11; result.a2 = a.a12; result.a3 = a.a13; result.a4 = a.a14; result.a5 = a.a15;
        break;
    case 1:
        result.a1 = a.a21; result.a2 = a.a22; result.a3 = a.a23; result.a4 = a.a24; result.a5 = a.a25;
        break;
    case 2:
        result.a1 = a.a31; result.a2 = a.a32; result.a3 = a.a33; result.a4 = a.a34; result.a5 = a.a35;
        break;
    case 3:
        result.a1 = a.a41; result.a2 = a.a42; result.a3 = a.a43; result.a4 = a.a44; result.a5 = a.a45;
        break;
    default:
        result.a1 = a.a51; result.a2 = a.a52; result.a3 = a.a53; result.a4 = a.a54; result.a5 = a.a55;
        break;
}
return result;
|
public class PmcNxmlParser { /** * Use singleton design pattern , since JAXBContext is expensive to create */
private JAXBContext getSingleton ( ) throws JAXBException { } }
|
// Lazily build the JAXBContext on first use (expensive), then reuse it.
// Note: field name retains the original "jcSingelton" spelling.
if (jcSingelton != null) {
    return jcSingelton;
}
jcSingelton = JAXBContext.newInstance(Article.class.getPackage().getName());
return jcSingelton;
|
public class ComponentPropertyResolver { /** * Get property .
* @ param name Property name
* @ param type Property type
* @ param < T > Parameter type
* @ return Property value or null if not set */
public @ Nullable < T > T get ( @ NotNull String name , @ NotNull Class < T > type ) { } }
|
return getForComponent ( component , name , type ) ;
|
public class PowerShell { /** * Creates a session in PowerShell console an returns an instance which allows
* to execute commands in PowerShell context . < br >
* This method allows defining a PowerShell executable path different from the default
* @ param customPowerShellExecutablePath the path of powershell executable . If you are using
* the default installation path , call { @ link # openSession ( ) } method instead
* @ return an instance of the class
* @ throws PowerShellNotAvailableException if PowerShell is not installed in the system */
public static PowerShell openSession ( String customPowerShellExecutablePath ) throws PowerShellNotAvailableException { } }
|
PowerShell session = new PowerShell();
// Start with the default configuration before the executable is launched.
session.configuration(null);
// Resolve the executable: explicit path wins, otherwise pick the OS default.
String executablePath;
if (customPowerShellExecutablePath != null) {
    executablePath = customPowerShellExecutablePath;
} else if (OSDetector.isWindows()) {
    executablePath = DEFAULT_WIN_EXECUTABLE;
} else {
    executablePath = DEFAULT_LINUX_EXECUTABLE;
}
// Note: "initalize" is the existing (misspelled) internal method name.
return session.initalize(executablePath);
|
public class FastJsonWriter { /** * { @ inheritDoc } */
@ Override public FastJsonWriter rawValue ( Object value ) { } }
|
// A null raw value is emitted as a JSON null literal.
if (value == null) {
    return nullValue();
}
// Flush any pending property name, prepare the writer, then append the raw text verbatim.
writeDeferredName();
beforeValue(false);
out.append(String.valueOf(value));
return this;
|
public class DynamicCamera { /** * Creates a zoom value based on the result of { @ link MapboxMap # getCameraForLatLngBounds ( LatLngBounds , int [ ] ) } .
* 0 zoom is the world view , while 22 ( default max threshold ) is the closest you can position
* the camera to the map .
* @ param routeInformation for current location and progress
* @ return zoom within set min / max bounds */
private double createZoom ( RouteInformation routeInformation ) { } }
|
// Compute the candidate camera position for the current location/progress.
CameraPosition position = createCameraPosition(routeInformation.location(), routeInformation.routeProgress());
// Clamp the computed zoom into the supported [MIN_CAMERA_ZOOM, MAX_CAMERA_ZOOM] range.
return Math.max(MIN_CAMERA_ZOOM, Math.min(MAX_CAMERA_ZOOM, position.zoom));
|
public class GlobalPasswordSettings { /** * Set the global password constraint list .
* @ param aPasswordConstraintList
* The list to be set . May not be < code > null < / code > . */
public static void setPasswordConstraintList ( @ Nonnull final IPasswordConstraintList aPasswordConstraintList ) { } }
|
ValueEnforcer.notNull(aPasswordConstraintList, "PasswordConstraintList");
// Defensive copy so later mutation of the argument cannot affect the global state.
final IPasswordConstraintList aClonedConstraints = aPasswordConstraintList.getClone();
// Swap the global list under the write lock.
s_aRWLock.writeLocked(() -> s_aPasswordConstraintList = aClonedConstraints);
LOGGER.info("Set global password constraints to " + aClonedConstraints);
|
public class Sizes { /** * Returns a common size for all kinds of Request implementations . */
public static int minimumRequestSize ( Request request ) { } }
|
// Every frame carries the fixed protocol header (9 bytes for protocol v3+, flags included).
int total = FrameCodec.headerEncodedSize();
// Custom payload is not supported in v3, but assume the user won't have one set if
// they use that version.
if (!request.getCustomPayload().isEmpty()) {
    total += PrimitiveSizes.sizeOfBytesMap(request.getCustomPayload());
}
return total;
|
public class SerializeGrammarTask { /** * Implementation . */
private static Node serializeEntry ( Entry e , DocumentFactory fac ) { } }
|
// Preconditions: both the entry and the document factory are required.
if (e == null) {
    throw new IllegalArgumentException("Argument 'e [Entry]' cannot be null.");
}
if (fac == null) {
    throw new IllegalArgumentException("Argument 'fac' cannot be null.");
}
Element entryElement = fac.createElement("entry");
entryElement.addAttribute("type", e.getType().name());
// Name.
Element nameElement = fac.createElement("name");
nameElement.setText(e.getName());
entryElement.add(nameElement);
// Deprecation (optional): version attribute plus cloned description nodes.
if (e.isDeprecated()) {
    Element deprecation = fac.createElement("deprecation");
    deprecation.addAttribute("version", e.getDeprecation().getVersion());
    for (Element n : e.getDeprecation().getDescription()) {
        deprecation.add((Element) n.clone());
    }
    entryElement.add(deprecation);
}
// Description: clone the existing one, or emit an empty placeholder element.
Element description;
if (e.getDescription() != null) {
    description = (Element) e.getDescription().clone();
} else {
    description = fac.createElement("description");
}
entryElement.add(description);
// Formula.
entryElement.add(serializeFormula(e.getFormula(), e.getMappings(), fac));
// Examples: cloned so the source nodes stay attached to their own document.
for (Node example : e.getExamples()) {
    entryElement.add((Node) example.clone());
}
return entryElement;
|
public class AbstractBanDependencies { /** * Execute the rule .
* @ param helper the helper
* @ throws EnforcerRuleException the enforcer rule exception */
public void execute ( EnforcerRuleHelper helper ) throws EnforcerRuleException { } }
|
// Resolve the current MavenProject from the expression-evaluation context.
MavenProject project = null;
try {
    project = (MavenProject) helper.evaluate("${project}");
} catch (ExpressionEvaluationException eee) {
    throw new EnforcerRuleException("Unable to retrieve the MavenProject: ", eee);
}
// Obtain a dependency graph builder; fall back to the "maven2" variant when the
// Maven3 builder cannot be initiated (typically a ClassNotFoundException underneath).
try {
    graphBuilder = (DependencyGraphBuilder) helper.getComponent(DependencyGraphBuilder.class);
} catch (ComponentLookupException e) {
    try {
        graphBuilder = (DependencyGraphBuilder) helper.getComponent(DependencyGraphBuilder.class.getName(), "maven2");
    } catch (ComponentLookupException e1) {
        throw new EnforcerRuleException("Unable to lookup DependencyGraphBuilder: ", e);
    }
}
// Gather the dependencies to verify and check them against the ban rules.
Set<Artifact> dependencies = getDependenciesToCheck(project);
Set<Artifact> bannedFound = checkDependencies(dependencies, helper.getLog());
// If any banned dependency was found, fail the check listing all of them.
if (bannedFound != null && !bannedFound.isEmpty()) {
    String message = getMessage();
    StringBuilder buf = new StringBuilder();
    if (message != null) {
        buf.append(message).append('\n');
    }
    for (Artifact artifact : bannedFound) {
        buf.append(getErrorMessage(artifact));
    }
    message = buf.toString() + "Use 'mvn dependency:tree' to locate the source of the banned dependencies.";
    throw new EnforcerRuleException(message);
}
|
public class SaslMessage { /** * < code > optional . alluxio . grpc . sasl . SaslMessageType messageType = 1 ; < / code > */
public alluxio . grpc . SaslMessageType getMessageType ( ) { } }
|
// Map the stored protobuf enum number back to the enum constant; unknown values
// fall back to CHALLENGE.
final alluxio.grpc.SaslMessageType type = alluxio.grpc.SaslMessageType.valueOf(messageType_);
if (type == null) {
    return alluxio.grpc.SaslMessageType.CHALLENGE;
}
return type;
|
public class DBTablePropertySheet { /** * GEN - END : initComponents */
private void tfConversionStrategyClassKeyTyped ( java . awt . event . KeyEvent evt ) // GEN - FIRST : event _ tfConversionStrategyClassKeyTyped
{ } }
|
// GEN - HEADEREND : event _ tfConversionStrategyClassKeyTyped
// Revert on ESC
// NOTE(review): getKeyChar() (a char) is compared against KeyEvent.VK_ESCAPE (an int
// virtual-key code); both happen to be 0x1B for ESC so the check works — confirm intended.
if ( evt . getKeyChar ( ) == KeyEvent . VK_ESCAPE ) { this . tfConversionStrategyClass . setText ( aTable . getConversionStrategyClass ( ) ) ; }
|
public class XMLEventParser { /** * Calls back the error function if an error occurs
* @ param pe */
private void error ( PageException pe ) { } }
|
// With no error handler (UDF) configured, surface the failure as an unchecked exception.
// Otherwise resolve the page context and invoke the handler with the error's catch block.
// NOTE(review): a PageException thrown by the handler itself is silently swallowed —
// presumably deliberate best-effort behavior; confirm before changing.
if ( error == null ) throw new PageRuntimeException ( pe ) ; try { pc = ThreadLocalPageContext . get ( pc ) ; error . call ( pc , new Object [ ] { pe . getCatchBlock ( pc . getConfig ( ) ) } , false ) ; } catch ( PageException e ) { }
|
public class LocPathIterator { /** * Initialize the context values for this expression
* after it is cloned .
* @ param context The XPath runtime context for this
* transformation . */
public void setRoot ( int context , Object environment ) { } }
|
// Re-initializes this iterator's runtime state after cloning: binds the context node,
// the XPath runtime context, and the DTM, then resets iteration bookkeeping
// (last fetched node, found-last flag, position, cached length). Statement order matters;
// left byte-identical.
m_context = context ; XPathContext xctxt = ( XPathContext ) environment ; m_execContext = xctxt ; m_cdtm = xctxt . getDTM ( context ) ; m_currentContextNode = context ; // only if top level ?
// Yech , shouldn ' t have to do this . - sb
// Top-level iterators additionally capture the current variable stack frame.
if ( null == m_prefixResolver ) m_prefixResolver = xctxt . getNamespaceContext ( ) ; m_lastFetched = DTM . NULL ; m_foundLast = false ; m_pos = 0 ; m_length = - 1 ; if ( m_isTopLevel ) this . m_stackFrame = xctxt . getVarStack ( ) . getStackFrame ( ) ; // reset ( ) ;
|
public class UptimeCheckServiceClient { /** * Deletes an uptime check configuration . Note that this method will fail if the uptime check
* configuration is referenced by an alert policy or other dependent configs that would be
* rendered invalid by the deletion .
* < p > Sample code :
* < pre > < code >
* try ( UptimeCheckServiceClient uptimeCheckServiceClient = UptimeCheckServiceClient . create ( ) ) {
* UptimeCheckConfigName name = UptimeCheckConfigName . of ( " [ PROJECT ] " , " [ UPTIME _ CHECK _ CONFIG ] " ) ;
* uptimeCheckServiceClient . deleteUptimeCheckConfig ( name . toString ( ) ) ;
* < / code > < / pre >
* @ param name The uptime check configuration to delete . The format is
* ` projects / [ PROJECT _ ID ] / uptimeCheckConfigs / [ UPTIME _ CHECK _ ID ] ` .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final void deleteUptimeCheckConfig ( String name ) { } }
|
DeleteUptimeCheckConfigRequest request = DeleteUptimeCheckConfigRequest . newBuilder ( ) . setName ( name ) . build ( ) ; deleteUptimeCheckConfig ( request ) ;
|
public class WorkflowsInner { /** * Creates or updates a workflow .
* @ param resourceGroupName The resource group name .
* @ param workflowName The workflow name .
* @ param workflow The workflow .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the WorkflowInner object */
public Observable < WorkflowInner > createOrUpdateAsync ( String resourceGroupName , String workflowName , WorkflowInner workflow ) { } }
|
// Delegates to the ServiceResponse variant and unwraps the body for callers that only
// need the WorkflowInner payload (Azure SDK generated style).
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , workflowName , workflow ) . map ( new Func1 < ServiceResponse < WorkflowInner > , WorkflowInner > ( ) { @ Override public WorkflowInner call ( ServiceResponse < WorkflowInner > response ) { return response . body ( ) ; } } ) ;
|
public class GeometryEngine { /** * Indicates if one geometry is within another geometry .
* See OperatorWithin .
* @ param geometry1
* The base geometry that is tested for within relationship to
* the other geometry .
* @ param geometry2
* The comparison geometry that is tested for the contains
* relationship to the other geometry .
* @ param spatialReference
* The spatial reference of the geometries .
* @ return TRUE if the first geometry is within the other geometry . */
public static boolean within ( Geometry geometry1 , Geometry geometry2 , SpatialReference spatialReference ) { } }
|
// Look up the Within operator from the factory and evaluate it directly
// (no progress tracker).
OperatorWithin withinOperator = (OperatorWithin) factory.getOperator(Operator.Type.Within);
return withinOperator.execute(geometry1, geometry2, spatialReference, null);
|
public class BaseExchangeRateProvider { /** * Checks if an { @ link javax . money . convert . ExchangeRate } between two { @ link javax . money . CurrencyUnit } is
* available from this provider . This method should check , if a given rate
* is < i > currently < / i > defined .
* @ param baseCode the base currency code
* @ param termCode the terminal / target currency code
* @ return { @ code true } , if such an { @ link javax . money . convert . ExchangeRate } is currently
* defined .
* @ throws javax . money . MonetaryException if one of the currency codes passed is not valid . */
public boolean isAvailable ( String baseCode , String termCode ) { } }
|
return isAvailable ( Monetary . getCurrency ( baseCode ) , Monetary . getCurrency ( termCode ) ) ;
|
public class PartnerLogData { /** * Write the recovery data to to the partner log if it has not already
* been written unless we are closing down or the RA is being terminated .
* This will only occur in main - line calls as any recovered data will
* already be written to disk . We need to check if the terminate flag
* is set either because of shutdown or because an RA is stopping , etc . */
public synchronized void logRecoveryEntry ( ) throws Exception { } }
|
if (tc.isEntryEnabled()) Tr.entry(tc, "logRecoveryEntry", this);
// Recovered data is already on disk; only main-line calls need to write here.
if (!_loggedToDisk) {
    // Refuse to log new recovery data during shutdown / RA termination. (d172471)
    if (_terminating) {
        Tr.warning(tc, "WTRN0084_RESOURCE_ENDING");
        if (tc.isEntryEnabled()) Tr.exit(tc, "logRecoveryEntry", "Terminating");
        throw new IllegalStateException();
    }
    // Without serialized data there is nothing we can persist.
    if (_serializedLogData == null) {
        Tr.warning(tc, "WTRN0039_SERIALIZE_FAILED");
        if (tc.isEntryEnabled()) Tr.exit(tc, "logRecoveryEntry", "NotSerializable");
        throw new NotSerializableException("XAResource recovery information not serializable");
    }
    try {
        // Pre-log validation hook (only used by XARecoveryData).
        preLogData();
        // Write the partner log record itself.
        logData(_sectionId);
    } catch (Exception e) {
        FFDCFilter.processException(e, "com.ibm.ws.Transaction.JTA.XARecoveryData.logRecoveryEntry", "284", this);
        if (tc.isEntryEnabled()) Tr.exit(tc, "logRecoveryEntry", e);
        throw e;
    }
}
if (tc.isEntryEnabled()) Tr.exit(tc, "logRecoveryEntry");
|
public class SchedulerStateManagerAdaptor { /** * Get the metricscache location for the given topology
* @ return MetricsCacheLocation */
public TopologyMaster . MetricsCacheLocation getMetricsCacheLocation ( String topologyName ) { } }
|
// Issues the state-manager lookup (no watcher callback, hence null) and blocks
// via awaitResult until the MetricsCacheLocation is available.
return awaitResult ( delegate . getMetricsCacheLocation ( null , topologyName ) ) ;
|
public class SiteProperties { /** * Returns the list of available target IDs . */
public static String [ ] getAvailableTargetIds ( ) { } }
|
// Returns null when no configuration is currently active.
Configuration config = ConfigUtils.getCurrentConfig();
return (config == null) ? null : config.getStringArray(AVAILABLE_TARGET_IDS_CONFIG_KEY);
|
public class Logger { /** * Log a message and a throwable with level { @ link LogLevel # WARN } .
* @ param msg the message to log
* @ param tr the throwable to be log */
public void w ( String msg , Throwable tr ) { } }
|
// Forward to the central level-aware printer at WARN severity.
println ( LogLevel . WARN , msg , tr ) ;
|
public class InMemoryIDistanceIndex { /** * Seek an iterator to the desired position , using binary search .
* @ param index Index to search
* @ param iter Iterator
* @ param val Distance to search to */
protected static void binarySearch ( ModifiableDoubleDBIDList index , DoubleDBIDListIter iter , double val ) { } }
|
// Binary search . TODO : move this into the DoubleDBIDList class .
// Finds the leftmost position whose distance equals val (exact match stops early;
// otherwise `left` ends at the insertion point) and seeks the iterator there, clamping
// to the last element when val exceeds all stored distances.
// NOTE(review): an empty index would yield left == -1 and seek(-1) — confirm callers
// never pass an empty list.
int left = 0 , right = index . size ( ) ; while ( left < right ) { final int mid = ( left + right ) >>> 1 ; final double curd = iter . seek ( mid ) . doubleValue ( ) ; if ( val < curd ) { right = mid ; } else if ( val > curd ) { left = mid + 1 ; } else { left = mid ; break ; } } if ( left >= index . size ( ) ) { -- left ; } iter . seek ( left ) ;
|
public class Encoder { /** * Get number of data bytes and number of error correction bytes for block id " blockID " . Store
* the result in " numDataBytesInBlock " , and " numECBytesInBlock " . See table 12 in 8.5.1 of
* JISX0510:2004 ( p . 30) */
static void getNumDataBytesAndNumECBytesForBlockID ( int numTotalBytes , int numDataBytes , int numRSBlocks , int blockID , int [ ] numDataBytesInBlock , int [ ] numECBytesInBlock ) throws WriterException { } }
|
// Reject block ids outside the configured number of RS blocks.
if (blockID >= numRSBlocks) {
    throw new WriterException("Block ID too large");
}
// RS blocks are split into two groups; group 2 holds the remainder blocks, each of
// which carries one extra byte. Worked example from JISX0510:2004 table 12
// (196 total bytes, 66 data bytes, 5 blocks):
//   group2 blocks = 196 % 5 = 1, group1 blocks = 5 - 1 = 4
int group2Blocks = numTotalBytes % numRSBlocks;
int group1Blocks = numRSBlocks - group2Blocks;
//   group1 total = 196 / 5 = 39, group2 total = 39 + 1 = 40
int group1TotalBytes = numTotalBytes / numRSBlocks;
int group2TotalBytes = group1TotalBytes + 1;
//   group1 data = 66 / 5 = 13, group2 data = 13 + 1 = 14
int group1DataBytes = numDataBytes / numRSBlocks;
int group2DataBytes = group1DataBytes + 1;
//   EC bytes per block: 39 - 13 = 26 and 40 - 14 = 26
int group1EcBytes = group1TotalBytes - group1DataBytes;
int group2EcBytes = group2TotalBytes - group2DataBytes;
// Sanity checks: both groups must agree on EC bytes, block counts must add up, and
// the per-group byte totals must reproduce numTotalBytes exactly.
if (group1EcBytes != group2EcBytes) {
    throw new WriterException("EC bytes mismatch");
}
if (numRSBlocks != group1Blocks + group2Blocks) {
    throw new WriterException("RS blocks mismatch");
}
if (numTotalBytes != ((group1DataBytes + group1EcBytes) * group1Blocks) + ((group2DataBytes + group2EcBytes) * group2Blocks)) {
    throw new WriterException("Total bytes mismatch");
}
// Blocks with id < group1Blocks belong to group 1; the rest belong to group 2.
if (blockID < group1Blocks) {
    numDataBytesInBlock[0] = group1DataBytes;
    numECBytesInBlock[0] = group1EcBytes;
} else {
    numDataBytesInBlock[0] = group2DataBytes;
    numECBytesInBlock[0] = group2EcBytes;
}
|
public class Watch { /** * Creates a new Watch instance that listens on Queries .
* @ param query The query used for this watch .
* @ return A newly created Watch instance . */
static Watch forQuery ( Query query ) { } }
|
Target . Builder target = Target . newBuilder ( ) ; target . setQuery ( QueryTarget . newBuilder ( ) . setStructuredQuery ( query . buildQuery ( ) ) . setParent ( query . getResourcePath ( ) . getParent ( ) . getName ( ) ) . build ( ) ) ; target . setTargetId ( WATCH_TARGET_ID ) ; return new Watch ( ( FirestoreImpl ) query . getFirestore ( ) , query , target . build ( ) ) ;
|
public class StyleUtilities { /** * Creates a default { @ link Style } for a polygon .
* @ return the default style . */
public static Style createDefaultPolygonStyle ( ) { } }
|
FeatureTypeStyle featureTypeStyle = sf . createFeatureTypeStyle ( ) ; featureTypeStyle . rules ( ) . add ( createDefaultPolygonRule ( ) ) ; Style style = sf . createStyle ( ) ; style . featureTypeStyles ( ) . add ( featureTypeStyle ) ; return style ;
|
public class BatchItemRequestSerializer { /** * Method to get the Jackson object mapper
* @ return ObjectMapper the object mapper */
private ObjectMapper getObjectMapper ( ) { } }
|
ObjectMapper mapper = new ObjectMapper ( ) ; AnnotationIntrospector primary = new JacksonAnnotationIntrospector ( ) ; AnnotationIntrospector secondary = new JaxbAnnotationIntrospector ( ) ; AnnotationIntrospector pair = new AnnotationIntrospectorPair ( primary , secondary ) ; mapper . setAnnotationIntrospector ( pair ) ; mapper . setSerializationInclusion ( Include . NON_NULL ) ; return mapper ;
|
public class LRSubsetSearch { /** * Greedily apply a series of n moves generated by the given neighbourhood , where the move yielding the most
* improvement ( or smallest decline ) is iteratively selected . Returns the actual number of performed moves ,
* which is always lower than or equal to the requested number of moves . It may be strictly lower in case
* no more moves can be applied at some point .
* @ param n number of requested moves
* @ return actual number of applied moves , lower than or equal to requested number of moves */
private int greedyMoves ( int n , SubsetNeighbourhood neigh ) { } }
|
// Greedily apply up to n moves: each iteration scans the whole neighbourhood,
// picks the move with the largest delta (possibly negative), applies it, and
// repeats. Stops early when no valid move remains.
int applied = 0 ;
boolean cont = true ;
while ( applied < n && cont ) {
    // go through all moves to find the best one
    Move < ? super SubsetSolution > bestMove = null ;
    double bestDelta = - Double . MAX_VALUE , delta ;
    Evaluation newEvaluation , bestEvaluation = null ;
    SubsetValidation newValidation , bestValidation = null ;
    for ( Move < ? super SubsetSolution > move : neigh . getAllMoves ( getCurrentSolution ( ) ) ) {
        // validate move (IMPORTANT: ignore current subset size)
        newValidation = getProblem ( ) . validate ( move , getCurrentSolution ( ) , getCurrentSolutionValidation ( ) ) ;
        if ( newValidation . passed ( false ) ) {
            // evaluate move
            newEvaluation = getProblem ( ) . evaluate ( move , getCurrentSolution ( ) , getCurrentSolutionEvaluation ( ) ) ;
            // compute delta
            delta = computeDelta ( newEvaluation , getCurrentSolutionEvaluation ( ) ) ;
            // new best move? (strict > keeps the first best encountered on ties)
            if ( delta > bestDelta ) {
                bestDelta = delta ;
                bestMove = move ;
                bestEvaluation = newEvaluation ;
                bestValidation = newValidation ;
            }
        }
    }
    // apply best move, if any
    if ( bestMove != null ) {
        // apply move
        bestMove . apply ( getCurrentSolution ( ) ) ;
        // update current and best solution (NOTE: best solution will only be updated
        // if it is fully valid, also taking into account the current subset size)
        updateCurrentAndBestSolution ( getCurrentSolution ( ) , bestEvaluation , bestValidation ) ;
        // increase counter
        applied ++ ;
    } else {
        // no valid move found, stop
        cont = false ;
    }
}
// return actual number of applied moves
return applied ;
|
public class MariaDbConnection { /** * < p > Creates a default < code > PreparedStatement < / code > object capable of returning the
* auto - generated keys designated by the given array . This array contains the indexes of the
* columns in the target table that contain the auto - generated keys that should be made available .
* The driver will ignore the array if the SQL statement is not an < code > INSERT < / code > statement ,
* or an SQL statement able to return auto - generated keys ( the list of such statements is
* vendor - specific ) . < / p >
* < p > An SQL statement with or without IN parameters can be pre - compiled and stored in a
* < code > PreparedStatement < / code > object . This object can
* then be used to efficiently execute this statement multiple times . < / p >
* < p > < B > Note : < / B > This method is optimized for handling parametric SQL statements that benefit
* from precompilation . If the driver supports precompilation , the method
* < code > prepareStatement < / code > will send the statement to the database for precompilation . Some
* drivers may not support precompilation . In this case , the statement may not be sent to the
* database until the < code > PreparedStatement < / code > object is executed . This has no direct
* effect on users ; however , it does affect which methods throw certain SQLExceptions . < / p >
* Result sets created using the returned < code > PreparedStatement < / code > object will by default be
* type < code > TYPE _ FORWARD _ ONLY < / code > and have a concurrency level of
* < code > CONCUR _ READ _ ONLY < / code > . The holdability of the created result sets can be determined by
* calling { @ link # getHoldability } . < / p >
* @ param sql an SQL statement that may contain one or more ' ? ' IN parameter
* placeholders
* @ param columnIndexes an array of column indexes indicating the columns that should be returned
* from the inserted row or rows
* @ return a new < code > PreparedStatement < / code > object , containing the pre - compiled statement ,
* that is capable of returning the auto - generated keys designated by the given array of
* column indexes
* @ throws SQLException if a database access error occurs or this method is
* called on a closed connection */
public PreparedStatement prepareStatement ( final String sql , final int [ ] columnIndexes ) throws SQLException { } }
|
// NOTE(review): the columnIndexes argument is intentionally not inspected here —
// this delegates to the RETURN_GENERATED_KEYS variant, so all auto-generated
// keys are returned regardless of the requested column indexes.
return prepareStatement ( sql , Statement . RETURN_GENERATED_KEYS ) ;
|
public class Content { /** * Gets the hlsIngestErrors value for this Content .
* @ return hlsIngestErrors * The list of any errors that occurred during the most recent
* DAI ingestion process of the HLS
* media . This attribute is read - only and will be null
* if the { @ link # hlsIngestStatus } is { @ link
* DaiIngestStatus # STATUS _ SUCCESS } or if the content
* is not eligible for dynamic ad insertion or
* if the content does not have HLS media . */
public com . google . api . ads . admanager . axis . v201805 . DaiIngestError [ ] getHlsIngestErrors ( ) { } }
|
// Plain accessor for the read-only hlsIngestErrors field; may return null
// (per the javadoc: null when ingestion succeeded or DAI/HLS does not apply).
return hlsIngestErrors ;
|
public class SymoplibParser { /** * Get the space group for the given international short name , using
* the PDB format , e . g . ' P 21 21 21 ' or ' C 1 c 1'
* @ param shortName
* @ return the SpaceGroup or null if the shortName is not valid */
public static SpaceGroup getSpaceGroup ( String shortName ) { } }
|
if ( shortName == null || shortName . length ( ) <= 2 ) return null ; // PDB uses group " P 1 - " for 13 racemic mixture entries ( as of Sep2011 ) , e . g . 3e7r
// they call the space group " P 1 - " unusually ( symop . lib and everyone else call it " P - 1 " )
if ( shortName . equals ( "P 1-" ) ) shortName = "P -1" ; // enantiomorphic space groups contain sometime letters indicating glide planes which should always be lower case
// in some PDB entries like 4gwv they are in upper case , we fix that here : convert any non - first letter to lower case
shortName = shortName . substring ( 0 , 1 ) + shortName . substring ( 1 ) . toLowerCase ( ) ; return name2sgs . get ( shortName ) ;
|
public class DomainValidation { /** * A list of email addresses that ACM used to send domain validation emails .
* @ param validationEmails
* A list of email addresses that ACM used to send domain validation emails . */
public void setValidationEmails ( java . util . Collection < String > validationEmails ) { } }
|
if ( validationEmails == null ) { this . validationEmails = null ; return ; } this . validationEmails = new java . util . ArrayList < String > ( validationEmails ) ;
|
public class ConnectResponse { /** * Returns a textual representation of the status code .
* @ return short description of status as string */
public String getStatusString ( ) { } }
|
switch ( status ) { case ErrorCodes . NO_ERROR : return "the connection was established successfully" ; case ErrorCodes . CONNECTION_TYPE : return "the requested connection type is not supported" ; case ErrorCodes . CONNECTION_OPTION : return "one or more connection options are not supported" ; case ErrorCodes . NO_MORE_CONNECTIONS : return "could not accept new connection (maximum reached)" ; case ErrorCodes . TUNNELING_LAYER : return "the requested tunneling layer is not supported" ; default : return "unknown status" ; }
|
public class ClassWriter { /** * Emit a class file for a given class .
* @ param c The class from which a class file is generated . */
public JavaFileObject writeClass ( ClassSymbol c ) throws IOException , PoolOverflow , StringOverflow { } }
|
// Binary name of the class file: a module-info class uses its simple name,
// any other class uses the flat (package-qualified) name.
String name = ( c . owner . kind == MDL ? c . name : c . flatname ) . toString ( ) ;
Location outLocn ;
if ( multiModuleMode ) {
    // Multi-module mode: resolve the per-module CLASS_OUTPUT location.
    ModuleSymbol msym = c . owner . kind == MDL ? ( ModuleSymbol ) c . owner : c . packge ( ) . modle ;
    outLocn = fileManager . getLocationForModule ( CLASS_OUTPUT , msym . name . toString ( ) ) ;
} else {
    outLocn = CLASS_OUTPUT ;
}
JavaFileObject outFile = fileManager . getJavaFileForOutput ( outLocn , name , JavaFileObject . Kind . CLASS , c . sourcefile ) ;
OutputStream out = outFile . openOutputStream ( ) ;
try {
    writeClassFile ( out , c ) ;
    if ( verbose )
        log . printVerbose ( "wrote.file" , outFile ) ;
    // Null out 'out' after a successful close so the finally block can tell
    // the success path from the exception path.
    out . close ( ) ;
    out = null ;
} finally {
    if ( out != null ) {
        // if we are propagating an exception, close and delete the partial file
        out . close ( ) ;
        outFile . delete ( ) ;
        outFile = null ;
    }
}
return outFile ; // may be null if write failed
|
public class ChainReducer { /** * Adds a Mapper class to the chain job ' s JobConf .
* It has to be specified how key and values are passed from one element of
* the chain to the next , by value or by reference . If a Mapper leverages the
* assumed semantics that the key and values are not modified by the collector
* ' by value ' must be used . If the Mapper does not expect this semantics , as
* an optimization to avoid serialization and deserialization ' by reference '
* can be used .
* For the added Mapper the configuration given for it ,
* < code > mapperConf < / code > , have precedence over the job ' s JobConf . This
* precedence is in effect when the task is running .
* IMPORTANT : There is no need to specify the output key / value classes for the
* ChainMapper , this is done by the addMapper for the last mapper in the chain
* @ param job chain job ' s JobConf to add the Mapper class .
* @ param klass the Mapper class to add .
* @ param inputKeyClass mapper input key class .
* @ param inputValueClass mapper input value class .
* @ param outputKeyClass mapper output key class .
* @ param outputValueClass mapper output value class .
* @ param byValue indicates if key / values should be passed by value
* to the next Mapper in the chain , if any .
* @ param mapperConf a JobConf with the configuration for the Mapper
* class . It is recommended to use a JobConf without default values using the
* < code > JobConf ( boolean loadDefaults ) < / code > constructor with FALSE . */
public static < K1 , V1 , K2 , V2 > void addMapper ( JobConf job , Class < ? extends Mapper < K1 , V1 , K2 , V2 > > klass , Class < ? extends K1 > inputKeyClass , Class < ? extends V1 > inputValueClass , Class < ? extends K2 > outputKeyClass , Class < ? extends V2 > outputValueClass , boolean byValue , JobConf mapperConf ) { } }
|
// The chain job's overall output key/value classes track the last mapper added.
job . setOutputKeyClass ( outputKeyClass ) ;
job . setOutputValueClass ( outputValueClass ) ;
// Register the mapper in the chain. NOTE(review): the leading 'false' flag
// presumably marks this as the reduce-side (ChainReducer) chain rather than
// the map-side chain — confirm against Chain.addMapper's signature.
Chain . addMapper ( false , job , klass , inputKeyClass , inputValueClass , outputKeyClass , outputValueClass , byValue , mapperConf ) ;
|
public class BotmResourceUtil { public static String readText ( Reader reader ) { } }
|
BufferedReader in = new BufferedReader ( reader ) ; StringBuilder out = new StringBuilder ( 100 ) ; try { try { char [ ] buf = new char [ 8192 ] ; int n ; while ( ( n = in . read ( buf ) ) >= 0 ) { out . append ( buf , 0 , n ) ; } } finally { in . close ( ) ; } } catch ( IOException e ) { String msg = "The IOException occurred: reader=" + reader ; throw new IllegalStateException ( msg , e ) ; } return out . toString ( ) ;
|
public class SheetRenderer { /** * Encode hidden input fields
* @ param responseWriter
* @ param sheet
* @ param clientId
* @ throws IOException */
private void encodeHiddenInputs ( final ResponseWriter responseWriter , final Sheet sheet , final String clientId ) throws IOException { } }
|
responseWriter . startElement ( "input" , null ) ; responseWriter . writeAttribute ( "id" , clientId + "_input" , "id" ) ; responseWriter . writeAttribute ( "name" , clientId + "_input" , "name" ) ; responseWriter . writeAttribute ( "type" , "hidden" , null ) ; responseWriter . writeAttribute ( "value" , "" , null ) ; responseWriter . endElement ( "input" ) ; responseWriter . startElement ( "input" , null ) ; responseWriter . writeAttribute ( "id" , clientId + "_focus" , "id" ) ; responseWriter . writeAttribute ( "name" , clientId + "_focus" , "name" ) ; responseWriter . writeAttribute ( "type" , "hidden" , null ) ; if ( sheet . getFocusId ( ) == null ) { responseWriter . writeAttribute ( "value" , "" , null ) ; } else { responseWriter . writeAttribute ( "value" , sheet . getFocusId ( ) , null ) ; } responseWriter . endElement ( "input" ) ; responseWriter . startElement ( "input" , null ) ; responseWriter . writeAttribute ( "id" , clientId + "_selection" , "id" ) ; responseWriter . writeAttribute ( "name" , clientId + "_selection" , "name" ) ; responseWriter . writeAttribute ( "type" , "hidden" , null ) ; if ( sheet . getSelection ( ) == null ) { responseWriter . writeAttribute ( "value" , "" , null ) ; } else { responseWriter . writeAttribute ( "value" , sheet . getSelection ( ) , null ) ; } responseWriter . endElement ( "input" ) ; // sort col and order if specified and supported
final int sortCol = sheet . getSortColRenderIndex ( ) ; responseWriter . startElement ( "input" , null ) ; responseWriter . writeAttribute ( "id" , clientId + "_sortby" , "id" ) ; responseWriter . writeAttribute ( "name" , clientId + "_sortby" , "name" ) ; responseWriter . writeAttribute ( "type" , "hidden" , null ) ; responseWriter . writeAttribute ( "value" , sortCol , null ) ; responseWriter . endElement ( "input" ) ; responseWriter . startElement ( "input" , null ) ; responseWriter . writeAttribute ( "id" , clientId + "_sortorder" , "id" ) ; responseWriter . writeAttribute ( "name" , clientId + "_sortorder" , "name" ) ; responseWriter . writeAttribute ( "type" , "hidden" , null ) ; responseWriter . writeAttribute ( "value" , sheet . getSortOrder ( ) . toLowerCase ( ) , null ) ; responseWriter . endElement ( "input" ) ;
|
public class RandomICAutomatonGenerator { /** * Creates a random IC automaton generator instance for generating DFAs . The { @ code acceptingRatio } parameter
* controls the probability of a state in a generated automaton being an accepting state .
* @ param acceptingRatio
* the ( approximate ) ratio of accepting states in generated automata
* @ return a random IC automaton generator instance for generating DFAs */
public static RandomICAutomatonGenerator < Boolean , Void > forDFA ( double acceptingRatio ) { } }
|
return new RandomICAutomatonGenerator < Boolean , Void > ( ) . withStateProperties ( r -> r . nextDouble ( ) < acceptingRatio ) ;
|
public class SelenideTargetLocator { /** * Switch to window / tab by name / handle / title except some windows handles
* @ param title title of window / tab */
protected static WebDriver windowByTitle ( WebDriver driver , String title ) { } }
|
Set < String > windowHandles = driver . getWindowHandles ( ) ; for ( String windowHandle : windowHandles ) { driver . switchTo ( ) . window ( windowHandle ) ; if ( title . equals ( driver . getTitle ( ) ) ) { return driver ; } } throw new NoSuchWindowException ( "Window with title not found: " + title ) ;
|
public class LayoutRefiner { /** * Restore the coordinates of atoms ( idxs ) in the stack to the provided
* source .
* @ param stack atom indexes to backup
* @ param src source of coordinates */
private void restoreCoords ( IntStack stack , Point2d [ ] src ) { } }
|
for ( int i = 0 ; i < stack . len ; i ++ ) { int v = stack . xs [ i ] ; atoms [ v ] . getPoint2d ( ) . x = src [ v ] . x ; atoms [ v ] . getPoint2d ( ) . y = src [ v ] . y ; }
|
public class ConversionHandler { /** * This method surrounds the explicit conversion defined with a try - catch , to handle null pointers .
* @ return the body wrapped */
private String error ( ) { } }
|
// Load the explicit conversions declared in the XML configuration.
Map < String , List < ConversionMethod > > conversions = xml . conversionsLoad ( ) ;
String methodName = "illegalCode" ;
String paramater = "" ;
String resource = xml . getXmlPath ( ) ;
if ( ! isNull ( resource ) ) {
    boolean isPath = isPath ( resource ) ;
    // Raw XML content (as opposed to a file path) is reported through a
    // different error helper method.
    methodName = ! isPath ? "illegalCodeContent" : "illegalCode" ;
    if ( ! conversions . isEmpty ( ) && ! isNull ( conversions . get ( configClass . getName ( ) ) ) ) {
        // if is a content, the double quotes must be handled (escaped)
        if ( ! isPath )
            resource = doubleQuotesHandling ( resource ) ;
        paramater = write ( ",\"" , resource , "\"" ) ;
    }
}
// Emit the generated code that invokes the Error helper, wrapping the
// converted body's exception with method/class context (plus the optional
// resource parameter built above).
return write ( "com.googlecode.jmapper.config.Error#" , methodName , "(e,\"" , methodToGenerate . getOriginalName ( ) , "\",\"" , configClass . getSimpleName ( ) , "\"" , paramater , ");" ) ;
|
public class LazyList { /** * This method includes associated objects . It will eagerly load associated models of
* models selected by the query . For instance , if there are models < code > Author < / code > , < code > Post < / code >
* and < code > Comment < / code > , where < code > Author < / code > has many < code > Post < / code > s and < code > Post < / code >
* has many < code > Comment < / code > s , then this query :
* < pre >
* List < Post > todayPosts = Post . where ( " post _ date = ? " , today ) . include ( Author . class , Comment . class ) ;
* < / pre >
* will generate only three queries to database - one per model . All the dependencies ( includes ) will be
* eagerly loaded , and iteration via the < code > todayPosts < / code > list will not generate any more queries ,
* even when a post author and comments are requested . Use this with caution as this method can allocate
* a lot of memory ( obviously ) .
* < p > < / p >
* This method will not follow relationships of related models , but rather only relationships of the current
* one .
* @ param classes list of dependent classes . These classes represent models with which a current model has a
* relationship .
* @ return instance of this < code > LazyList < / code > */
public < E extends Model > LazyList < E > include ( Class < ? extends Model > ... classes ) { } }
|
// TODO : why cannot call include ( ) more than once ?
if ( ! includes . isEmpty ( ) ) { throw new IllegalArgumentException ( "Can't call include() more than once!" ) ; } for ( Class < ? extends Model > clazz : classes ) { if ( ! metaModel . isAssociatedTo ( clazz ) ) throw new IllegalArgumentException ( "Model: " + clazz . getName ( ) + " is not associated with: " + metaModel . getModelClass ( ) . getName ( ) ) ; } // lets cache included classes and associations for future processing .
for ( Class includeClass : classes ) { includes . addAll ( metaModel . getAssociationsForTarget ( includeClass ) ) ; } return ( LazyList < E > ) this ;
|
public class CountTreeBitsCollection { /** * Compute sums tree run .
* @ return the tree run */
public TreeMap < Bits , Long > computeSums ( ) { } }
|
final TreeMap < Bits , Long > sums = new TreeMap < Bits , Long > ( ) ; long total = 0 ; for ( final Entry < Bits , AtomicInteger > e : this . map . entrySet ( ) ) { sums . put ( e . getKey ( ) , total += e . getValue ( ) . get ( ) ) ; } return sums ;
|
public class AtlasMetricObserver { /** * Utility function to map an Observable & lt ; ByteBuf > to an Observable & lt ; Integer > while also
* updating our counters for metrics sent and errors . */
protected Func1 < HttpClientResponse < ByteBuf > , Integer > withBookkeeping ( final int batchSize ) { } }
|
return response -> { boolean ok = response . getStatus ( ) . code ( ) == 200 ; if ( ok ) { numMetricsSent . increment ( batchSize ) ; } else { LOGGER . info ( "Status code: {} - Lost {} metrics" , response . getStatus ( ) . code ( ) , batchSize ) ; numMetricsDroppedHttpErr . increment ( batchSize ) ; } return batchSize ; } ;
|
public class MeasureFormat { /** * values in hms with 0. */
private StringBuilder formatNumeric ( Number [ ] hms , StringBuilder appendable ) { } }
|
// Find the start and end of non-nil values in the hms array. We have to know
// whether we have hour-minute, minute-second, or hour-minute-second.
int startIndex = - 1 ;
int endIndex = - 1 ;
for ( int i = 0 ; i < hms . length ; i ++ ) {
    if ( hms [ i ] != null ) {
        endIndex = i ;
        if ( startIndex == - 1 ) {
            startIndex = endIndex ;
        }
    } else {
        // Replace nil value with 0.
        hms [ i ] = Integer . valueOf ( 0 ) ;
    }
}
// Convert hours, minutes, seconds into milliseconds; fractional parts are
// floored away here (the smallest unit's original value is passed separately
// to the formatter below).
long millis = ( long ) ( ( ( Math . floor ( hms [ 0 ] . doubleValue ( ) ) * 60.0 + Math . floor ( hms [ 1 ] . doubleValue ( ) ) ) * 60.0 + Math . floor ( hms [ 2 ] . doubleValue ( ) ) ) * 1000.0 ) ;
Date d = new Date ( millis ) ;
// if hour-minute-second
if ( startIndex == 0 && endIndex == 2 ) {
    return formatNumeric ( d , numericFormatters . getHourMinuteSecond ( ) , DateFormat . Field . SECOND , hms [ endIndex ] , appendable ) ;
}
// if minute-second
if ( startIndex == 1 && endIndex == 2 ) {
    return formatNumeric ( d , numericFormatters . getMinuteSecond ( ) , DateFormat . Field . SECOND , hms [ endIndex ] , appendable ) ;
}
// if hour-minute
if ( startIndex == 0 && endIndex == 1 ) {
    return formatNumeric ( d , numericFormatters . getHourMinute ( ) , DateFormat . Field . MINUTE , hms [ endIndex ] , appendable ) ;
}
// Any other span of non-nil values is unsupported by the numeric formatters.
throw new IllegalStateException ( ) ;
|
public class ElementDefinitionDt { /** * Adds a given new value for < b > constraint < / b > ( )
* < b > Definition : < / b >
* Formal constraints such as co - occurrence and other constraints that can be computationally evaluated within the context of the instance
* @ param theValue The constraint to add ( must not be < code > null < / code > ) */
public ElementDefinitionDt addConstraint ( Constraint theValue ) { } }
|
if ( theValue == null ) { throw new NullPointerException ( "theValue must not be null" ) ; } getConstraint ( ) . add ( theValue ) ; return this ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.