signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class MapCollectors { /** * Based on < a
* href = " https : / / stackoverflow . com / a / 29090335/8579801 " > https : / / stackoverflow . com / a / 29090335/8579801 < / a > */
@ SuppressWarnings ( "squid:S1452" ) public static < T , K , U > Collector < T , ? , Map < K , U > > toLinkedMap ( Function < ? super T , ? extends K > keyMapper , Function < ? super T , ? extends U > valueMapper ) { } } | return Collectors . toMap ( keyMapper , valueMapper , throwingMerger ( ) , LinkedHashMap :: new ) ; |
public class LoggingUtil { /** * Gets the root j . u . l . Logger and removes all registered handlers
* then redirects all active j . u . l . to SLF4J
* N . B . This should only happen once , hence the flag and locking */
public static void hijackJDKLogging ( ) { } } | JUL_HIJACKING_LOCK . lock ( ) ; try { if ( ! julHijacked ) { SLF4JBridgeHandler . removeHandlersForRootLogger ( ) ; SLF4JBridgeHandler . install ( ) ; julHijacked = true ; } } finally { JUL_HIJACKING_LOCK . unlock ( ) ; } |
public class DescribeDirectConnectGatewayAttachmentsResult { /** * The attachments .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setDirectConnectGatewayAttachments ( java . util . Collection ) } or
* { @ link # withDirectConnectGatewayAttachments ( java . util . Collection ) } if you want to override the existing values .
* @ param directConnectGatewayAttachments
* The attachments .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeDirectConnectGatewayAttachmentsResult withDirectConnectGatewayAttachments ( DirectConnectGatewayAttachment ... directConnectGatewayAttachments ) { } } | if ( this . directConnectGatewayAttachments == null ) { setDirectConnectGatewayAttachments ( new com . amazonaws . internal . SdkInternalList < DirectConnectGatewayAttachment > ( directConnectGatewayAttachments . length ) ) ; } for ( DirectConnectGatewayAttachment ele : directConnectGatewayAttachments ) { this . directConnectGatewayAttachments . add ( ele ) ; } return this ; |
public class Parser { /** * ( Expression , . . . BindingIdentifier ) */
private ParseTree parseCoverParenthesizedExpressionAndArrowParameterList ( ) { } } | if ( peekType ( 1 ) == TokenType . FOR ) { return parseGeneratorComprehension ( ) ; } SourcePosition start = getTreeStartLocation ( ) ; eat ( TokenType . OPEN_PAREN ) ; // Case ( )
if ( peek ( TokenType . CLOSE_PAREN ) ) { eat ( TokenType . CLOSE_PAREN ) ; if ( peek ( TokenType . ARROW ) ) { return new FormalParameterListTree ( getTreeLocation ( start ) , ImmutableList . < ParseTree > of ( ) ) ; } else { reportError ( "invalid parenthesized expression" ) ; return new MissingPrimaryExpressionTree ( getTreeLocation ( start ) ) ; } } // Case ( . . . BindingIdentifier )
if ( peek ( TokenType . SPREAD ) ) { ImmutableList < ParseTree > params = ImmutableList . of ( parseParameter ( ParamContext . IMPLEMENTATION ) ) ; eat ( TokenType . CLOSE_PAREN ) ; if ( peek ( TokenType . ARROW ) ) { return new FormalParameterListTree ( getTreeLocation ( start ) , params ) ; } else { reportError ( "invalid parenthesized expression" ) ; return new MissingPrimaryExpressionTree ( getTreeLocation ( start ) ) ; } } // For either of the two remaining cases :
// ( Expression )
// ( Expression , . . . BindingIdentifier )
// we can parse as an expression .
ParseTree result = parseExpression ( ) ; // If it follows witha comma , we must be in the
// ( Expression , . . . BindingIdentifier )
// case .
if ( peek ( TokenType . COMMA ) ) { eat ( TokenType . COMMA ) ; // Since we already parsed as an expression , we will guaranteed reparse this expression
// as an arrow function parameter list , but just leave it as a comma expression for now .
result = new CommaExpressionTree ( getTreeLocation ( start ) , ImmutableList . of ( result , parseParameter ( ParamContext . IMPLEMENTATION ) ) ) ; } eat ( TokenType . CLOSE_PAREN ) ; return new ParenExpressionTree ( getTreeLocation ( start ) , result ) ; |
public class ImageLocalNormalization { /** * < p > Normalizes the input image such that local weighted statics are a zero mean and with standard deviation
* of 1 . The image border is handled by truncating the kernel and renormalizing it so that it ' s sum is
* still one . < / p >
* < p > output [ x , y ] = ( input [ x , y ] - mean [ x , y ] ) / ( stdev [ x , y ] + delta ) < / p >
* @ param kernel Separable kernel . Typically Gaussian
* @ param input Input image
* @ param maxPixelValue maximum value of a pixel element in the input image . - 1 = compute max value .
* Typically this is 255 or 1.
* @ param delta A small value used to avoid divide by zero errors . Typical 1e - 4f for 32 bit and 1e - 8 for 64bit
* @ param output Storage for output */
public void zeroMeanStdOne ( Kernel1D kernel , T input , double maxPixelValue , double delta , T output ) { } } | // check preconditions and initialize data structures
initialize ( input , output ) ; // avoid overflow issues by ensuring that the max pixel value is 1
T adjusted = ensureMaxValueOfOne ( input , maxPixelValue ) ; // take advantage of 2D gaussian kernels being separable
if ( border == null ) { GConvolveImageOps . horizontalNormalized ( kernel , adjusted , output ) ; GConvolveImageOps . verticalNormalized ( kernel , output , localMean ) ; GPixelMath . pow2 ( adjusted , pow2 ) ; GConvolveImageOps . horizontalNormalized ( kernel , pow2 , output ) ; GConvolveImageOps . verticalNormalized ( kernel , output , localPow2 ) ; } else { GConvolveImageOps . horizontal ( kernel , adjusted , output , border ) ; GConvolveImageOps . vertical ( kernel , output , localMean , border ) ; GPixelMath . pow2 ( adjusted , pow2 ) ; GConvolveImageOps . horizontal ( kernel , pow2 , output , border ) ; GConvolveImageOps . vertical ( kernel , output , localPow2 , border ) ; } // Compute the final output
if ( imageType == GrayF32 . class ) computeOutput ( ( GrayF32 ) input , ( float ) delta , ( GrayF32 ) output , ( GrayF32 ) adjusted ) ; else computeOutput ( ( GrayF64 ) input , delta , ( GrayF64 ) output , ( GrayF64 ) adjusted ) ; |
public class LabsInner { /** * Modify properties of labs .
* @ param resourceGroupName The name of the resource group .
* @ param labAccountName The name of the lab Account .
* @ param labName The name of the lab .
* @ param lab Represents a lab .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the LabInner object */
public Observable < LabInner > updateAsync ( String resourceGroupName , String labAccountName , String labName , LabFragment lab ) { } } | return updateWithServiceResponseAsync ( resourceGroupName , labAccountName , labName , lab ) . map ( new Func1 < ServiceResponse < LabInner > , LabInner > ( ) { @ Override public LabInner call ( ServiceResponse < LabInner > response ) { return response . body ( ) ; } } ) ; |
public class BenchmarkChronosServer { /** * Benchmark ChronosServer and print metrics in another thread . */
public void startBenchmark ( ) { } } | startTime = System . currentTimeMillis ( ) ; LOG . info ( "Start to benchmark chronos server" ) ; Thread t = new Thread ( ) { // new thread to output the metrics
@ Override public void run ( ) { final long collectPeriod = 10000 ; LOG . info ( "Start another thread to export benchmark metrics every " + collectPeriod / 1000.0 + " second" ) ; int totalCount ; int totalLatency ; long exportTime ; int lastTotalCount = 0 ; int lastTotalLatency = 0 ; long lastExportTime = startTime ; while ( true ) { try { Thread . sleep ( collectPeriod ) ; } catch ( InterruptedException e ) { LOG . error ( "Interrupt when sleep to get benchmark metrics, exit immediately" ) ; System . exit ( 0 ) ; } exportTime = System . currentTimeMillis ( ) ; totalCount = totalCountInteger . get ( ) ; totalLatency = totalLatencyInteger . get ( ) ; double totalCostTime = ( exportTime - startTime ) / 1000.0 ; double costTime = ( exportTime - lastExportTime ) / 1000.0 ; double qps = ( totalCount - lastTotalCount ) / costTime ; double latency = ( totalLatency - lastTotalLatency ) * 1.0 / ( totalCount - lastTotalCount ) ; System . out . println ( "Total " + totalCostTime + ", in " + costTime + " seconds, qps: " + qps + ", latency: " + latency + "ms" ) ; lastTotalCount = totalCount ; lastTotalLatency = totalLatency ; lastExportTime = exportTime ; } } } ; t . setDaemon ( true ) ; t . start ( ) ; while ( true ) { try { long start = System . currentTimeMillis ( ) ; currentTimestamp = chronosClient . getTimestamp ( ) ; totalCountInteger . incrementAndGet ( ) ; totalLatencyInteger . addAndGet ( ( int ) ( System . currentTimeMillis ( ) - start ) ) ; if ( currentTimestamp <= previousTimestamp ) { // check correctness
LOG . error ( "Fatal error to get a lower timestamp " + currentTimestamp + "(previous is " + previousTimestamp + "), exit immediately" ) ; System . exit ( 0 ) ; } previousTimestamp = currentTimestamp ; if ( isFailover == true ) { // calculate failover time
double failoverTime = ( System . currentTimeMillis ( ) - failoverStartTime ) / 1000.0 ; System . out . println ( "After " + failoverStartTimeString + ", the total failover time is " + failoverTime + " seconds" ) ; } isFailover = false ; } catch ( IOException e ) { LOG . error ( "Exception to get timestamp" ) ; if ( isFailover == false ) { failoverStartTime = System . currentTimeMillis ( ) ; failoverStartTimeString = new SimpleDateFormat ( "yyyy-MM-dd HH:mm:ss" ) . format ( new Date ( failoverStartTime ) ) ; LOG . info ( "Failover occurs at " + failoverStartTimeString ) ; } isFailover = true ; } } |
public class ComponentFinder { /** * Finds the first parent of the given childComponent from the given parentClass and a flag if
* the search shell be continued with the class name if the search with the given parentClass
* returns null .
* @ param childComponent
* the child component
* @ param parentClass
* the parent class
* @ param byClassname
* the flag to search by classname if the search with given parentClass returns null .
* @ return the component */
public static Component findParent ( final Component childComponent , final Class < ? extends Component > parentClass , final boolean byClassname ) { } } | Component parent = childComponent . getParent ( ) ; while ( parent != null ) { if ( parent . getClass ( ) . equals ( parentClass ) ) { break ; } parent = parent . getParent ( ) ; } if ( ( parent == null ) && byClassname ) { return findParentByClassname ( childComponent , parentClass ) ; } return parent ; |
public class ProducerSessionProxy { /** * This method performs the actual send , but does no parameter checking
* and gets no locks needed to perform the send . This should be done
* by a suitable ' super ' - method .
* @ param msg
* @ param tran
* @ throws com . ibm . wsspi . sib . core . exception . SISessionUnavailableException
* @ throws com . ibm . wsspi . sib . core . exception . SISessionDroppedException
* @ throws com . ibm . wsspi . sib . core . exception . SIConnectionUnavailableException
* @ throws com . ibm . wsspi . sib . core . exception . SIConnectionDroppedException
* @ throws com . ibm . websphere . sib . exception . SIResourceException
* @ throws com . ibm . wsspi . sib . core . exception . SIConnectionLostException
* @ throws com . ibm . wsspi . sib . core . exception . SILimitExceededException
* @ throws com . ibm . websphere . sib . exception . SIErrorException
* @ throws com . ibm . wsspi . sib . core . exception . SINotAuthorizedException
* @ throws com . ibm . websphere . sib . exception . SIIncorrectCallException
* @ throws com . ibm . websphere . sib . exception . SINotPossibleInCurrentConfigurationException */
private void _send ( SIBusMessage msg , SITransaction tran ) throws SISessionUnavailableException , SISessionDroppedException , SIConnectionUnavailableException , SIConnectionDroppedException , SIResourceException , SIConnectionLostException , SILimitExceededException , SIErrorException , SINotAuthorizedException , SIIncorrectCallException , SINotPossibleInCurrentConfigurationException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "_send" ) ; boolean sendSuccessful = false ; // Get the message priority
short jfapPriority = JFapChannelConstants . getJFAPPriority ( msg . getPriority ( ) ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "Sending with JFAP priority of " + jfapPriority ) ; updateLowestPriority ( jfapPriority ) ; // * * * Complex logic to determine if we can get away we need a reply to * * *
// * * * this send operation . * * *
final boolean requireReply ; if ( tran != null && ! exchangeTransactedSends ) { // If there is a transaction , and we haven ' t been explicitly told to exchange
// transacted sends - then a reply is NOT required .
requireReply = false ; } else if ( exchangeExpressSends ) { // We have been prohibited from sending ( rather than exchanging ) low
// qualities of service - thus there is no way that we can avoid requiring
// a reply .
requireReply = true ; } else { // We CAN perform the optimization where low qualities of service can be sent
// without requiring a reply . Check the message quality of service .
requireReply = ( msg . getReliability ( ) != Reliability . BEST_EFFORT_NONPERSISTENT ) && ( msg . getReliability ( ) != Reliability . EXPRESS_NONPERSISTENT ) ; } // * * * end of " is a reply required " logic * * *
// If we are at FAP9 or above we can do a ' chunked ' send of the message in seperate
// slices to make life easier on the Java memory manager
final HandshakeProperties props = getConversation ( ) . getHandshakeProperties ( ) ; if ( props . getFapLevel ( ) >= JFapChannelConstants . FAP_VERSION_9 ) { sendChunkedMessage ( tran , msg , requireReply , jfapPriority ) ; } else { sendEntireMessage ( tran , msg , null , requireReply , jfapPriority ) ; } sendSuccessful = true ; if ( TraceComponent . isAnyTracingEnabled ( ) ) CommsLightTrace . traceMessageId ( tc , "SendMsgTrace" , msg ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "_send" ) ; |
public class InOutLogger { protected void asyncShow ( RequestManager requestManager , String whole ) { } } | requestManager . getAsyncManager ( ) . async ( new ConcurrentAsyncCall ( ) { @ Override public ConcurrentAsyncImportance importance ( ) { return ConcurrentAsyncImportance . TERTIARY ; // as low priority
} @ Override public void callback ( ) { log ( whole ) ; } } ) ; |
public class BaseEnvelopeSchemaConverter { /** * Get payload field and convert to byte array
* @ param inputRecord the input record which has the payload
* @ return the byte array of the payload in the input record
* @ deprecated use { @ link # getFieldAsBytes ( GenericRecord , String ) } */
@ Deprecated protected byte [ ] getPayloadBytes ( GenericRecord inputRecord ) { } } | try { return getFieldAsBytes ( inputRecord , payloadField ) ; } catch ( Exception e ) { return null ; } |
public class JavaTokenizer { /** * Reads a character from { @ code reader } . In this implementation ,
* all read accesses to the reader pass through this method .
* @ param allowEOF if { @ code false } , throws IOException if EOF is
* reached */
@ Ensures ( { } } | "!allowEOF ? result >= 0 : result >= -1" } ) protected int readChar ( boolean allowEOF ) throws IOException { int c = reader . read ( ) ; if ( c != - 1 ) { ++ currentOffset ; } else { if ( ! allowEOF ) { throw new IOException ( ) ; } } return c ; |
public class Music { /** * Return true if the bar is empty or contains only barline and spacer ( s ) .
* False if barline contain other kind of music element */
public boolean barIsEmptyForAllVoices ( Bar bar ) { } } | for ( Object m_voice : m_voices ) { Voice v = ( Voice ) m_voice ; if ( ! v . barIsEmpty ( bar ) ) return false ; } return true ; |
public class CudaGridExecutioner { /** * This method forces all currently enqueued ops to be executed immediately
* PLEASE NOTE : This call IS non - blocking */
public void flushQueue ( ) { } } | /* Basically we just want to form GridOp and pass it to native executioner
But since we don ' t have GridOp interface yet , we ' ll send everything to underlying CudaExecutioner . */
// logger . info ( " Non - Blocking flush " ) ;
// TODO : proper implementation for GridOp creation required here
/* Deque < OpDescriptor > currentQueue = deviceQueues . get ( ) ;
if ( currentQueue = = null )
return ;
OpDescriptor op = currentQueue . pollFirst ( ) ;
while ( op ! = null ) {
pushToGrid ( op , false ) ;
op = currentQueue . pollFirst ( ) ; */
// we need to check ,
OpDescriptor op = lastOp . get ( ) ; if ( op != null ) { if ( ! experimental . get ( ) ) { // if ( ! nativeOps . isExperimentalEnabled ( ) ) {
// it might be only pairwise transform here for now
// logger . info ( " Flushing existing lastOp " ) ;
lastOp . remove ( ) ; dequeueOp ( op ) ; pushToGrid ( op , false ) ; } else { throw new UnsupportedOperationException ( "Experimental flush isn't supported yet" ) ; } } else { // logger . info ( " Queue is empty " ) ;
} |
public class MethodUtils { /** * Gets all class level public methods of the given class that are annotated with the given annotation .
* @ param cls
* the { @ link Class } to query
* @ param annotationCls
* the { @ link Annotation } that must be present on a method to be matched
* @ return a list of Methods ( possibly empty ) .
* @ throws IllegalArgumentException
* if the class or annotation are { @ code null }
* @ since 3.4 */
public static List < Method > getMethodsListWithAnnotation ( final Class < ? > cls , final Class < ? extends Annotation > annotationCls ) { } } | return getMethodsListWithAnnotation ( cls , annotationCls , false , false ) ; |
public class FamiliarRecyclerView { /** * FooterView onBindViewHolder callback
* @ param onFooterViewBindViewHolderListener OnFooterViewBindViewHolderListener */
public void setOnFooterViewBindViewHolderListener ( FamiliarRecyclerView . OnFooterViewBindViewHolderListener onFooterViewBindViewHolderListener ) { } } | if ( null != mWrapFamiliarRecyclerViewAdapter ) { mWrapFamiliarRecyclerViewAdapter . setOnFooterViewBindViewHolderListener ( onFooterViewBindViewHolderListener ) ; } else { mTempOnFooterViewBindViewHolderListener = onFooterViewBindViewHolderListener ; } |
public class SeaGlassLookAndFeel { /** * Initialize the menu settings .
* @ param d the UI defaults map . */
private void defineMenus ( UIDefaults d ) { } } | d . put ( "menuItemBackgroundBase" , new Color ( 0x5b7ea4 ) ) ; // Initialize Menu
String c = PAINTER_PREFIX + "MenuPainter" ; String p = "Menu" ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 1 , 12 , 2 , 5 ) ) ; d . put ( p + "[Disabled].textForeground" , d . get ( "seaGlassDisabledText" ) ) ; d . put ( p + "[Enabled].textForeground" , new ColorUIResource ( Color . BLACK ) ) ; d . put ( p + "[Enabled+Selected].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; d . put ( p + "[Enabled+Selected].backgroundPainter" , new LazyPainter ( c , MenuPainter . Which . BACKGROUND_ENABLED_SELECTED ) ) ; d . put ( p + "[Disabled].arrowIconPainter" , new LazyPainter ( c , MenuPainter . Which . ARROWICON_DISABLED ) ) ; d . put ( p + "[Enabled].arrowIconPainter" , new LazyPainter ( c , MenuPainter . Which . ARROWICON_ENABLED ) ) ; d . put ( p + "[Enabled+Selected].arrowIconPainter" , new LazyPainter ( c , MenuPainter . Which . ARROWICON_ENABLED_SELECTED ) ) ; d . put ( p + ".arrowIcon" , new SeaGlassIcon ( p + "" , "arrowIconPainter" , 9 , 10 ) ) ; d . put ( p + ".checkIcon" , new SeaGlassIcon ( p + "" , "checkIconPainter" , 6 , 10 ) ) ; p = "Menu:MenuItemAccelerator" ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 0 , 0 , 0 , 0 ) ) ; d . put ( p + "[MouseOver].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; // We don ' t paint MenuBar backgrounds . Remove the painters .
c = PAINTER_PREFIX + "MenuBarPainter" ; p = "MenuBar" ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 2 , 6 , 2 , 6 ) ) ; if ( d . get ( p + "[Enabled].backgroundPainter" ) != null ) { d . remove ( p + "[Enabled].backgroundPainter" ) ; } if ( d . get ( p + "[Enabled].borderPainter" ) != null ) { d . remove ( p + "[Enabled].borderPainter" ) ; } // Rossi : " Selected Menu " color changed to dark blue . Not tested with " unified " title / menu / toolbar
c = PAINTER_PREFIX + "MenuItemPainter" ; p = "MenuBar:Menu" ; d . put ( p + ".States" , "Enabled,Selected,Disabled,NotUnified" ) ; d . put ( p + ".NotUnified" , new MenuNotUnified ( ) ) ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 1 , 4 , 2 , 4 ) ) ; d . put ( p + "[Disabled].textForeground" , d . getColor ( "seaGlassDisabledText" ) ) ; d . put ( p + "[Enabled].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; d . put ( p + "[Selected].textForeground" , new ColorUIResource ( Color . BLACK ) ) ; d . put ( p + "[Selected].backgroundPainter" , new LazyPainter ( c , MenuItemPainter . Which . BACKGROUND_MOUSEOVER_UNIFIED ) ) ; d . put ( p + "[Enabled+NotUnified].textForeground" , new ColorUIResource ( Color . BLACK ) ) ; d . put ( p + "[Enabled+Selected+NotUnified].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; d . put ( p + "[Enabled+Selected+NotUnified].backgroundPainter" , new LazyPainter ( c , MenuItemPainter . Which . BACKGROUND_MOUSEOVER ) ) ; p = "MenuBar:Menu:MenuItemAccelerator" ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 0 , 0 , 0 , 0 ) ) ; // Initialize MenuItem
c = PAINTER_PREFIX + "MenuItemPainter" ; p = "MenuItem" ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 1 , 12 , 2 , 13 ) ) ; d . put ( p + ".textIconGap" , new Integer ( 5 ) ) ; d . put ( p + ".acceleratorFont" , new DerivedFont ( "defaultFont" , 1.0f , null , null ) ) ; d . put ( p + "[Disabled].textForeground" , d . getColor ( "seaGlassDisabledText" ) ) ; d . put ( p + "[Enabled].textForeground" , new ColorUIResource ( Color . BLACK ) ) ; d . put ( p + "[MouseOver].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; d . put ( p + "[MouseOver].backgroundPainter" , new LazyPainter ( c , MenuItemPainter . Which . BACKGROUND_MOUSEOVER ) ) ; p = "MenuItem:MenuItemAccelerator" ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 0 , 0 , 0 , 0 ) ) ; d . put ( p + "[Disabled].textForeground" , d . getColor ( "seaGlassDisabledText" ) ) ; d . put ( p + "[MouseOver].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; // Initialize CheckBoxMenuItem
c = PAINTER_PREFIX + "CheckBoxMenuItemPainter" ; p = "CheckBoxMenuItem" ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 1 , 12 , 2 , 13 ) ) ; d . put ( p + ".textIconGap" , new Integer ( 5 ) ) ; d . put ( p + "[Disabled].textForeground" , d . getColor ( "seaGlassDisabledText" ) ) ; d . put ( p + "[Enabled].textForeground" , new ColorUIResource ( Color . BLACK ) ) ; d . put ( p + "[MouseOver].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; d . put ( p + "[MouseOver].backgroundPainter" , new LazyPainter ( c , CheckBoxMenuItemPainter . Which . BACKGROUND_MOUSEOVER ) ) ; d . put ( p + "[MouseOver+Selected].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; d . put ( p + "[MouseOver+Selected].backgroundPainter" , new LazyPainter ( c , CheckBoxMenuItemPainter . Which . BACKGROUND_SELECTED_MOUSEOVER ) ) ; d . put ( p + "[Disabled+Selected].checkIconPainter" , new LazyPainter ( c , CheckBoxMenuItemPainter . Which . CHECKICON_DISABLED_SELECTED ) ) ; d . put ( p + "[Enabled+Selected].checkIconPainter" , new LazyPainter ( c , CheckBoxMenuItemPainter . Which . CHECKICON_ENABLED_SELECTED ) ) ; // Rossi : Added painter that shows an " indicator " that menu item is a " selectable checkbox "
d . put ( p + "[Enabled].checkIconPainter" , new LazyPainter ( c , CheckBoxMenuItemPainter . Which . CHECKICON_ENABLED ) ) ; d . put ( p + "[MouseOver].checkIconPainter" , new LazyPainter ( c , CheckBoxMenuItemPainter . Which . CHECKICON_ENABLED_MOUSEOVER ) ) ; d . put ( p + "[MouseOver+Selected].checkIconPainter" , new LazyPainter ( c , CheckBoxMenuItemPainter . Which . CHECKICON_SELECTED_MOUSEOVER ) ) ; d . put ( p + ".checkIcon" , new SeaGlassIcon ( p , "checkIconPainter" , 9 , 10 ) ) ; p = "CheckBoxMenuItem:MenuItemAccelerator" ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 0 , 0 , 0 , 0 ) ) ; d . put ( p + "[MouseOver].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; // Initialize RadioButtonMenuItem
c = PAINTER_PREFIX + "RadioButtonMenuItemPainter" ; p = "RadioButtonMenuItem" ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 1 , 12 , 2 , 13 ) ) ; d . put ( p + ".textIconGap" , new Integer ( 5 ) ) ; d . put ( p + "[Disabled].textForeground" , d . getColor ( "seaGlassDisabledText" ) ) ; d . put ( p + "[Enabled].textForeground" , new ColorUIResource ( Color . BLACK ) ) ; d . put ( p + "[MouseOver].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; d . put ( p + "[MouseOver].backgroundPainter" , new LazyPainter ( c , RadioButtonMenuItemPainter . Which . BACKGROUND_MOUSEOVER ) ) ; d . put ( p + "[MouseOver+Selected].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; d . put ( p + "[MouseOver+Selected].backgroundPainter" , new LazyPainter ( c , RadioButtonMenuItemPainter . Which . BACKGROUND_SELECTED_MOUSEOVER ) ) ; d . put ( p + "[Disabled+Selected].checkIconPainter" , new LazyPainter ( c , RadioButtonMenuItemPainter . Which . CHECKICON_DISABLED_SELECTED ) ) ; d . put ( p + "[Enabled+Selected].checkIconPainter" , new LazyPainter ( c , RadioButtonMenuItemPainter . Which . CHECKICON_ENABLED_SELECTED ) ) ; // Rossi : Added painter that shows an " indicator " that menu item is a " selectable radio button "
d . put ( p + "[Enabled].checkIconPainter" , new LazyPainter ( c , RadioButtonMenuItemPainter . Which . CHECKICON_ENABLED ) ) ; d . put ( p + "[MouseOver].checkIconPainter" , new LazyPainter ( c , RadioButtonMenuItemPainter . Which . CHECKICON_ENABLED_MOUSEOVER ) ) ; d . put ( p + "[MouseOver+Selected].checkIconPainter" , new LazyPainter ( c , RadioButtonMenuItemPainter . Which . CHECKICON_SELECTED_MOUSEOVER ) ) ; d . put ( p + ".checkIcon" , new SeaGlassIcon ( p , "checkIconPainter" , 9 , 10 ) ) ; p = "RadioButtonMenuItem:MenuItemAccelerator" ; d . put ( p + ".contentMargins" , new InsetsUIResource ( 0 , 0 , 0 , 0 ) ) ; d . put ( p + "[MouseOver].textForeground" , new ColorUIResource ( Color . WHITE ) ) ; |
public class DoublePoint { /** * Divides values of two points .
* @ param point1 DoublePoint .
* @ param point2 DoublePoint .
* @ return A new DoublePoint with the division operation . */
public DoublePoint Divide ( DoublePoint point1 , DoublePoint point2 ) { } } | DoublePoint result = new DoublePoint ( point1 ) ; result . Divide ( point2 ) ; return result ; |
public class JDBCConnection { /** * < ! - - start generic documentation - - >
* Makes all changes made since the previous
* commit / rollback permanent and releases any database locks
* currently held by this < code > Connection < / code > object .
* This method should be
* used only when auto - commit mode has been disabled .
* < ! - - end generic documentation - - >
* < ! - - start release - specific documentation - - >
* < div class = " ReleaseSpecificDocumentation " >
* < h3 > HSQLDB - Specific Information : < / h3 > < p >
* < / div > < ! - - end release - specific documentation - - >
* @ exception SQLException if a database access error occurs ,
* ( JDBC4 Clarification : )
* this method is called while participating in a distributed transaction ,
* if this method is called on a closed conection or this
* < code > Connection < / code > object is in auto - commit mode
* @ see # setAutoCommit */
public synchronized void commit ( ) throws SQLException { } } | checkClosed ( ) ; try { sessionProxy . commit ( false ) ; } catch ( HsqlException e ) { throw Util . sqlException ( e ) ; } |
public class CloudMe { /** * An invoker that checks response content type = XML : to be used by all API requests
* @ param request API request
* @ return a request invoker specific for API requests */
private RequestInvoker < CResponse > getApiRequestInvoker ( HttpRequestBase request , CPath path ) { } } | return new RequestInvoker < CResponse > ( new HttpRequestor ( request , path , sessionManager ) , API_RESPONSE_VALIDATOR ) ; |
public class Viewport { /** * Write this viewport to the specified parcel . To restore a viewport from a parcel , use readFromParcel ( )
* @ param out The parcel to write the viewport ' s coordinates into */
public void writeToParcel ( Parcel out , int flags ) { } } | out . writeFloat ( left ) ; out . writeFloat ( top ) ; out . writeFloat ( right ) ; out . writeFloat ( bottom ) ; |
public class UNode { /** * are found while scanning . */
private static boolean parseXMLChildElems ( Element elem , List < UNode > childUNodeList ) { } } | assert elem != null ; assert childUNodeList != null ; // Scan for Element nodes ( there could be Comment and other nodes ) .
boolean bDupNodeNames = false ; Set < String > nodeNameSet = new HashSet < String > ( ) ; NodeList nodeList = elem . getChildNodes ( ) ; for ( int index = 0 ; index < nodeList . getLength ( ) ; index ++ ) { Node childNode = nodeList . item ( index ) ; if ( childNode instanceof Element ) { // Create the appropriate child UNode for this element .
UNode childUNode = parseXMLElement ( ( Element ) childNode ) ; childUNodeList . add ( childUNode ) ; if ( nodeNameSet . contains ( childUNode . getName ( ) ) ) { bDupNodeNames = true ; } else { nodeNameSet . add ( childUNode . getName ( ) ) ; } } } return bDupNodeNames ; |
public class ScreenshotMaker { /** * Takes a raw string representing the result of the toDataURL ( ) function
* of the HTML5 canvas and calls the callback with a proper mime type
* and the bytes of the image .
* @ see < a href = " http : / / www . whatwg . org / specs / web - apps / current - work / multipage / the - canvas - element . html # dom - canvas - todataurl " > http : / / www . whatwg . org / specs / web - apps / current - work / multipage / the - canvas - element . html # dom - canvas - todataurl < /
* @ param rawImage The encoded image
* @ param callback The callback to call with the result .
* @ return */
private void parseAndCallback ( String rawImage , ScreenshotCallback callback ) { } } | if ( callback == null ) { return ; } // find the mime type
final String [ ] typeInfoAndData = rawImage . split ( "," ) ; String [ ] mimeAndEncoding = typeInfoAndData [ 0 ] . replaceFirst ( "data:" , "" ) . split ( ";" ) ; if ( typeInfoAndData . length == 2 && mimeAndEncoding . length == 2 && "base64" . equalsIgnoreCase ( mimeAndEncoding [ 1 ] ) ) { byte [ ] result = Base64 . decodeBase64 ( typeInfoAndData [ 1 ] ) ; callback . screenshotReceived ( result , mimeAndEncoding [ 0 ] ) ; } |
public class InvalidRequestException { /** * The error code associated with the exception .
* @ param emrErrorCode
* The error code associated with the exception . */
@ com . fasterxml . jackson . annotation . JsonProperty ( "ErrorCode" ) public void setEmrErrorCode ( String emrErrorCode ) { } } | this . emrErrorCode = emrErrorCode ; |
public class PDFPageHelper { /** * Gets the pdf page annotation name . It takes into acount the mappings
* defined in { @ link VisualizerInput # mappings } . */
public String getPDFPageAnnotationName ( ) { } } | Properties mappings = input . getMappings ( ) ; if ( mappings != null ) { return mappings . getProperty ( MAPPING_PAGE_KEY , DEFAULT_PAGE_NUMBER_ANNOTATION_NAME ) ; } return DEFAULT_PAGE_NUMBER_ANNOTATION_NAME ; |
public class RowsLogBuffer { /** * Extracting next field value from packed buffer .
* @ see mysql - 5.1.60 / sql / log _ event . cc - log _ event _ print _ value */
final Serializable fetchValue ( String columnName , int columnIndex , int type , final int meta , boolean isBinary ) { } } | int len = 0 ; if ( type == LogEvent . MYSQL_TYPE_STRING ) { if ( meta >= 256 ) { int byte0 = meta >> 8 ; int byte1 = meta & 0xff ; if ( ( byte0 & 0x30 ) != 0x30 ) { /* a long CHAR ( ) field : see # 37426 */
len = byte1 | ( ( ( byte0 & 0x30 ) ^ 0x30 ) << 4 ) ; type = byte0 | 0x30 ; } else { switch ( byte0 ) { case LogEvent . MYSQL_TYPE_SET : case LogEvent . MYSQL_TYPE_ENUM : case LogEvent . MYSQL_TYPE_STRING : type = byte0 ; len = byte1 ; break ; default : throw new IllegalArgumentException ( String . format ( "!! Don't know how to handle column type=%d meta=%d (%04X)" , type , meta , meta ) ) ; } } } else { len = meta ; } } switch ( type ) { case LogEvent . MYSQL_TYPE_LONG : { // XXX : How to check signed / unsigned ?
// value = unsigned ? Long . valueOf ( buffer . getUint32 ( ) ) :
// Integer . valueOf ( buffer . getInt32 ( ) ) ;
value = Integer . valueOf ( buffer . getInt32 ( ) ) ; javaType = Types . INTEGER ; length = 4 ; break ; } case LogEvent . MYSQL_TYPE_TINY : { // XXX : How to check signed / unsigned ?
// value = Integer . valueOf ( unsigned ? buffer . getUint8 ( ) :
// buffer . getInt8 ( ) ) ;
value = Integer . valueOf ( buffer . getInt8 ( ) ) ; javaType = Types . TINYINT ; // java . sql . Types . INTEGER ;
length = 1 ; break ; } case LogEvent . MYSQL_TYPE_SHORT : { // XXX : How to check signed / unsigned ?
// value = Integer . valueOf ( unsigned ? buffer . getUint16 ( ) :
// buffer . getInt16 ( ) ) ;
value = Integer . valueOf ( ( short ) buffer . getInt16 ( ) ) ; javaType = Types . SMALLINT ; // java . sql . Types . INTEGER ;
length = 2 ; break ; } case LogEvent . MYSQL_TYPE_INT24 : { // XXX : How to check signed / unsigned ?
// value = Integer . valueOf ( unsigned ? buffer . getUint24 ( ) :
// buffer . getInt24 ( ) ) ;
value = Integer . valueOf ( buffer . getInt24 ( ) ) ; javaType = Types . INTEGER ; length = 3 ; break ; } case LogEvent . MYSQL_TYPE_LONGLONG : { // XXX : How to check signed / unsigned ?
// value = unsigned ? buffer . getUlong64 ( ) ) :
// Long . valueOf ( buffer . getLong64 ( ) ) ;
value = Long . valueOf ( buffer . getLong64 ( ) ) ; javaType = Types . BIGINT ; // Types . INTEGER ;
length = 8 ; break ; } case LogEvent . MYSQL_TYPE_DECIMAL : { /* * log _ event . h : This enumeration value is only used internally
* and cannot exist in a binlog . */
logger . warn ( "MYSQL_TYPE_DECIMAL : This enumeration value is " + "only used internally and cannot exist in a binlog!" ) ; javaType = Types . DECIMAL ; value = null ; /* unknown format */
length = 0 ; break ; } case LogEvent . MYSQL_TYPE_NEWDECIMAL : { final int precision = meta >> 8 ; final int decimals = meta & 0xff ; value = buffer . getDecimal ( precision , decimals ) ; javaType = Types . DECIMAL ; length = precision ; break ; } case LogEvent . MYSQL_TYPE_FLOAT : { value = Float . valueOf ( buffer . getFloat32 ( ) ) ; javaType = Types . REAL ; // Types . FLOAT ;
length = 4 ; break ; } case LogEvent . MYSQL_TYPE_DOUBLE : { value = Double . valueOf ( buffer . getDouble64 ( ) ) ; javaType = Types . DOUBLE ; length = 8 ; break ; } case LogEvent . MYSQL_TYPE_BIT : { /* Meta - data : bit _ len , bytes _ in _ rec , 2 bytes */
final int nbits = ( ( meta >> 8 ) * 8 ) + ( meta & 0xff ) ; len = ( nbits + 7 ) / 8 ; if ( nbits > 1 ) { // byte [ ] bits = new byte [ len ] ;
// buffer . fillBytes ( bits , 0 , len ) ;
// 转化为unsign long
switch ( len ) { case 1 : value = buffer . getUint8 ( ) ; break ; case 2 : value = buffer . getBeUint16 ( ) ; break ; case 3 : value = buffer . getBeUint24 ( ) ; break ; case 4 : value = buffer . getBeUint32 ( ) ; break ; case 5 : value = buffer . getBeUlong40 ( ) ; break ; case 6 : value = buffer . getBeUlong48 ( ) ; break ; case 7 : value = buffer . getBeUlong56 ( ) ; break ; case 8 : value = buffer . getBeUlong64 ( ) ; break ; default : throw new IllegalArgumentException ( "!! Unknown Bit len = " + len ) ; } } else { final int bit = buffer . getInt8 ( ) ; // value = ( bit ! = 0 ) ? Boolean . TRUE : Boolean . FALSE ;
value = bit ; } javaType = Types . BIT ; length = nbits ; break ; } case LogEvent . MYSQL_TYPE_TIMESTAMP : { // MYSQL DataTypes : TIMESTAMP
// range is ' 1970-01-01 00:00:01 ' UTC to ' 2038-01-19 03:14:07'
// UTC
// A TIMESTAMP cannot represent the value ' 1970-01-01 00:00:00'
// because that is equivalent to 0 seconds from the epoch and
// the value 0 is reserved for representing ' 0000-00-00
// 00:00:00 ' , the “ zero ” TIMESTAMP value .
final long i32 = buffer . getUint32 ( ) ; if ( i32 == 0 ) { value = "0000-00-00 00:00:00" ; } else { String v = new Timestamp ( i32 * 1000 ) . toString ( ) ; value = v . substring ( 0 , v . length ( ) - 2 ) ; } javaType = Types . TIMESTAMP ; length = 4 ; break ; } case LogEvent . MYSQL_TYPE_TIMESTAMP2 : { final long tv_sec = buffer . getBeUint32 ( ) ; // big - endian
int tv_usec = 0 ; switch ( meta ) { case 0 : tv_usec = 0 ; break ; case 1 : case 2 : tv_usec = buffer . getInt8 ( ) * 10000 ; break ; case 3 : case 4 : tv_usec = buffer . getBeInt16 ( ) * 100 ; break ; case 5 : case 6 : tv_usec = buffer . getBeInt24 ( ) ; break ; default : tv_usec = 0 ; break ; } String second = null ; if ( tv_sec == 0 ) { second = "0000-00-00 00:00:00" ; } else { Timestamp time = new Timestamp ( tv_sec * 1000 ) ; second = time . toString ( ) ; second = second . substring ( 0 , second . length ( ) - 2 ) ; // 去掉毫秒精度 . 0
} if ( meta >= 1 ) { String microSecond = usecondsToStr ( tv_usec , meta ) ; microSecond = microSecond . substring ( 0 , meta ) ; value = second + '.' + microSecond ; } else { value = second ; } javaType = Types . TIMESTAMP ; length = 4 + ( meta + 1 ) / 2 ; break ; } case LogEvent . MYSQL_TYPE_DATETIME : { // MYSQL DataTypes : DATETIME
// range is ' 0000-01-01 00:00:00 ' to ' 9999-12-31 23:59:59'
final long i64 = buffer . getLong64 ( ) ; /* YYYYMMDDhhmmss */
if ( i64 == 0 ) { value = "0000-00-00 00:00:00" ; } else { final int d = ( int ) ( i64 / 1000000 ) ; final int t = ( int ) ( i64 % 1000000 ) ; // if ( cal = = null ) cal = Calendar . getInstance ( ) ;
// cal . clear ( ) ;
/* month is 0 - based , 0 for january . */
// cal . set ( d / 10000 , ( d % 10000 ) / 100 - 1 , d % 100 , t /
// 10000 , ( t % 10000 ) / 100 , t % 100 ) ;
// value = new Timestamp ( cal . getTimeInMillis ( ) ) ;
// value = String . format ( " % 04d - % 02d - % 02d % 02d : % 02d : % 02d " ,
// d / 10000,
// ( d % 10000 ) / 100,
// d % 100,
// t / 10000,
// ( t % 10000 ) / 100,
// t % 100 ) ;
StringBuilder builder = new StringBuilder ( ) ; appendNumber4 ( builder , d / 10000 ) ; builder . append ( '-' ) ; appendNumber2 ( builder , ( d % 10000 ) / 100 ) ; builder . append ( '-' ) ; appendNumber2 ( builder , d % 100 ) ; builder . append ( ' ' ) ; appendNumber2 ( builder , t / 10000 ) ; builder . append ( ':' ) ; appendNumber2 ( builder , ( t % 10000 ) / 100 ) ; builder . append ( ':' ) ; appendNumber2 ( builder , t % 100 ) ; value = builder . toString ( ) ; } javaType = Types . TIMESTAMP ; length = 8 ; break ; } case LogEvent . MYSQL_TYPE_DATETIME2 : { /* * DATETIME and DATE low - level memory and disk representation
* routines 1 bit sign ( used when on disk ) 17 bits year * 13 + month
* ( year 0-9999 , month 0-12 ) 5 bits day ( 0-31 ) 5 bits hour
* ( 0-23 ) 6 bits minute ( 0-59 ) 6 bits second ( 0-59 ) 24 bits
* microseconds ( 0-99999 ) Total : 64 bits = 8 bytes
* SYYYYY . YYYYY
* . YYdddddh . hhhhmmmm . mmsssss . fffff . fffff . fffff */
long intpart = buffer . getBeUlong40 ( ) - DATETIMEF_INT_OFS ; // big - endian
int frac = 0 ; switch ( meta ) { case 0 : frac = 0 ; break ; case 1 : case 2 : frac = buffer . getInt8 ( ) * 10000 ; break ; case 3 : case 4 : frac = buffer . getBeInt16 ( ) * 100 ; break ; case 5 : case 6 : frac = buffer . getBeInt24 ( ) ; break ; default : frac = 0 ; break ; } String second = null ; if ( intpart == 0 ) { second = "0000-00-00 00:00:00" ; } else { // 构造TimeStamp只处理到秒
long ymd = intpart >> 17 ; long ym = ymd >> 5 ; long hms = intpart % ( 1 << 17 ) ; // if ( cal = = null ) cal = Calendar . getInstance ( ) ;
// cal . clear ( ) ;
// cal . set ( ( int ) ( ym / 13 ) , ( int ) ( ym % 13 ) - 1 , ( int ) ( ymd
// % ( 1 < < 5 ) ) , ( int ) ( hms > > 12 ) ,
// ( int ) ( ( hms > > 6 ) % ( 1 < < 6 ) ) , ( int ) ( hms % ( 1 < < 6 ) ) ) ;
// value = new Timestamp ( cal . getTimeInMillis ( ) ) ;
// second = String . format ( " % 04d - % 02d - % 02d % 02d : % 02d : % 02d " ,
// ( int ) ( ym / 13 ) ,
// ( int ) ( ym % 13 ) ,
// ( int ) ( ymd % ( 1 < < 5 ) ) ,
// ( int ) ( hms > > 12 ) ,
// ( int ) ( ( hms > > 6 ) % ( 1 < < 6 ) ) ,
// ( int ) ( hms % ( 1 < < 6 ) ) ) ;
StringBuilder builder = new StringBuilder ( 26 ) ; appendNumber4 ( builder , ( int ) ( ym / 13 ) ) ; builder . append ( '-' ) ; appendNumber2 ( builder , ( int ) ( ym % 13 ) ) ; builder . append ( '-' ) ; appendNumber2 ( builder , ( int ) ( ymd % ( 1 << 5 ) ) ) ; builder . append ( ' ' ) ; appendNumber2 ( builder , ( int ) ( hms >> 12 ) ) ; builder . append ( ':' ) ; appendNumber2 ( builder , ( int ) ( ( hms >> 6 ) % ( 1 << 6 ) ) ) ; builder . append ( ':' ) ; appendNumber2 ( builder , ( int ) ( hms % ( 1 << 6 ) ) ) ; second = builder . toString ( ) ; } if ( meta >= 1 ) { String microSecond = usecondsToStr ( frac , meta ) ; microSecond = microSecond . substring ( 0 , meta ) ; value = second + '.' + microSecond ; } else { value = second ; } javaType = Types . TIMESTAMP ; length = 5 + ( meta + 1 ) / 2 ; break ; } case LogEvent . MYSQL_TYPE_TIME : { // MYSQL DataTypes : TIME
// The range is ' - 838:59:59 ' to ' 838:59:59'
// final int i32 = buffer . getUint24 ( ) ;
final int i32 = buffer . getInt24 ( ) ; final int u32 = Math . abs ( i32 ) ; if ( i32 == 0 ) { value = "00:00:00" ; } else { // if ( cal = = null ) cal = Calendar . getInstance ( ) ;
// cal . clear ( ) ;
// cal . set ( 70 , 0 , 1 , i32 / 10000 , ( i32 % 10000 ) / 100 , i32 %
// 100 ) ;
// value = new Time ( cal . getTimeInMillis ( ) ) ;
// value = String . format ( " % s % 02d : % 02d : % 02d " ,
// ( i32 > = 0 ) ? " " : " - " ,
// u32 / 10000,
// ( u32 % 10000 ) / 100,
// u32 % 100 ) ;
StringBuilder builder = new StringBuilder ( 17 ) ; if ( i32 < 0 ) { builder . append ( '-' ) ; } int d = u32 / 10000 ; if ( d > 100 ) { builder . append ( String . valueOf ( d ) ) ; } else { appendNumber2 ( builder , d ) ; } builder . append ( ':' ) ; appendNumber2 ( builder , ( u32 % 10000 ) / 100 ) ; builder . append ( ':' ) ; appendNumber2 ( builder , u32 % 100 ) ; value = builder . toString ( ) ; } javaType = Types . TIME ; length = 3 ; break ; } case LogEvent . MYSQL_TYPE_TIME2 : { /* * TIME low - level memory and disk representation routines
* In - memory format : 1 bit sign ( Used for sign , when on disk ) 1
* bit unused ( Reserved for wider hour range , e . g . for
* intervals ) 10 bit hour ( 0-836 ) 6 bit minute ( 0-59 ) 6 bit
* second ( 0-59 ) 24 bits microseconds ( 0-99999 ) Total : 48 bits
* = 6 bytes
* Suhhhhh . hhhhmmmm . mmsssss . fffff . fffff . fffff */
long intpart = 0 ; int frac = 0 ; long ltime = 0 ; switch ( meta ) { case 0 : intpart = buffer . getBeUint24 ( ) - TIMEF_INT_OFS ; // big - endian
ltime = intpart << 24 ; break ; case 1 : case 2 : intpart = buffer . getBeUint24 ( ) - TIMEF_INT_OFS ; frac = buffer . getUint8 ( ) ; if ( intpart < 0 && frac > 0 ) { /* * Negative values are stored with reverse
* fractional part order , for binary sort
* compatibility . Disk value intpart frac Time value
* Memory value 800000.00 0 0 00:00:00.00
* 00000.00000 7FFFFF . FF - 1 255 - 00:00:00.01
* FFFFF . FFD8F0 7FFFFF . 9D - 1 99 - 00:00:00.99
* FFFFF . F0E4D0 7FFFFF . 00 - 1 0 - 00:00:01.00
* FFFFF . 00000 7FFFFE . FF - 1 255 - 00:00:01.01
* FFFFFE . FFD8F0 7FFFFE . F6 - 2 246 - 00:00:01.10
* FFFFFE . FE7960 Formula to convert fractional
* part from disk format ( now stored in " frac "
* variable ) to absolute value : " 0x100 - frac " . To
* reconstruct in - memory value , we shift to the next
* integer value and then substruct fractional part . */
intpart ++ ; /* Shift to the next integer value */
frac -= 0x100 ; /* - ( 0x100 - frac ) */
// fraclong = frac * 10000;
} frac = frac * 10000 ; ltime = intpart << 24 ; break ; case 3 : case 4 : intpart = buffer . getBeUint24 ( ) - TIMEF_INT_OFS ; frac = buffer . getBeUint16 ( ) ; if ( intpart < 0 && frac > 0 ) { /* * Fix reverse fractional part order :
* " 0x10000 - frac " . See comments for FSP = 1 and
* FSP = 2 above . */
intpart ++ ; /* Shift to the next integer value */
frac -= 0x10000 ; /* - ( 0x10000 - frac ) */
// fraclong = frac * 100;
} frac = frac * 100 ; ltime = intpart << 24 ; break ; case 5 : case 6 : intpart = buffer . getBeUlong48 ( ) - TIMEF_OFS ; ltime = intpart ; frac = ( int ) ( intpart % ( 1L << 24 ) ) ; break ; default : intpart = buffer . getBeUint24 ( ) - TIMEF_INT_OFS ; ltime = intpart << 24 ; break ; } String second = null ; if ( intpart == 0 ) { second = "00:00:00" ; } else { // 目前只记录秒 , 不处理us frac
// if ( cal = = null ) cal = Calendar . getInstance ( ) ;
// cal . clear ( ) ;
// cal . set ( 70 , 0 , 1 , ( int ) ( ( intpart > > 12 ) % ( 1 < < 10 ) ) ,
// ( int ) ( ( intpart > > 6 ) % ( 1 < < 6 ) ) ,
// ( int ) ( intpart % ( 1 < < 6 ) ) ) ;
// value = new Time ( cal . getTimeInMillis ( ) ) ;
long ultime = Math . abs ( ltime ) ; intpart = ultime >> 24 ; // second = String . format ( " % s % 02d : % 02d : % 02d " ,
// ltime > = 0 ? " " : " - " ,
// ( int ) ( ( intpart > > 12 ) % ( 1 < < 10 ) ) ,
// ( int ) ( ( intpart > > 6 ) % ( 1 < < 6 ) ) ,
// ( int ) ( intpart % ( 1 < < 6 ) ) ) ;
StringBuilder builder = new StringBuilder ( 12 ) ; if ( ltime < 0 ) { builder . append ( '-' ) ; } int d = ( int ) ( ( intpart >> 12 ) % ( 1 << 10 ) ) ; if ( d > 100 ) { builder . append ( String . valueOf ( d ) ) ; } else { appendNumber2 ( builder , d ) ; } builder . append ( ':' ) ; appendNumber2 ( builder , ( int ) ( ( intpart >> 6 ) % ( 1 << 6 ) ) ) ; builder . append ( ':' ) ; appendNumber2 ( builder , ( int ) ( intpart % ( 1 << 6 ) ) ) ; second = builder . toString ( ) ; } if ( meta >= 1 ) { String microSecond = usecondsToStr ( Math . abs ( frac ) , meta ) ; microSecond = microSecond . substring ( 0 , meta ) ; value = second + '.' + microSecond ; } else { value = second ; } javaType = Types . TIME ; length = 3 + ( meta + 1 ) / 2 ; break ; } case LogEvent . MYSQL_TYPE_NEWDATE : { /* * log _ event . h : This enumeration value is only used internally
* and cannot exist in a binlog . */
logger . warn ( "MYSQL_TYPE_NEWDATE : This enumeration value is " + "only used internally and cannot exist in a binlog!" ) ; javaType = Types . DATE ; value = null ; /* unknown format */
length = 0 ; break ; } case LogEvent . MYSQL_TYPE_DATE : { // MYSQL DataTypes :
// range : 0000-00-00 ~ 9999-12-31
final int i32 = buffer . getUint24 ( ) ; if ( i32 == 0 ) { value = "0000-00-00" ; } else { // if ( cal = = null ) cal = Calendar . getInstance ( ) ;
// cal . clear ( ) ;
/* month is 0 - based , 0 for january . */
// cal . set ( ( i32 / ( 16 * 32 ) ) , ( i32 / 32 % 16 ) - 1 , ( i32 %
// 32 ) ) ;
// value = new java . sql . Date ( cal . getTimeInMillis ( ) ) ;
// value = String . format ( " % 04d - % 02d - % 02d " , i32 / ( 16 * 32 ) ,
// i32 / 32 % 16 , i32 % 32 ) ;
StringBuilder builder = new StringBuilder ( 12 ) ; appendNumber4 ( builder , i32 / ( 16 * 32 ) ) ; builder . append ( '-' ) ; appendNumber2 ( builder , i32 / 32 % 16 ) ; builder . append ( '-' ) ; appendNumber2 ( builder , i32 % 32 ) ; value = builder . toString ( ) ; } javaType = Types . DATE ; length = 3 ; break ; } case LogEvent . MYSQL_TYPE_YEAR : { // MYSQL DataTypes : YEAR [ ( 2 | 4 ) ]
// In four - digit format , values display as 1901 to 2155 , and
// 0000.
// In two - digit format , values display as 70 to 69 , representing
// years from 1970 to 2069.
final int i32 = buffer . getUint8 ( ) ; // If connection property ' YearIsDateType ' has
// set , value is java . sql . Date .
/* * if ( cal = = null ) cal = Calendar . getInstance ( ) ; cal . clear ( ) ;
* cal . set ( Calendar . YEAR , i32 + 1900 ) ; value = new
* java . sql . Date ( cal . getTimeInMillis ( ) ) ; */
// The else , value is java . lang . Short .
if ( i32 == 0 ) { value = "0000" ; } else { value = String . valueOf ( ( short ) ( i32 + 1900 ) ) ; } // It might seem more correct to create a java . sql . Types . DATE
// value
// for this date , but it is much simpler to pass the value as an
// integer . The MySQL JDBC specification states that one can
// pass a java int between 1901 and 2055 . Creating a DATE value
// causes truncation errors with certain SQL _ MODES
// ( e . g . " STRICT _ TRANS _ TABLES " ) .
javaType = Types . VARCHAR ; // Types . INTEGER ;
length = 1 ; break ; } case LogEvent . MYSQL_TYPE_ENUM : { final int int32 ; /* * log _ event . h : This enumeration value is only used internally
* and cannot exist in a binlog . */
switch ( len ) { case 1 : int32 = buffer . getUint8 ( ) ; break ; case 2 : int32 = buffer . getUint16 ( ) ; break ; default : throw new IllegalArgumentException ( "!! Unknown ENUM packlen = " + len ) ; } // logger . warn ( " MYSQL _ TYPE _ ENUM : This enumeration value is "
// + " only used internally and cannot exist in a binlog ! " ) ;
value = Integer . valueOf ( int32 ) ; javaType = Types . INTEGER ; length = len ; break ; } case LogEvent . MYSQL_TYPE_SET : { final int nbits = ( meta & 0xFF ) * 8 ; len = ( nbits + 7 ) / 8 ; if ( nbits > 1 ) { // byte [ ] bits = new byte [ len ] ;
// buffer . fillBytes ( bits , 0 , len ) ;
// 转化为unsign long
switch ( len ) { case 1 : value = buffer . getUint8 ( ) ; break ; case 2 : value = buffer . getUint16 ( ) ; break ; case 3 : value = buffer . getUint24 ( ) ; break ; case 4 : value = buffer . getUint32 ( ) ; break ; case 5 : value = buffer . getUlong40 ( ) ; break ; case 6 : value = buffer . getUlong48 ( ) ; break ; case 7 : value = buffer . getUlong56 ( ) ; break ; case 8 : value = buffer . getUlong64 ( ) ; break ; default : throw new IllegalArgumentException ( "!! Unknown Set len = " + len ) ; } } else { final int bit = buffer . getInt8 ( ) ; // value = ( bit ! = 0 ) ? Boolean . TRUE : Boolean . FALSE ;
value = bit ; } javaType = Types . BIT ; length = len ; break ; } case LogEvent . MYSQL_TYPE_TINY_BLOB : { /* * log _ event . h : This enumeration value is only used internally
* and cannot exist in a binlog . */
logger . warn ( "MYSQL_TYPE_TINY_BLOB : This enumeration value is " + "only used internally and cannot exist in a binlog!" ) ; } case LogEvent . MYSQL_TYPE_MEDIUM_BLOB : { /* * log _ event . h : This enumeration value is only used internally
* and cannot exist in a binlog . */
logger . warn ( "MYSQL_TYPE_MEDIUM_BLOB : This enumeration value is " + "only used internally and cannot exist in a binlog!" ) ; } case LogEvent . MYSQL_TYPE_LONG_BLOB : { /* * log _ event . h : This enumeration value is only used internally
* and cannot exist in a binlog . */
logger . warn ( "MYSQL_TYPE_LONG_BLOB : This enumeration value is " + "only used internally and cannot exist in a binlog!" ) ; } case LogEvent . MYSQL_TYPE_BLOB : { /* * BLOB or TEXT datatype */
switch ( meta ) { case 1 : { /* TINYBLOB / TINYTEXT */
final int len8 = buffer . getUint8 ( ) ; byte [ ] binary = new byte [ len8 ] ; buffer . fillBytes ( binary , 0 , len8 ) ; value = binary ; javaType = Types . VARBINARY ; length = len8 ; break ; } case 2 : { /* BLOB / TEXT */
final int len16 = buffer . getUint16 ( ) ; byte [ ] binary = new byte [ len16 ] ; buffer . fillBytes ( binary , 0 , len16 ) ; value = binary ; javaType = Types . LONGVARBINARY ; length = len16 ; break ; } case 3 : { /* MEDIUMBLOB / MEDIUMTEXT */
final int len24 = buffer . getUint24 ( ) ; byte [ ] binary = new byte [ len24 ] ; buffer . fillBytes ( binary , 0 , len24 ) ; value = binary ; javaType = Types . LONGVARBINARY ; length = len24 ; break ; } case 4 : { /* LONGBLOB / LONGTEXT */
final int len32 = ( int ) buffer . getUint32 ( ) ; byte [ ] binary = new byte [ len32 ] ; buffer . fillBytes ( binary , 0 , len32 ) ; value = binary ; javaType = Types . LONGVARBINARY ; length = len32 ; break ; } default : throw new IllegalArgumentException ( "!! Unknown BLOB packlen = " + meta ) ; } break ; } case LogEvent . MYSQL_TYPE_VARCHAR : case LogEvent . MYSQL_TYPE_VAR_STRING : { /* * Except for the data length calculation , MYSQL _ TYPE _ VARCHAR ,
* MYSQL _ TYPE _ VAR _ STRING and MYSQL _ TYPE _ STRING are handled the
* same way . */
len = meta ; if ( len < 256 ) { len = buffer . getUint8 ( ) ; } else { len = buffer . getUint16 ( ) ; } if ( isBinary ) { // fixed issue # 66 , binary类型在binlog中为var _ string
/* fill binary */
byte [ ] binary = new byte [ len ] ; buffer . fillBytes ( binary , 0 , len ) ; javaType = Types . VARBINARY ; value = binary ; } else { value = buffer . getFullString ( len , charsetName ) ; javaType = Types . VARCHAR ; } length = len ; break ; } case LogEvent . MYSQL_TYPE_STRING : { if ( len < 256 ) { len = buffer . getUint8 ( ) ; } else { len = buffer . getUint16 ( ) ; } if ( isBinary ) { /* fill binary */
byte [ ] binary = new byte [ len ] ; buffer . fillBytes ( binary , 0 , len ) ; javaType = Types . BINARY ; value = binary ; } else { value = buffer . getFullString ( len , charsetName ) ; javaType = Types . CHAR ; // Types . VARCHAR ;
} length = len ; break ; } case LogEvent . MYSQL_TYPE_JSON : { switch ( meta ) { case 1 : { len = buffer . getUint8 ( ) ; break ; } case 2 : { len = buffer . getUint16 ( ) ; break ; } case 3 : { len = buffer . getUint24 ( ) ; break ; } case 4 : { len = ( int ) buffer . getUint32 ( ) ; break ; } default : throw new IllegalArgumentException ( "!! Unknown JSON packlen = " + meta ) ; } if ( partialBits . get ( 1 ) ) { // print _ json _ diff
int position = buffer . position ( ) ; StringBuilder builder = JsonDiffConversion . print_json_diff ( buffer , len , columnName , columnIndex , charsetName ) ; value = builder . toString ( ) ; buffer . position ( position + len ) ; } else { if ( 0 == len ) { // fixed issue # 1 by lava , json column of zero length
// has no
// value , value parsing should be skipped
value = "" ; } else { int position = buffer . position ( ) ; Json_Value jsonValue = JsonConversion . parse_value ( buffer . getUint8 ( ) , buffer , len - 1 , charsetName ) ; StringBuilder builder = new StringBuilder ( ) ; jsonValue . toJsonString ( builder , charsetName ) ; value = builder . toString ( ) ; buffer . position ( position + len ) ; } } javaType = Types . VARCHAR ; length = len ; break ; } case LogEvent . MYSQL_TYPE_GEOMETRY : { /* * MYSQL _ TYPE _ GEOMETRY : copy from BLOB or TEXT */
switch ( meta ) { case 1 : len = buffer . getUint8 ( ) ; break ; case 2 : len = buffer . getUint16 ( ) ; break ; case 3 : len = buffer . getUint24 ( ) ; break ; case 4 : len = ( int ) buffer . getUint32 ( ) ; break ; default : throw new IllegalArgumentException ( "!! Unknown MYSQL_TYPE_GEOMETRY packlen = " + meta ) ; } /* fill binary */
byte [ ] binary = new byte [ len ] ; buffer . fillBytes ( binary , 0 , len ) ; /* Warning unsupport cloumn type */
// logger . warn ( String . format ( " ! ! Unsupport column type MYSQL _ TYPE _ GEOMETRY : meta = % d ( % 04X ) , len = % d " ,
// meta ,
// meta ,
// len ) ) ;
javaType = Types . BINARY ; value = binary ; length = len ; break ; } default : logger . error ( String . format ( "!! Don't know how to handle column type=%d meta=%d (%04X)" , type , meta , meta ) ) ; javaType = Types . OTHER ; value = null ; length = 0 ; } return value ; |
public class PersistentUserManagedEhcache { /** * { @ inheritDoc } */
@ Override public Map < K , V > getAll ( Set < ? extends K > keys ) throws BulkCacheLoadingException { } } | return cache . getAll ( keys ) ; |
public class ResourceManager { /** * Registers a new TaskExecutor .
* @ param taskExecutorGateway to communicate with the registering TaskExecutor
* @ param taskExecutorAddress address of the TaskExecutor
* @ param taskExecutorResourceId ResourceID of the TaskExecutor
* @ param dataPort port used for data transfer
* @ param hardwareDescription of the registering TaskExecutor
* @ return RegistrationResponse */
private RegistrationResponse registerTaskExecutorInternal ( TaskExecutorGateway taskExecutorGateway , String taskExecutorAddress , ResourceID taskExecutorResourceId , int dataPort , HardwareDescription hardwareDescription ) { } } | WorkerRegistration < WorkerType > oldRegistration = taskExecutors . remove ( taskExecutorResourceId ) ; if ( oldRegistration != null ) { // TODO : : suggest old taskExecutor to stop itself
log . debug ( "Replacing old registration of TaskExecutor {}." , taskExecutorResourceId ) ; // remove old task manager registration from slot manager
slotManager . unregisterTaskManager ( oldRegistration . getInstanceID ( ) ) ; } final WorkerType newWorker = workerStarted ( taskExecutorResourceId ) ; if ( newWorker == null ) { log . warn ( "Discard registration from TaskExecutor {} at ({}) because the framework did " + "not recognize it" , taskExecutorResourceId , taskExecutorAddress ) ; return new RegistrationResponse . Decline ( "unrecognized TaskExecutor" ) ; } else { WorkerRegistration < WorkerType > registration = new WorkerRegistration < > ( taskExecutorGateway , newWorker , dataPort , hardwareDescription ) ; log . info ( "Registering TaskManager with ResourceID {} ({}) at ResourceManager" , taskExecutorResourceId , taskExecutorAddress ) ; taskExecutors . put ( taskExecutorResourceId , registration ) ; taskManagerHeartbeatManager . monitorTarget ( taskExecutorResourceId , new HeartbeatTarget < Void > ( ) { @ Override public void receiveHeartbeat ( ResourceID resourceID , Void payload ) { // the ResourceManager will always send heartbeat requests to the
// TaskManager
} @ Override public void requestHeartbeat ( ResourceID resourceID , Void payload ) { taskExecutorGateway . heartbeatFromResourceManager ( resourceID ) ; } } ) ; return new TaskExecutorRegistrationSuccess ( registration . getInstanceID ( ) , resourceId , clusterInformation ) ; } |
public class XBELConverter { /** * Create a new JAXB { @ link Marshaller } instance to handle conversion to
* XBEL .
* @ return a new { @ link Marshaller } instance
* @ throws JAXBException Thrown if an error was encountered creating the
* JAXB { @ link Marshaller }
* @ throws PropertyException Thrown if an error was encountered setting
* properties on the { @ link Marshaller } */
private Marshaller createNewMarshaller ( ) throws JAXBException , PropertyException { } } | final Marshaller marshaller = ctxt . createMarshaller ( ) ; marshaller . setProperty ( JAXB_ENCODING , UTF_8 ) ; marshaller . setProperty ( JAXB_FORMATTED_OUTPUT , true ) ; marshaller . setProperty ( JAXB_SCHEMA_LOCATION , XBELConstants . SCHEMA_URI + " " + XBELConstants . SCHEMA_URL ) ; return marshaller ; |
public class SIPRegistrarSbb { /** * call backs from data source child sbb */
@ Override public void getBindingsResult ( int resultCode , List < RegistrationBinding > bindings ) { } } | ServerTransaction serverTransaction = getRegisterTransactionToReply ( ) ; if ( serverTransaction == null ) { tracer . warning ( "failed to find SIP server tx to send response" ) ; return ; } try { if ( resultCode < 300 ) { sendRegisterSuccessResponse ( resultCode , bindings , serverTransaction ) ; } else { sendErrorResponse ( resultCode , serverTransaction ) ; } } catch ( Exception e ) { tracer . severe ( "failed to send SIP response" , e ) ; } |
public class PhoneNumberUtil { /** * format phone number in DIN 5008 national format with cursor position handling .
* @ param pphoneNumber phone number as String to format with cursor position
* @ return formated phone number as String with new cursor position */
public final ValueWithPos < String > formatDin5008National ( final ValueWithPos < String > pphoneNumber ) { } } | return valueWithPosDefaults ( this . formatDin5008NationalWithPos ( this . parsePhoneNumber ( pphoneNumber ) ) , pphoneNumber ) ; |
public class CallCenterApp { /** * Prints a one line update on performance that can be printed
* periodically during a benchmark . */
public synchronized void printStatistics ( ) { } } | ClientStats stats = periodicStatsContext . fetchAndResetBaseline ( ) . getStats ( ) ; long time = Math . round ( ( stats . getEndTimestamp ( ) - benchmarkStartTS ) / 1000.0 ) ; System . out . printf ( "%02d:%02d:%02d " , time / 3600 , ( time / 60 ) % 60 , time % 60 ) ; System . out . printf ( "Throughput %d/s, " , stats . getTxnThroughput ( ) ) ; System . out . printf ( "Aborts/Failures %d/%d" , stats . getInvocationAborts ( ) , stats . getInvocationErrors ( ) ) ; System . out . printf ( "\n" ) ; |
public class ArrayUtil { /** * Returns the number of elements shared between the two arrays containing
* sets . < p >
* Return the number of elements shared by two column index arrays .
* This method assumes that each of these arrays contains a set ( each
* element index is listed only once in each index array ) . Otherwise the
* returned number will NOT represent the number of unique column indexes
* shared by both index array .
* @ param arra int [ ] ; first array of column indexes .
* @ param arrb int [ ] ; second array of column indexes
* @ return int ; number of elements shared by < code > a < / code > and < code > b < / code > */
public static int countCommonElements ( int [ ] arra , int [ ] arrb ) { } } | int k = 0 ; for ( int i = 0 ; i < arra . length ; i ++ ) { for ( int j = 0 ; j < arrb . length ; j ++ ) { if ( arra [ i ] == arrb [ j ] ) { k ++ ; } } } return k ; |
public class ResourceAdapterModuleMBeanImpl { /** * setResourceAdapterChild add a child of type ResourceAdapterMBeanImpl to this MBean .
* @ param key the String value which will be used as the key for the ResourceAdapterMBeanImpl item
* @ param ra the ResourceAdapterMBeanImpl value to be associated with the specified key
* @ return The previous value associated with key , or null if there was no mapping for key .
* ( A null return can also indicate that the map previously associated null with key . ) */
protected ResourceAdapterMBeanImpl setResourceAdapterChild ( String key , ResourceAdapterMBeanImpl ra ) { } } | return raMBeanChildrenList . put ( key , ra ) ; |
public class ImmutableMatrixFactory { /** * Returns an immutable matrix with the same values as the argument .
* @ param source the matrix containing the data for the new immutable matrix
* @ return an immutable matrix containing the same elements as { @ code source }
* @ throws NullPointerException if { @ code source } is { @ code null } */
public static ImmutableMatrix copy ( Matrix source ) { } } | if ( source instanceof ImmutableMatrix ) return ( ImmutableMatrix ) source ; if ( source . getColumnCount ( ) == 1 ) return copy ( ( Vector ) source ) ; if ( source . getRowCount ( ) == 2 && source . getColumnCount ( ) == 2 ) return new ImmutableMatrix2 ( ( Matrix2 ) source ) ; if ( source . getRowCount ( ) == 3 && source . getColumnCount ( ) == 3 ) return new ImmutableMatrix3 ( ( Matrix3 ) source ) ; return new ImmutableMatrix ( source ) ; |
public class TimeProvider { /** * Returns a non - decreasing number , assumed to be used as a " timestamp " .
* < p > Approximate system time interval between two calls of this method is retrievable via
* { @ link # systemTimeIntervalBetween ( long , long , TimeUnit ) } , applied to the returned values
* from those { @ code currentTime ( ) } calls .
* < p > Safe and scalable for concurrent use from multiple threads .
* @ return the current timestamp */
public static long currentTime ( ) { } } | long now = MILLISECONDS . toNanos ( millisecondSupplier . getAsLong ( ) ) ; while ( true ) { long lastTime = lastTimeHolder . get ( ) ; if ( now <= lastTime ) return lastTime ; if ( lastTimeHolder . compareAndSet ( lastTime , now ) ) return now ; } |
public class IntListUtil { /** * Converts an array of Integer objects to an array of primitives . */
public static int [ ] unbox ( Integer [ ] list ) { } } | if ( list == null ) { return null ; } int [ ] unboxed = new int [ list . length ] ; for ( int ii = 0 ; ii < list . length ; ii ++ ) { unboxed [ ii ] = list [ ii ] ; } return unboxed ; |
public class RemoteAsyncResultImpl {

    /**
     * Unexport this object so that it is not remotely accessible.
     * <p>The caller must be holding the monitor lock on the
     * RemoteAsyncResultReaper prior to calling this method if the async work
     * was successfully scheduled.
     */
    protected void unexportObject() { // d623593
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isDebugEnabled())
            Tr.debug(tc, "unexportObject: " + this);
        if (ivObjectID != null) {
            // Either the allowed timeout occurred or a method was called and we
            // know that the client no longer needs this server resource.
            try {
                ivRemoteRuntime.deactivateAsyncResult(ivObjectID);
            } catch (Throwable e) {
                // We failed to unexport the object. This should never happen, but
                // it's not fatal. Record the failure via FFDC and carry on.
                if (isTraceOn && tc.isDebugEnabled())
                    Tr.debug(tc, "unexportObject exception", e);
                FFDCFilter.processException(e, CLASS_NAME + ".unexportObject", "237", this);
            }
            // Clear the ID unconditionally so a later call is a no-op even if
            // deactivation failed above.
            this.ivObjectID = null;
        }
    }
}
public class ManagementEnforcer { /** * addNamedGroupingPolicy adds a named role inheritance rule to the current policy .
* If the rule already exists , the function returns false and the rule will not be added .
* Otherwise the function returns true by adding the new rule .
* @ param ptype the policy type , can be " g " , " g2 " , " g3 " , . .
* @ param params the " g " policy rule .
* @ return succeeds or not . */
public boolean addNamedGroupingPolicy ( String ptype , String ... params ) { } } | return addNamedGroupingPolicy ( ptype , Arrays . asList ( params ) ) ; |
public class DefaultMonetaryAmountFormat { /** * Formats a value of { @ code T } to a { @ code String } . { @ link java . util . Locale }
* passed defines the overall target { @ link Locale } . This locale state , how the
* { @ link MonetaryAmountFormat } should generally behave . The
* { @ link java . util . Locale } allows to configure the formatting and parsing
* in arbitrary details . The attributes that are supported are determined by
* the according { @ link MonetaryAmountFormat } implementation :
* @ param amount the amount to print , not { @ code null }
* @ return the string printed using the settings of this formatter
* @ throws UnsupportedOperationException if the formatter is unable to print */
public String format ( MonetaryAmount amount ) { } } | StringBuilder builder = new StringBuilder ( ) ; try { print ( builder , amount ) ; } catch ( IOException e ) { throw new IllegalStateException ( "Error foratting of " + amount , e ) ; } return builder . toString ( ) ; |
public class Jar { /** * Returns an attribute ' s map value from this JAR ' s manifest ' s main section .
* The attributes string value will be split on whitespace into map entries , and each entry will be split on ' = ' to get the key - value pair .
* The returned map may be safely modified .
* @ param name the attribute ' s name */
public Map < String , String > getMapAttribute ( String name , String defaultValue ) { } } | return mapSplit ( getAttribute ( name ) , defaultValue ) ; |
public class HessianFactory { /** * Creates a new Hessian 2.0 serializer . */
public Hessian2Output createHessian2Output ( ) { } } | Hessian2Output out = _freeHessian2Output . allocate ( ) ; if ( out == null ) { out = new Hessian2Output ( ) ; out . setSerializerFactory ( getSerializerFactory ( ) ) ; } return out ; |
public class PreferenceFragment { /** * Adds a new fragment to a builder , which allows to create wizard dialogs .
* @ param builder
* The builder , the fragment should be added to , as an instance of the class { @ link
* WizardDialog . Builder }
* @ param index
* The index of the fragment , which should be added */
private void addFragment ( @ NonNull final WizardDialog . Builder builder , final int index ) { } } | Bundle arguments = new Bundle ( ) ; arguments . putInt ( DialogFragment . INDEX_EXTRA , index ) ; CharSequence title = shouldHeaderBeShown ( ) ? null : String . format ( getString ( R . string . dialog_tab_text ) , index ) ; builder . addFragment ( title , DialogFragment . class , arguments ) ; |
public class ListGroupsResult { /** * Information about a group .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setGroups ( java . util . Collection ) } or { @ link # withGroups ( java . util . Collection ) } if you want to override the
* existing values .
* @ param groups
* Information about a group .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListGroupsResult withGroups ( GroupInformation ... groups ) { } } | if ( this . groups == null ) { setGroups ( new java . util . ArrayList < GroupInformation > ( groups . length ) ) ; } for ( GroupInformation ele : groups ) { this . groups . add ( ele ) ; } return this ; |
public class SSTableExport { /** * JSON Hash Key serializer
* @ param out The output steam to write data
* @ param value value to set as a key */
private static void writeKey ( PrintStream out , String value ) { } } | writeJSON ( out , value ) ; out . print ( ": " ) ; |
public class ZKPaths { /** * Given a full path , return the the individual parts , without slashes .
* The root path will return an empty list .
* @ param path the path
* @ return an array of parts */
public static List < String > split ( String path ) { } } | PathUtils . validatePath ( path ) ; return PATH_SPLITTER . splitToList ( path ) ; |
public class ListSecretsResult { /** * A list of the secrets in the account .
* @ param secretList
* A list of the secrets in the account . */
public void setSecretList ( java . util . Collection < SecretListEntry > secretList ) { } } | if ( secretList == null ) { this . secretList = null ; return ; } this . secretList = new java . util . ArrayList < SecretListEntry > ( secretList ) ; |
public class BaseLuceneStorage { /** * Build query to match entry ' s space - id and key .
* @ param spaceId
* @ param key
* @ return */
protected Query buildQuery ( String spaceId , String key ) { } } | BooleanQuery . Builder builder = new BooleanQuery . Builder ( ) ; if ( ! StringUtils . isBlank ( spaceId ) ) { builder . add ( new TermQuery ( new Term ( FIELD_SPACE_ID , spaceId . trim ( ) ) ) , Occur . MUST ) ; } if ( ! StringUtils . isBlank ( key ) ) { builder . add ( new TermQuery ( new Term ( FIELD_KEY , key . trim ( ) ) ) , Occur . MUST ) ; } return builder . build ( ) ; |
public class MetaBeanProperty { /** * Get the property of the given object .
* @ param object which to be got
* @ return the property of the given object
* @ throws RuntimeException if the property could not be evaluated */
public Object getProperty ( Object object ) { } } | MetaMethod getter = getGetter ( ) ; if ( getter == null ) { if ( field != null ) return field . getProperty ( object ) ; // TODO : create a WriteOnlyException class ?
throw new GroovyRuntimeException ( "Cannot read write-only property: " + name ) ; } return getter . invoke ( object , MetaClassHelper . EMPTY_ARRAY ) ; |
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcPreferredSurfaceCurveRepresentationToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class ReflectionUtil { /** * Returns the parameter { @ link Annotation } s for the
* given { @ link Method } .
* @ param method the { @ link Method }
* @ return the { @ link Annotation } s */
private static List < List < Annotation > > getParameterAnnotations ( Method method ) { } } | if ( methodParamAnnotationCache . containsKey ( method ) ) { return methodParamAnnotationCache . get ( method ) ; } List < List < Annotation > > annotations = new ArrayList < > ( ) ; for ( Annotation [ ] paramAnnotations : method . getParameterAnnotations ( ) ) { List < Annotation > listAnnotations = new ArrayList < > ( ) ; Collections . addAll ( listAnnotations , paramAnnotations ) ; annotations . add ( listAnnotations ) ; } annotations = Collections . unmodifiableList ( annotations ) ; methodParamAnnotationCache . put ( method , annotations ) ; return annotations ; |
public class PropertyLoaderFromResource { /** * Load properties from a resource .
* @ param resourceName
* @ param failOnResourceNotFoundOrNotLoaded when true , a ConfigException
* is raised if the resource cannot be found or loaded
* @ return a { @ link Properties } instance with the properties loaded from the resource ;
* might be empty if the resource is not found or if it cannot be loaded
* @ throws ConfigException if the resource cannot be found or loaded ,
* and failOnResourceNotFound is true
* @ see Properties # load ( InputStream ) */
private static Properties loadPropsFromResource ( String resourceName , boolean failOnResourceNotFoundOrNotLoaded ) { } } | Properties props = new Properties ( ) ; InputStream resource = PropertyLoaderFromResource . class . getResourceAsStream ( resourceName ) ; boolean resourceNotFound = ( resource == null ) ; if ( resourceNotFound ) { if ( failOnResourceNotFoundOrNotLoaded ) { throw new ConfigException ( "resource " + resourceName + " not found" ) ; } else { // if the resource is not found , return an empty Properties
logger . warn ( "Skipping resource " + resourceName + ": file not found." ) ; return props ; } } try { props . load ( resource ) ; } catch ( IOException e ) { if ( failOnResourceNotFoundOrNotLoaded ) throw new ConfigException ( "Cannot load properties from " + resourceName , e ) ; else logger . warn ( "Cannot load properties from " + resourceName + ": " + e . getMessage ( ) ) ; } return props ; |
public class TrustedCertificates { /** * Returns all signing policies */
public SigningPolicy [ ] getSigningPolicies ( ) { } } | if ( this . policyDNMap == null ) { return null ; } Collection values = this . policyDNMap . values ( ) ; return ( SigningPolicy [ ] ) this . policyDNMap . values ( ) . toArray ( new SigningPolicy [ values . size ( ) ] ) ; |
public class Parameters { /** * Gets a parameter whose value is a ( possibly empty ) comma - separated list of Strings , if
* present . */
public Optional < List < String > > getOptionalStringList ( final String param ) { } } | if ( isPresent ( param ) ) { return Optional . of ( getStringList ( param ) ) ; } return Optional . absent ( ) ; |
public class CSTransformer {

    /**
     * Apply the relationships to all of the nodes in the content spec. This should be
     * the last step since all nodes have to be converted to levels and topics before
     * this method can work.
     *
     * @param contentSpec the spec whose unique-id map is built for process relationships
     * @param nodes lookup from database node id to the already-converted spec Node
     * @param targetTopics lookup from target id to topic, used by process handling
     * @param relationshipFromNodes the database nodes that own outgoing relationships
     * @param processes processes whose topic relationships are resolved at the end
     * @param providerFactory source of the Topic/ServerSettings providers for processes
     */
    protected static void applyRelationships(final ContentSpec contentSpec, final Map<Integer, Node> nodes,
            final Map<String, SpecTopic> targetTopics, final List<CSNodeWrapper> relationshipFromNodes,
            final List<Process> processes, final DataProviderFactory providerFactory) {
        // Apply the user defined relationships stored in the database
        for (final CSNodeWrapper node : relationshipFromNodes) {
            boolean initialContentTopic = node.getNodeType() == CommonConstants.CS_NODE_INITIAL_CONTENT_TOPIC;
            boolean level = EntityUtilities.isNodeALevel(node);
            // In 1.3 or lower initial content relationships were stored on the topic,
            // however in 1.4+ they are now on the initial content level, so do the
            // migration here
            final SpecNodeWithRelationships fromNode;
            if (initialContentTopic) {
                final CSNodeWrapper parentNode = node.getParent();
                final SpecNodeWithRelationships parent = (SpecNodeWithRelationships) nodes.get(parentNode.getId());
                if (parent instanceof InitialContent) {
                    fromNode = parent;
                } else {
                    final Level parentLevel = (Level) parent;
                    if (parentLevel.getFirstSpecNode() instanceof InitialContent) {
                        // Reuse the existing initial-content container of the parent level.
                        fromNode = (SpecNodeWithRelationships) parentLevel.getFirstSpecNode();
                    } else {
                        // No initial-content container yet: create one and make it
                        // the first child of the parent level.
                        fromNode = new InitialContent(parent.getLineNumber(), CSConstants.LEVEL_INITIAL_CONTENT + ":");
                        if (parentLevel.getChildNodes().isEmpty()) {
                            parentLevel.appendChild(fromNode);
                        } else {
                            parentLevel.insertBefore(fromNode, parentLevel.getChildNodes().get(0));
                        }
                    }
                }
            } else {
                fromNode = (SpecNodeWithRelationships) nodes.get(node.getId());
            }
            // Check if we have any relationships to process
            if (node.getRelatedToNodes() == null || node.getRelatedToNodes().isEmpty()) continue;
            final List<CSRelatedNodeWrapper> relatedToNodes = node.getRelatedToNodes().getItems();
            // Sort the relationships into the correct order based on the sort variable
            Collections.sort(relatedToNodes, new CSRelatedNodeSorter());
            // Add the relationships to the topic
            for (final CSRelatedNodeWrapper relatedToNode : relatedToNodes) {
                final Node toNode = nodes.get(relatedToNode.getId());
                if (toNode == null) {
                    throw new IllegalStateException("The related node does not exist in the content specification");
                } else if (toNode instanceof Level) {
                    // Relationships to levels
                    final Level toLevel = (Level) toNode;
                    // Ensure that the level has a target id, if not create one.
                    if (toLevel.getTargetId() == null) {
                        toLevel.setTargetId("T00" + relatedToNode.getId());
                    }
                    // Add the relationship (titles are omitted for level/initial-content sources)
                    fromNode.addRelationshipToTarget(toLevel,
                            RelationshipType.getRelationshipType(relatedToNode.getRelationshipType()),
                            initialContentTopic || level ? null : toLevel.getTitle());
                } else {
                    // Relationships to topics
                    final SpecTopic toSpecTopic = (SpecTopic) toNode;
                    final String title = toSpecTopic.getTitle();
                    // Add the relationship, choosing target vs topic form by the stored mode
                    if (relatedToNode.getRelationshipMode().equals(CommonConstants.CS_RELATIONSHIP_MODE_TARGET)) {
                        fromNode.addRelationshipToTarget(toSpecTopic,
                                RelationshipType.getRelationshipType(relatedToNode.getRelationshipType()),
                                initialContentTopic || level ? null : title);
                    } else {
                        fromNode.addRelationshipToTopic(toSpecTopic,
                                RelationshipType.getRelationshipType(relatedToNode.getRelationshipType()),
                                initialContentTopic || level ? null : title);
                    }
                }
            }
        }
        // Create the unique id map
        final Map<String, SpecTopic> uniqueIdSpecTopicMap = ContentSpecUtilities.getUniqueIdSpecTopicMap(contentSpec);
        // Apply the process relationships
        for (final Process process : processes) {
            process.processTopics(uniqueIdSpecTopicMap, targetTopics,
                    providerFactory.getProvider(TopicProvider.class),
                    providerFactory.getProvider(ServerSettingsProvider.class));
        }
    }
}
public class RunningWorkers {

    /**
     * Registers a newly launched worker with the scheduler state.
     * Concurrency: Called by multiple threads.
     * Parameter: Called exactly once per vortexWorkerManager.
     */
    void addWorker(final VortexWorkerManager vortexWorkerManager) {
        lock.lock();
        try {
            if (!terminated) {
                // Ignore workers whose removal raced ahead of their addition.
                if (!removedBeforeAddedWorkers.contains(vortexWorkerManager.getId())) {
                    this.runningWorkers.put(vortexWorkerManager.getId(), vortexWorkerManager);
                    this.schedulingPolicy.workerAdded(vortexWorkerManager);
                    this.workerAggregateFunctionMap.put(vortexWorkerManager.getId(), new HashSet<Integer>());
                    // Notify (possibly) waiting scheduler
                    noWorkerOrResource.signal();
                }
            } else {
                // Already shut down: terminate the worker instead of tracking it.
                vortexWorkerManager.terminate();
            }
        } finally {
            lock.unlock();
        }
    }
}
public class VirtualHostConfiguration { /** * Return the port that should be used for secure redirect
* from http to https .
* Host aliases are paired : * : 80 < - > * : 443 . If the VirtualHost supports
* several aliases , they may not all be paired for failover .
* @ param hostAlias The http host alias to find the partner for .
* @ return secure https port associated with the given alias ( via endpoint configuration ) ,
* or - 1 if unconfigured . */
public int getSecureHttpPort ( String hostAlias ) { } } | com . ibm . wsspi . http . VirtualHost local = config ; if ( local == null ) return - 1 ; else return local . getSecureHttpPort ( hostAlias ) ; |
public class TsdbQuery { /** * Sets the end time for the query . If this isn ' t set , the system time will be
* used when the query is executed or { @ link # getEndTime } is called
* @ param timestamp Unix epoch timestamp in seconds or milliseconds
* @ throws IllegalArgumentException if the timestamp is invalid or less
* than the start time ( if set ) */
@ Override public void setEndTime ( final long timestamp ) { } } | if ( timestamp < 0 || ( ( timestamp & Const . SECOND_MASK ) != 0 && timestamp > 9999999999999L ) ) { throw new IllegalArgumentException ( "Invalid timestamp: " + timestamp ) ; } else if ( start_time != UNSET && timestamp <= getStartTime ( ) ) { throw new IllegalArgumentException ( "new end time (" + timestamp + ") is less than or equal to start time: " + getStartTime ( ) ) ; } end_time = timestamp ; |
public class UnsupportedAvailabilityZoneException { /** * The supported Availability Zones for your account . Choose subnets in these Availability Zones for your cluster .
* @ param validZones
* The supported Availability Zones for your account . Choose subnets in these Availability Zones for your
* cluster . */
@ com . fasterxml . jackson . annotation . JsonProperty ( "validZones" ) public void setValidZones ( java . util . Collection < String > validZones ) { } } | if ( validZones == null ) { this . validZones = null ; return ; } this . validZones = new java . util . ArrayList < String > ( validZones ) ; |
public class TableBodyBox {

    /**
     * Calculates new cell positions regarding the rowspans.
     * Walks the rows column by column, assigning each cell its (column, row)
     * position while advancing per-row column cursors past any spans, then
     * rebuilds the dense {@code cells[col][row]} mesh.
     */
    private void calcOffsets() {
        // Find the longest line
        int rowidx[] = new int[rows.size()];
        int maxCells = 0;
        for (int r = 0; r < rows.size(); r++) {
            int count = rows.elementAt(r).getCellCount();
            if (count > maxCells) maxCells = count;
            rowidx[r] = 0;               // column cursor for row r
            rows.elementAt(r).rewind();  // restart the row's cell iterator
        }
        // determine the cell positions
        int col = 0;
        boolean cell_found = true;
        while (cell_found) {
            cell_found = false;
            int r = 0;
            while (r < rows.size()) {
                TableRowBox row = rows.elementAt(r);
                if (row.hasNext()) {
                    cell_found = true;
                    if (rowidx[r] <= col) // we are not in the middle of some colspan for this row
                    {
                        // get the current element
                        TableCellBox cell = row.next();
                        // set the new position
                        cell.setCellPosition(rowidx[r], r);
                        // move the row indices: every spanned row advances by the
                        // cell's colspan; spans that run past the last row are clipped
                        for (int nr = r; nr < r + cell.getRowspan(); nr++) {
                            if (nr < rows.size()) {
                                rowidx[nr] += cell.getColspan();
                                if (rowidx[nr] > numCols) numCols = rowidx[nr];
                            } else
                                cell.rowspan--;
                        }
                        r += cell.getRowspan();
                    } else
                        r++;
                } else
                    r++;
            }
            col++;
        }
        // build the cell array
        for (int i = 0; i < rows.size(); i++) rows.elementAt(i).rewind();
        cells = new TableCellBox[numCols][rows.size()];
        for (int c = 0; c < maxCells; c++) {
            for (int r = 0; r < rows.size(); r++) {
                TableRowBox row = rows.elementAt(r);
                if (row.hasNext()) {
                    TableCellBox cell = row.next();
                    // clip spans that would exceed the table bounds
                    if (cell.getRow() + cell.getRowspan() > rows.size()) cell.setRowspan(rows.size() - cell.getRow());
                    if (cell.getColumn() + cell.getColspan() > numCols) cell.setColspan(numCols - cell.getColumn());
                    int endrow = cell.getRow() + cell.getRowspan();
                    int endcol = cell.getColumn() + cell.getColspan();
                    // add it to the mesh (the same cell occupies every spanned slot)
                    for (int nr = cell.getRow(); nr < endrow; nr++)
                        for (int nc = cell.getColumn(); nc < endcol; nc++)
                            cells[nc][nr] = cell;
                }
            }
        }
    }
}
public class BasicBinder { /** * Register an UnMarshaller with the given source and target class .
* The unmarshaller is used as follows : Instances of the source can be marshalled into the target class .
* @ param key Converter Key to use
* @ param converter The FromUnmarshaller to be registered */
public final < S , T > void registerUnmarshaller ( ConverterKey < S , T > key , FromUnmarshaller < S , T > converter ) { } } | registerConverter ( key . invert ( ) , new FromUnmarshallerConverter < S , T > ( converter ) ) ; |
public class OptionsBuilder { /** * Adds to the excluded option
* @ param excludes
* List of excluded paths
* @ return updated OptionBuilder instance
* @ see Options # EXCLUDES */
public OptionsBuilder excludes ( String ... excludes ) { } } | return excludes != null ? excludes ( Arrays . asList ( excludes ) ) : this ; |
public class LogSubscriptionMarshaller { /** * Marshall the given parameter object . */
public void marshall ( LogSubscription logSubscription , ProtocolMarshaller protocolMarshaller ) { } } | if ( logSubscription == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( logSubscription . getDirectoryId ( ) , DIRECTORYID_BINDING ) ; protocolMarshaller . marshall ( logSubscription . getLogGroupName ( ) , LOGGROUPNAME_BINDING ) ; protocolMarshaller . marshall ( logSubscription . getSubscriptionCreatedDateTime ( ) , SUBSCRIPTIONCREATEDDATETIME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class RelationalOperations {

    /**
     * Determines whether the linear paths of multipathA and multipathB overlap,
     * i.e. their intersection has dimension 1 while neither path is contained
     * in the other (within the given tolerance).
     */
    private static boolean linearPathOverlapsLinearPath_(MultiPath multipathA, MultiPath multipathB, double tolerance) {
        // Overlap requires a 1-dimensional intersection; points-only contact is not enough.
        int dim = linearPathIntersectsLinearPathMaxDim_(multipathA, multipathB, tolerance, null);
        if (dim < 1)
            return false;
        Envelope2D env_a = new Envelope2D(), env_b = new Envelope2D();
        multipathA.queryEnvelope2D(env_a);
        multipathB.queryEnvelope2D(env_b);
        // Cheap envelope tests: does each geometry's interior reach outside the other's envelope?
        boolean bIntAExtB = interiorEnvExteriorEnv_(env_a, env_b, tolerance);
        boolean bIntBExtA = interiorEnvExteriorEnv_(env_b, env_a, tolerance);
        // Both stick out of each other: neither can contain the other, so they overlap.
        if (bIntAExtB && bIntBExtA)
            return true;
        // Only one sticks out: overlap unless the other is fully within it.
        if (bIntAExtB && !bIntBExtA)
            return !linearPathWithinLinearPath_(multipathB, multipathA, tolerance, false);
        if (bIntBExtA && !bIntAExtB)
            return !linearPathWithinLinearPath_(multipathA, multipathB, tolerance, false);
        // Envelopes inconclusive: overlap iff neither path is within the other.
        return !linearPathWithinLinearPath_(multipathA, multipathB, tolerance, false)
                && !linearPathWithinLinearPath_(multipathB, multipathA, tolerance, false);
    }
}
public class ConsistentKeyLocker {

    /**
     * Try to write a lock record remotely up to the configured number of times.
     * If the store produces {@link TemporaryLockingException}, then we'll call
     * mutate again to add a new column with an updated timestamp and to delete
     * the column that tried to write when the store threw an exception. We
     * continue like that up to the retry limit. If the store throws anything
     * else, such as an unchecked exception or a
     * {@link com.thinkaurelius.titan.diskstorage.PermanentBackendException},
     * then we'll try to delete whatever we added and return without further retries.
     *
     * @param lockID lock to acquire
     * @param txh transaction
     * @return the timestamp, in nanoseconds since UNIX Epoch, on the lock
     *         column that we successfully wrote to the store
     * @throws TemporaryLockingException if the lock retry count is exceeded without
     *         successfully writing the lock in less than the wait limit
     * @throws Throwable if the storage layer throws anything else
     */
    @Override
    protected ConsistentKeyLockStatus writeSingleLock(KeyColumn lockID, StoreTransaction txh) throws Throwable {
        final StaticBuffer lockKey = serializer.toLockKey(lockID.getKey(), lockID.getColumn());
        StaticBuffer oldLockCol = null;
        for (int i = 0; i < lockRetryCount; i++) {
            // Each attempt also deletes the previous attempt's column (oldLockCol).
            WriteResult wr = tryWriteLockOnce(lockKey, oldLockCol, txh);
            if (wr.isSuccessful() && wr.getDuration().compareTo(lockWait) <= 0) {
                // Write landed fast enough to be trusted: derive expiry from the write time.
                final Instant writeInstant = wr.getWriteTimestamp();
                final Instant expireInstant = writeInstant.plus(lockExpire);
                return new ConsistentKeyLockStatus(writeInstant, expireInstant);
            }
            oldLockCol = wr.getLockCol();
            // May rethrow for permanent failures, ending the retry loop early.
            handleMutationFailure(lockID, lockKey, wr, txh);
        }
        // Retries exhausted: best-effort cleanup of the last written column.
        tryDeleteLockOnce(lockKey, oldLockCol, txh);
        // TODO log exception or successful too-slow write here
        throw new TemporaryBackendException("Lock write retry count exceeded");
    }
}
public class SheepdogRedundancy { @ Nonnull public static SheepdogRedundancy full ( @ Nonnull SheepdogRedundancyFull full ) { } } | SheepdogRedundancy self = new SheepdogRedundancy ( ) ; self . type = SheepdogRedundancyType . full ; self . full = full ; return self ; |
public class PipelineBuilder {

    /**
     * Creates a pull processor.
     * The API server is started by this method.
     * The returned pull processor is thread-safe.
     * Note that the history will not be used.
     *
     * @return A PullMetricRegistryInstance that performs a scrape and evaluates rules.
     * @throws Exception indicating construction failed.
     */
    public PullProcessorPipeline build() throws Exception {
        ApiServer api = null;
        PullMetricRegistryInstance registry = null;
        try {
            // Use the caller-supplied endpoint registration, or create (and later
            // start) a local API server when none was given.
            final EndpointRegistration epr;
            if (epr_ == null)
                epr = api = new ApiServer(api_sockaddr_);
            else
                epr = epr_;
            registry = cfg_.create(PullMetricRegistryInstance::new, epr);
            // Only start the server once the registry creation succeeded.
            if (api != null) api.start();
            return new PullProcessorPipeline(registry);
        } catch (Exception ex) {
            // Construction failed: tear down whatever was created, attaching any
            // cleanup failures as suppressed exceptions.
            // Close API server.
            try {
                if (api != null) api.close();
            } catch (Exception ex1) {
                ex.addSuppressed(ex1);
            }
            // Close registry.
            try {
                if (registry != null) registry.close();
            } catch (Exception ex1) {
                ex.addSuppressed(ex1);
            }
            throw ex;
        }
    }
}
public class PromiseSampleService { /** * these methods are identical . . . */
public IPromise < String > getDataSimple ( ) { } } | Promise result = new Promise ( ) ; result . complete ( "Data" , null ) ; return result ; |
public class PathHelper { /** * Return part of the given path before the last separator or the root path ( " / " ) if no separator was found
* @ param path
* @ return name */
public static String getFolderPath ( final String path ) { } } | String cleanedPath = clean ( path ) ; if ( cleanedPath != null && cleanedPath . contains ( PATH_SEP ) ) { return PATH_SEP + StringUtils . substringBeforeLast ( cleanedPath , PATH_SEP ) ; } else { return PATH_SEP ; } |
public class CmsNullIgnoringConcurrentMap { /** * Sets the given map value for the given key , unless either of them is null . < p >
* If the value is null ,
* @ param key the key
* @ param value the value
* @ return the old value
* @ see java . util . Map # put ( java . lang . Object , java . lang . Object ) */
public V put ( K key , V value ) { } } | if ( ( key != null ) && ( value != null ) ) { return m_internalMap . put ( key , value ) ; } Exception e = new Exception ( ) ; try { // we want to print a stack trace when null is used as a key / value
throw e ; } catch ( Exception e2 ) { e = e2 ; } if ( key == null ) { LOG . warn ( "Invalid null key in map" , e ) ; return null ; } if ( value == null ) { LOG . warn ( "Invalid null value in map" , e ) ; return m_internalMap . remove ( key ) ; } return null ; |
public class cacheobject { /** * Use this API to save cacheobject resources . */
public static base_responses save ( nitro_service client , cacheobject resources [ ] ) throws Exception { } } | base_responses result = null ; if ( resources != null && resources . length > 0 ) { cacheobject saveresources [ ] = new cacheobject [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { saveresources [ i ] = new cacheobject ( ) ; saveresources [ i ] . locator = resources [ i ] . locator ; } result = perform_operation_bulk_request ( client , saveresources , "save" ) ; } return result ; |
public class DeleteMembersRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteMembersRequest deleteMembersRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( deleteMembersRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteMembersRequest . getAccountIds ( ) , ACCOUNTIDS_BINDING ) ; protocolMarshaller . marshall ( deleteMembersRequest . getDetectorId ( ) , DETECTORID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class KickflipApiClient {

    /**
     * Get public user info.
     *
     * @param username The Kickflip user's username
     * @param cb This callback will receive a User in
     *        {@link io.kickflip.sdk.api.KickflipCallback#onSuccess(io.kickflip.sdk.api.json.Response)}
     *        or an Exception
     *        {@link io.kickflip.sdk.api.KickflipCallback#onError(io.kickflip.sdk.exception.KickflipException)}.
     */
    public void getUserInfo(String username, final KickflipCallback cb) {
        // Bail out early (cb is notified by the assertion helper) when no active user exists.
        if (!assertActiveUserAvailable(cb)) return;
        GenericData data = new GenericData();
        data.put("username", username);
        // Intermediate callback logs the outcome, then forwards it to the caller's callback.
        post(GET_USER_PUBLIC, new UrlEncodedContent(data), User.class, new KickflipCallback() {
            @Override
            public void onSuccess(final Response response) {
                if (VERBOSE) Log.i(TAG, "getUserInfo response: " + response);
                postResponseToCallback(cb, response);
            }

            @Override
            public void onError(final KickflipException error) {
                Log.w(TAG, "getUserInfo Error: " + error);
                postExceptionToCallback(cb, error);
            }
        });
    }
}
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcShellBasedSurfaceModel ( ) { } } | if ( ifcShellBasedSurfaceModelEClass == null ) { ifcShellBasedSurfaceModelEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 513 ) ; } return ifcShellBasedSurfaceModelEClass ; |
public class HibernateUpdateClause { /** * Set the lock mode for the given path .
* @ return the current object */
@ SuppressWarnings ( "unchecked" ) public HibernateUpdateClause setLockMode ( Path < ? > path , LockMode lockMode ) { } } | lockModes . put ( path , lockMode ) ; return this ; |
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EEnum getIfcDerivedUnitEnum ( ) { } } | if ( ifcDerivedUnitEnumEEnum == null ) { ifcDerivedUnitEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 809 ) ; } return ifcDerivedUnitEnumEEnum ; |
public class JSONObject { /** * Sets a property on the target bean . < br >
* Bean may be a Map or a POJO . */
private static void setProperty ( Object bean , String key , Object value , JsonConfig jsonConfig ) throws Exception { } } | PropertySetStrategy propertySetStrategy = jsonConfig . getPropertySetStrategy ( ) != null ? jsonConfig . getPropertySetStrategy ( ) : PropertySetStrategy . DEFAULT ; propertySetStrategy . setProperty ( bean , key , value , jsonConfig ) ; |
public class HttpRequestManager {

    /**
     * Submits the request to the network queue; the request itself then runs
     * synchronously on that queue's worker (note: this method returns
     * immediately — the original "handles request synchronously" wording
     * referred to the dispatchSync call, not to this method).
     */
    void dispatchRequest(final HttpRequest request) {
        networkQueue.dispatchAsync(new DispatchTask() {
            @Override
            protected void execute() {
                // Runs on the network queue thread.
                request.dispatchSync(networkQueue);
            }
        });
    }
}
public class DescribeSpotPriceHistoryRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < DescribeSpotPriceHistoryRequest > getDryRunRequest ( ) { } } | Request < DescribeSpotPriceHistoryRequest > request = new DescribeSpotPriceHistoryRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ; |
public class CmsADECache { /** * Caches the given group container under the given key and for the given project . < p >
* @ param key the cache key
* @ param groupContainer the object to cache
* @ param online if to cache in online or offline project */
public void setCacheGroupContainer ( String key , CmsXmlGroupContainer groupContainer , boolean online ) { } } | try { m_lock . writeLock ( ) . lock ( ) ; if ( online ) { m_groupContainersOnline . put ( key , groupContainer ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_DEBUG_CACHE_SET_ONLINE_2 , new Object [ ] { key , groupContainer } ) ) ; } } else { m_groupContainersOffline . put ( key , groupContainer ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_DEBUG_CACHE_SET_OFFLINE_2 , new Object [ ] { key , groupContainer } ) ) ; } } } finally { m_lock . writeLock ( ) . unlock ( ) ; } |
public class SiteSwitcherHandlerInterceptor { /** * Creates a site switcher that redirects to a custom domain for normal site requests that either
* originate from a mobile device or indicate a mobile site preference .
* Uses a { @ link CookieSitePreferenceRepository } that saves a cookie that is shared between the two domains .
* @ param normalServerName the ' normal ' domain name e . g . " normal . com "
* @ param mobileServerName the ' mobile domain name e . g . " mobile . com "
* @ param cookieDomain the name to use for saving the cookie
* @ see # standard ( String , String , String , Boolean )
* @ see # standard ( String , String , String , String )
* @ see StandardSiteUrlFactory */
public static SiteSwitcherHandlerInterceptor standard ( String normalServerName , String mobileServerName , String cookieDomain ) { } } | return new SiteSwitcherHandlerInterceptor ( StandardSiteSwitcherHandlerFactory . standard ( normalServerName , mobileServerName , cookieDomain ) ) ; |
public class DebugViewActivity { /** * 20 MB */
private static OkHttpClient . Builder createOkHttpClientBuilder ( Application app ) { } } | // Install an HTTP cache in the application cache directory .
File cacheDir = new File ( app . getCacheDir ( ) , "okhttp3" ) ; Cache cache = new Cache ( cacheDir , DISK_CACHE_SIZE ) ; return new OkHttpClient . Builder ( ) . cache ( cache ) . addInterceptor ( LogsModule . chuckInterceptor ( app ) ) . addInterceptor ( NetworkQualityModule . interceptor ( app ) ) . readTimeout ( 10 , TimeUnit . SECONDS ) . writeTimeout ( 10 , TimeUnit . SECONDS ) . connectTimeout ( 10 , TimeUnit . SECONDS ) ; |
/**
 * Returns how many times a ray cast from point (x, y) crosses the quadratic
 * curve defined by endpoints (x1, y1), (x2, y2) and control point (cx, cy).
 *
 * @return a signed crossing count (0 when the ray cannot intersect the curve)
 */
public static int crossQuad(float x1, float y1, float cx, float cy, float x2, float y2, float x, float y) {
    // Quick rejection: the point lies entirely LEFT, RIGHT or below (UP) the
    // curve's bounding triangle, or the curve is degenerate (EMPTY: all
    // x-coordinates equal) — no crossing is possible.
    if ((x < x1 && x < cx && x < x2) || (x > x1 && x > cx && x > x2)
            || (y > y1 && y > cy && y > y2) || (x1 == cx && cx == x2)) {
        return 0;
    }
    // DOWN: the point is strictly below every curve y-coordinate, so the ray
    // crosses iff x lies strictly between the endpoints' x-coordinates; sign
    // reflects the curve's direction (left-to-right = +1, right-to-left = -1).
    if (y < y1 && y < cy && y < y2 && x != x1 && x != x2) {
        if (x1 < x2) {
            return x1 < x && x < x2 ? 1 : 0;
        }
        return x2 < x && x < x1 ? -1 : 0;
    }
    // INSIDE the bounding region: solve the curve equation exactly. Work in
    // coordinates relative to the first endpoint.
    QuadCurve c = new QuadCurve(x1, y1, cx, cy, x2, y2);
    float px = x - x1, py = y - y1;
    float[] res = new float[3];
    int rc = c.solvePoint(res, px);
    // NOTE(review): py is passed for both range arguments — presumably cross()
    // treats them as an [lo, hi] interval collapsed to a point; confirm against
    // QuadCurve.cross's contract.
    return c.cross(res, rc, py, py);
}
public class Transformers { /** * Buffers the source { @ link Flowable } into { @ link List } s , emitting Lists when
* the size of a list reaches { @ code maxSize } or if the elapsed time since last
* emission from the source reaches the given duration .
* { @ link Schedulers # computation } is used for scheduling an inserted emission .
* @ param maxSize
* max size of emitted lists
* @ param duration
* buffered list is emitted if the elapsed time since last emission
* from the source reaches this duration
* @ param unit
* unit of { @ code duration }
* @ param < T >
* type of the source stream items
* @ return source with operator applied */
public static < T > FlowableTransformer < T , List < T > > buffer ( final int maxSize , final long duration , final TimeUnit unit ) { } } | return buffer ( maxSize , Functions . constant ( duration ) , unit ) ; |
public class ForwardingClient { /** * Stop all remote forwarding .
* @ param killActiveTunnels
* Should any active tunnels be closed .
* @ throws SshException */
public synchronized void cancelAllRemoteForwarding ( boolean killActiveTunnels ) throws SshException { } } | if ( remoteforwardings == null ) { return ; } for ( Enumeration < String > e = remoteforwardings . keys ( ) ; e . hasMoreElements ( ) ; ) { String host = ( String ) e . nextElement ( ) ; if ( host == null ) return ; try { int idx = host . indexOf ( ':' ) ; int port = - 1 ; if ( idx == - 1 ) { port = Integer . parseInt ( host ) ; host = "" ; } else { port = Integer . parseInt ( host . substring ( idx + 1 ) ) ; host = host . substring ( 0 , idx ) ; } cancelRemoteForwarding ( host , port , killActiveTunnels ) ; } catch ( NumberFormatException nfe ) { } } |
/**
 * {@inheritDoc}
 *
 * Inserts the given entity via the configured {@code EntityHandler}, and
 * writes any database-generated key values back onto the entity's ID columns.
 *
 * @see jp.co.future.uroborosql.SqlAgent#insert(Object)
 */
@SuppressWarnings("unchecked")
@Override
public int insert(final Object entity) {
    @SuppressWarnings("rawtypes")
    EntityHandler handler = this.getEntityHandler();
    // Reject entities the configured handler does not know how to persist.
    if (!handler.getEntityType().isInstance(entity)) {
        throw new IllegalArgumentException("Entity type not supported");
    }
    try {
        Class<?> type = entity.getClass();
        TableMetadata metadata = handler.getMetadata(this.transactionManager, type);
        SqlContext context = handler.createInsertContext(this, metadata, type);
        context.setSqlKind(SqlKind.INSERT);
        // Retrieve column mappings annotated with the ID annotation.
        MappingColumn[] idColumns = MappingUtils.getIdMappingColumns(type);
        setGeneratedKeyColumns(context, idColumns, metadata);
        handler.setInsertParams(context, entity);
        int count = handler.doInsert(this, context, entity);
        // Copy generated key values back onto the entity, one per ID column,
        // in the same order the columns were registered above.
        if (idColumns != null && idColumns.length > 0) {
            BigDecimal[] ids = context.getGeneratedKeyValues();
            int idx = 0;
            for (MappingColumn col : idColumns) {
                BigDecimal id = ids[idx++];
                setEntityIdValue(entity, id, col);
            }
        }
        return count;
    } catch (SQLException e) {
        // Wrap checked SQL failures in the library's runtime exception,
        // tagged with the operation kind.
        throw new EntitySqlRuntimeException(SqlKind.INSERT, e);
    }
}
/**
 * Returns the next FASTA record, or {@literal null} if the end of the stream
 * is reached.
 *
 * <p>The original documentation claims this method is thread-safe.
 * NOTE(review): the {@code id++} below is an unsynchronized field increment;
 * thread-safety presumably rests on {@code takeRawRecord()}'s own locking —
 * confirm that it also serializes callers of this method, otherwise record
 * ids can collide under concurrency.</p>
 *
 * @return next FASTA record or {@literal null} at end of stream
 */
public FastaRecord<S> take() {
    RawFastaRecord rawRecord = takeRawRecord();
    // On EOF the raw reader signals exhaustion with null.
    if (rawRecord == null)
        return null;
    // Assign a sequential id and parse the raw sequence into the reader's alphabet.
    return new FastaRecord<>(id++, rawRecord.description, alphabet.parse(rawRecord.sequence));
}
public class TeasyExpectedConditions { /** * Expected condition to look for elements in frames that will return as soon as elements are found in any frame
* @ param locator
* @ return */
public static ExpectedCondition < List < WebElement > > visibilityOfFirstElements ( final By locator ) { } } | return new ExpectedCondition < List < WebElement > > ( ) { @ Override public List < WebElement > apply ( final WebDriver driver ) { return getFirstVisibleWebElements ( driver , null , locator ) ; } @ Override public String toString ( ) { return String . format ( "visibility of element located by %s" , locator ) ; } } ; |
/**
 * Renames this table via a raw {@code RENAME TABLE} statement.
 *
 * NOTE(review): the SQL is built by string concatenation of the table names.
 * Identifiers cannot be bound as parameters in DDL, but callers must ensure
 * {@code tableName}/{@code newTableName} never carry untrusted input, or
 * quote/validate them before calling.
 *
 * @param tableName current table name
 * @param newTableName new table name
 * @return always {@code true}; failures surface as a converted DBException
 * @throws DBException wrapping any underlying SQLException
 */
public boolean renameTable(String tableName, String newTableName) throws DBException {
    try {
        // When prepared statements are disabled for DDL, drop any cached
        // statement so a fresh plain Statement is created below.
        if (DBConstants.TRUE.equals((String) this.getDatabase().getProperties().get(SQLParams.NO_PREPARED_STATEMENTS_ON_CREATE)))
            this.setStatement(null, DBConstants.SQL_CREATE_TYPE);
        // Lazily create and cache a plain JDBC Statement for DDL execution.
        if (this.getStatement(DBConstants.SQL_CREATE_TYPE) == null)
            this.setStatement(((JdbcDatabase) this.getDatabase()).getJDBCConnection().createStatement(), DBConstants.SQL_CREATE_TYPE);
        String sql = "RENAME TABLE " + tableName + " TO " + newTableName;
        this.getStatement(DBConstants.SQL_CREATE_TYPE).execute(sql);
        // Mirror the setup: discard the statement again when caching is disabled.
        if (DBConstants.TRUE.equals((String) this.getDatabase().getProperties().get(SQLParams.NO_PREPARED_STATEMENTS_ON_CREATE)))
            this.setStatement(null, DBConstants.SQL_CREATE_TYPE);
    } catch (SQLException e) {
        throw this.getDatabase().convertError(e);
    }
    return true;
}
public class CrawlerPack { /** * 取得遠端格式為 HTML / Html5 的資料
* @ param uri required Apache Common VFS supported file systems and response HTML format content .
* @ return org . jsoup . nodes . Document */
public org . jsoup . nodes . Document getFromHtml ( String uri ) { } } | // 取回資料
String html = getFromRemote ( uri ) ; // 轉化為 Jsoup 物件
return htmlToJsoupDoc ( html ) ; |
public class JCRAssert { /** * Asserts the primary node type of the node
* @ param node
* the node whose primary node type should be checked
* @ param nodeType
* the nodetype that is asserted to be the node type of the node
* @ throws RepositoryException */
public static void assertPrimaryNodeType ( final Node node , final String nodeType ) throws RepositoryException { } } | final NodeType primaryNodeType = node . getPrimaryNodeType ( ) ; assertEquals ( nodeType , primaryNodeType . getName ( ) ) ; |
public class CommerceOrderNoteUtil { /** * Returns the commerce order note with the primary key or throws a { @ link NoSuchOrderNoteException } if it could not be found .
* @ param commerceOrderNoteId the primary key of the commerce order note
* @ return the commerce order note
* @ throws NoSuchOrderNoteException if a commerce order note with the primary key could not be found */
public static CommerceOrderNote findByPrimaryKey ( long commerceOrderNoteId ) throws com . liferay . commerce . exception . NoSuchOrderNoteException { } } | return getPersistence ( ) . findByPrimaryKey ( commerceOrderNoteId ) ; |
public class LocalizationActivityDelegate { /** * Provide method to set application language by country name . */
public final void setLanguage ( Context context , String language ) { } } | Locale locale = new Locale ( language ) ; setLanguage ( context , locale ) ; |
/**
 * Returns the response body stream from the underlying URLConnection.
 *
 * Also captures the {@code Set-Cookie} header, and manually follows a single
 * 302 redirect (see inline note).
 *
 * NOTE(review): all exceptions are swallowed and {@code null} is returned —
 * callers must null-check; consider propagating or logging properly instead
 * of {@code System.out}.
 *
 * @return the response InputStream (error stream for non-200 responses), or
 *         {@code null} if any exception occurred
 */
public InputStream getInputStream() {
    try {
        int responseCode = this.urlConnection.getResponseCode();
        try {
            // HACK: manually follow redirects, for the login to work.
            // HttpURLConnection's automatic redirect handling does not
            // re-send the provided headers, so a redirect is replayed here
            // with the same headers/auth; only its side effects (cookies,
            // auth) matter, hence the stream is immediately closed.
            if (responseCode == 302) {
                HttpClient redirectClient = new HttpClient(proxyHost, proxyPort,
                    urlConnection.getHeaderField("Location"), headers,
                    urlConnection.getRequestMethod(), callback, authHeader);
                redirectClient.getInputStream().close();
            }
        } catch (Throwable e) {
            // Best-effort: a failed redirect is reported but not fatal.
            System.out.println("Following redirect failed");
        }
        // Remember the session cookie for subsequent requests.
        setCookieHeader = this.urlConnection.getHeaderField("Set-Cookie");
        // Non-200 responses expose their body via the error stream.
        InputStream in = responseCode != HttpURLConnection.HTTP_OK
            ? this.urlConnection.getErrorStream()
            : this.urlConnection.getInputStream();
        return in;
    } catch (Exception e) {
        // NOTE(review): deliberate swallow — failure is signalled by null.
        return null;
    }
}
public class PortForward { /** * PortForward to a container .
* @ param namespace The namespace of the Pod
* @ param name The name of the Pod
* @ param ports The ports to forward
* @ return The result of the Port Forward request . */
public PortForwardResult forward ( String namespace , String name , List < Integer > ports ) throws ApiException , IOException { } } | String path = makePath ( namespace , name ) ; WebSocketStreamHandler handler = new WebSocketStreamHandler ( ) ; PortForwardResult result = new PortForwardResult ( handler , ports ) ; List < Pair > queryParams = new ArrayList < > ( ) ; for ( Integer port : ports ) { queryParams . add ( new Pair ( "ports" , port . toString ( ) ) ) ; } WebSockets . stream ( path , "GET" , queryParams , apiClient , handler ) ; // Wait for streams to start .
result . init ( ) ; return result ; |
public class AsynchConsumer { /** * Register the AsynchConsumerCallback . If callback is null then
* this is the equivalent of deregister , i . e . callbackRegistered
* is set to false .
* @ param callback */
void registerCallback ( AsynchConsumerCallback callback ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "registerCallback" , callback ) ; asynchConsumerCallback = callback ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "registerCallback" ) ; |
public class PersonGroupPersonsImpl { /** * Update a person persisted face ' s userData field .
* @ param personGroupId Id referencing a particular person group .
* @ param personId Id referencing a particular person .
* @ param persistedFaceId Id referencing a particular persistedFaceId of an existing face .
* @ param updateFaceOptionalParameter the object representing the optional parameters to be set before calling this API
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceResponse } object if successful . */
public Observable < Void > updateFaceAsync ( String personGroupId , UUID personId , UUID persistedFaceId , UpdateFaceOptionalParameter updateFaceOptionalParameter ) { } } | return updateFaceWithServiceResponseAsync ( personGroupId , personId , persistedFaceId , updateFaceOptionalParameter ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
/**
 * Returns for the given parameter <i>_name</i> the instance of class
 * {@link NumberGenerator}, loading it into the Infinispan cache from the
 * database on a cache miss.
 *
 * NOTE(review): containsKey/get is a check-then-act pair, not atomic — under
 * concurrency two callers may both trigger the DB load; presumably the loader
 * is idempotent, confirm. Also assumes getNumberGeneratorFromDB populates the
 * cache; if it does not (e.g. unknown name), this returns null — verify
 * callers handle that.
 *
 * @param _name name of the number generator to get
 * @return cached instance of {@link NumberGenerator}, possibly null
 * @throws CacheReloadException on error
 */
public static NumberGenerator get(final String _name) throws CacheReloadException {
    final Cache<String, NumberGenerator> cache =
        InfinispanCache.get().<String, NumberGenerator>getCache(NumberGenerator.NAMECACHE);
    // Cache miss: load by name from the database (expected to populate the cache).
    if (!cache.containsKey(_name)) {
        NumberGenerator.getNumberGeneratorFromDB(NumberGenerator.SQL_NAME, _name);
    }
    return cache.get(_name);
}
public class SwingGroovyMethods { /** * Overloads the left shift operator to provide an easy way to add
* components to a popupMenu . < p >
* @ param self a JPopupMenu
* @ param component a component to be added to the popupMenu .
* @ return same popupMenu , after the value was added to it .
* @ since 1.6.4 */
public static JPopupMenu leftShift ( JPopupMenu self , Component component ) { } } | self . add ( component ) ; return self ; |
public class DataFrameJoiner { /** * Joins the joiner to the table2 , using the given column for the second table and returns the resulting table
* @ param table2 The table to join with
* @ param col2Name The column to join on . If col2Name refers to a double column , the join is performed after
* rounding to integers .
* @ return The resulting table */
public Table inner ( Table table2 , String col2Name ) { } } | return inner ( table2 , false , col2Name ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.