signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CommonOps_DDF3 { /** * Sets all the diagonal elements equal to one and everything else equal to zero . * If this is a square matrix then it will be an identity matrix . * @ param a A matrix . */ public static void setIdentity ( DMatrix3x3 a ) { } }
a . a11 = 1 ; a . a21 = 0 ; a . a31 = 0 ; a . a12 = 0 ; a . a22 = 1 ; a . a32 = 0 ; a . a13 = 0 ; a . a23 = 0 ; a . a33 = 1 ;
public class OperationFuture { /** * Cancel this operation , if possible . * @ param ign not used * @ deprecated * @ return true if the operation has not yet been written to the network */ public boolean cancel ( boolean ign ) { } }
assert op != null : "No operation" ; op . cancel ( ) ; notifyListeners ( ) ; return op . getState ( ) == OperationState . WRITE_QUEUED ;
public class VdmLineBreakpointPropertyPage {
    /**
     * Renders the breakpoint's line number as a read-only label/text pair on
     * the property page. The "Member:" label from the original JDT page is
     * currently disabled (see the commented-out block below).
     *
     * (non-Javadoc)
     * @see org.eclipse.jdt.internal.debug.ui.propertypages.JavaBreakpointPage#createTypeSpecificLabels(org.eclipse.swt.widgets.Composite)
     */
    protected void createTypeSpecificLabels(Composite parent) {
        // Line number
        IVdmLineBreakpoint breakpoint = (IVdmLineBreakpoint) getBreakpoint();
        StringBuffer lineNumber = new StringBuffer(4);
        try {
            int lNumber = breakpoint.getLineNumber();
            // Only positive line numbers are meaningful; skip the label otherwise.
            if (lNumber > 0) {
                lineNumber.append(lNumber);
            }
        } catch (CoreException ce) {
            // Deliberately swallowed: a breakpoint without a resolvable line
            // number simply gets no "Line:" row.
            // JDIDebugUIPlugin.log(ce);
        }
        if (lineNumber.length() > 0) {
            createLabel(parent, "Line:");
            // Read-only text so the value can be selected/copied but not edited.
            Text text = SWTFactory.createText(parent, SWT.READ_ONLY, 1, 1);
            text.setText(lineNumber.toString());
            text.setBackground(parent.getBackground());
        }
        // IMember member = BreakpointUtils.getMember(breakpoint);
        // if (member == null) { return; }
        // TODO: The "Member:" row was removed until we find a solution to
        // locate the member. The original JDT page picked the label based on
        // the breakpoint kind (method breakpoint vs. watchpoint) and rendered
        // it in a read-only text field:
        //   createLabel(parent, label);
        //   Text text = SWTFactory.createText(parent, SWT.READ_ONLY, 1, 1);
        //   text.setText(fJavaLabelProvider.getText(member));
        //   text.setBackground(parent.getBackground());
    }
}
public class SearchParameterMap { /** * If set , tells the server to load these results synchronously , and not to load * more than X results . Note that setting this to a value will also set * { @ link # setLoadSynchronous ( boolean ) } to true */ public SearchParameterMap setLoadSynchronousUpTo ( Integer theLoadSynchronousUpTo ) { } }
myLoadSynchronousUpTo = theLoadSynchronousUpTo ; if ( myLoadSynchronousUpTo != null ) { setLoadSynchronous ( true ) ; } return this ;
public class HldAccProcessorNames { /** * < p > Get thing for given class and thing name . < / p > * @ param pClass a Class * @ param pThingName Thing Name * @ return a thing */ @ Override public final String getFor ( final Class < ? > pClass , final String pThingName ) { } }
if ( "list" . equals ( pThingName ) ) { if ( pClass == PaymentFrom . class || pClass == PaymentTo . class || pClass == PrepaymentFrom . class || pClass == PrepaymentTo . class || pClass == SubaccountLine . class || pClass == AdditionCostLine . class || pClass == Account . class ) { return PrcPageWithSubaccTypes . class . getSimpleName ( ) ; } else { return PrcEntitiesPage . class . getSimpleName ( ) ; } } else if ( "about" . equals ( pThingName ) ) { return PrcAbout . class . getSimpleName ( ) ; } return null ;
public class Parse {
    /**
     * Gets the value of the sections property.
     *
     * Note: this returns the backing list directly (no defensive copy), so
     * mutations by the caller are reflected in this instance.
     *
     * @return possible object is {@link Sections}
     */
    @XmlElementWrapper(name = "sections")
    @XmlElement(name = "s", type = S.class)
    public List<S> getSections() {
        return sections;
    }
}
public class DITableInfo { /** * Retrieves the scope of the best row identifier . < p > * This implements the rules described in * DatabaseInformationMain . SYSTEM _ BESTROWIDENTIFIER . < p > * @ return the scope of the best row identifier */ Integer getBRIScope ( ) { } }
return ( table . isWritable ( ) ) ? ValuePool . getInt ( bestRowTemporary ) : ValuePool . getInt ( bestRowSession ) ;
public class Ifc2x3tc1PackageImpl {
    /**
     * Lazily resolves and caches the {@link EEnum} for IfcAddressTypeEnum
     * from the globally registered IFC 2x3 TC1 package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcAddressTypeEnum() {
        if (ifcAddressTypeEnumEEnum == null) {
            // Classifier index 773 is fixed by the code generator for this package.
            ifcAddressTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(773);
        }
        return ifcAddressTypeEnumEEnum;
    }
}
public class H2OHttpViewImpl { /** * normal login handler part / / todo : consider using mostly the same code as in proxy part below */ @ Override public boolean loginHandler ( String target , HttpServletRequest request , HttpServletResponse response ) throws IOException { } }
if ( ! isLoginTarget ( target ) ) { return false ; } if ( isPageRequest ( request ) ) { sendLoginForm ( request , response ) ; } else { ServletUtils . sendResponseError ( response , HttpServletResponse . SC_UNAUTHORIZED , "Access denied. Please login." ) ; } return true ;
public class DeveloperUtilitiesServiceProgrammatic {
    /**
     * Exports the application's metamodel as a downloadable CSV {@link Clob}.
     *
     * Walks every loaded {@code ObjectSpecification} and collects one
     * {@code MetaModelRow} per non-excluded property, collection and action,
     * sorts the rows, and renders them under a header line.
     *
     * @return a Clob named "metamodel.csv" with text/csv content
     */
    @Programmatic
    public Clob downloadMetaModel() {
        final Collection<ObjectSpecification> specifications = specificationLoader.allSpecifications();
        final List<MetaModelRow> rows = Lists.newArrayList();
        for (final ObjectSpecification spec : specifications) {
            // Specs filtered by exclude() do not appear in the export at all.
            if (exclude(spec)) {
                continue;
            }
            // One row per (non-excluded) scalar property.
            final List<ObjectAssociation> properties = spec.getAssociations(Contributed.EXCLUDED, ObjectAssociation.Filters.PROPERTIES);
            for (final ObjectAssociation property : properties) {
                final OneToOneAssociation otoa = (OneToOneAssociation) property;
                if (exclude(otoa)) {
                    continue;
                }
                rows.add(new MetaModelRow(spec, otoa));
            }
            // One row per (non-excluded) collection.
            final List<ObjectAssociation> associations = spec.getAssociations(Contributed.EXCLUDED, ObjectAssociation.Filters.COLLECTIONS);
            for (final ObjectAssociation collection : associations) {
                final OneToManyAssociation otma = (OneToManyAssociation) collection;
                if (exclude(otma)) {
                    continue;
                }
                rows.add(new MetaModelRow(spec, otma));
            }
            // One row per (non-excluded) action, including contributed ones.
            final List<ObjectAction> actions = spec.getObjectActions(Contributed.INCLUDED);
            for (final ObjectAction action : actions) {
                if (exclude(action)) {
                    continue;
                }
                rows.add(new MetaModelRow(spec, action));
            }
        }
        // MetaModelRow's natural ordering determines CSV row order.
        Collections.sort(rows);
        final StringBuilder buf = new StringBuilder();
        buf.append(MetaModelRow.header()).append("\n");
        for (final MetaModelRow row : rows) {
            buf.append(row.asTextCsv()).append("\n");
        }
        return new Clob("metamodel.csv", mimeTypeTextCsv, buf.toString().toCharArray());
    }
}
public class LetterOrNumberValidator { /** * Sets the case sensitivity , which should be used by the validator . * @ param caseSensitivty * The case senstivitiy , which should be set , as a value of the enum { @ link Case } . The * value may either be < code > UPPERCASE < / code > , < code > LOWERCASE < / code > or * < code > CASE _ INSENSITIVE < / code > */ public final void setCaseSensitivity ( @ NonNull final Case caseSensitivty ) { } }
Condition . INSTANCE . ensureNotNull ( caseSensitivty , "The case sensitivity may not be null" ) ; this . caseSensitivity = caseSensitivty ;
public class ChineseValidator { /** * 判断中文汉字和符号 */ public static boolean isChinese ( String strName ) { } }
char [ ] ch = strName . toCharArray ( ) ; for ( int i = 0 ; i < ch . length ; i ++ ) { char c = ch [ i ] ; if ( isChinese ( c ) ) { return true ; } } return false ;
public class TargetSession {
    /**
     * Sets the session's type (discovery or operational).
     * The type may and must be set just once. Repeated calls of this method will fail.
     *
     * @param sessionType the session type
     * @return <code>true</code> if the session type was set successfully,
     *         <code>false</code> if not
     */
    boolean setSessionType(SessionType sessionType) {
        // Reject null input, and reject any attempt to overwrite a type
        // that was already assigned.
        final boolean alreadyAssigned = this.sessionType != null;
        if (sessionType == null || alreadyAssigned) {
            return false;
        }
        this.sessionType = sessionType;
        return true;
    }
}
public class ExternalEventHandlerBase { /** * Invoke a service ( synchronous ) process . The method cannot be used * if the process is not a service process . * Performance level : * 0 - to be determined by property , which will set the level to one of the following * 1,3,5,7,9 - use asynchronous engine ( 9 not implemented : 9 = 7 * 2,4,6,8,10 - use synchronous engine * 9,10 - use single transaction * 7,8,9,10 - all cache options CACHE _ ONLY * 5,6 - CACHE _ OFF for activity / transition , CACHE _ ONLY for variable / document * 3,4 - CACHE _ OFF for activity / transition , CACHE _ ON for variable / document * 1,2 - all cache options CACHE _ OFF * Defaults : * a . if PROP _ DO _ PARTIAL _ DB _ LOGGING property is specified : * - use 6 if its value is true * - use 10 otherwise ( the property is specified but value is not true ) * b . if MDW _ PERFORMANCE _ LEVEL _ SERVICE is specified * - use that value * c . use 3. * For additional information , see javadoc for ProcessEngineDriver : invokeService * @ param process the process definition . You can use { @ link # getProcessDefinition ( Long ) } * to retrieve the process definition from a process ID . * @ param eventInstId external event instance ID * @ param masterRequestId master request ID * @ param masterRequest the request content * @ param parameters Input parameter bindings for the process instance to be created * @ param responseVarName optional response variable ( otherwise implicit " response " is used ) * @ param performanceLevel * @ param headers requestHeaders * @ return response message , which is obtained from the response variable */ protected String invokeServiceProcess ( Long processId , Long eventInstId , String masterRequestId , String masterRequest , Map < String , Object > parameters , String responseVarName , int performanceLevel , Map < String , String > headers ) throws Exception { } }
Map < String , String > stringParams = translateParameters ( processId , parameters ) ; ProcessEngineDriver engineDriver = new ProcessEngineDriver ( ) ; return engineDriver . invokeService ( processId , OwnerType . DOCUMENT , eventInstId , masterRequestId , masterRequest , stringParams , responseVarName , performanceLevel , null , null , headers ) ;
public class TitlePaneIconifyButtonPainter {
    /**
     * Paint the foreground restore button enabled state.
     *
     * @param g the Graphics2D context to paint with.
     * @param c the component.
     * @param width the width of the component.
     * @param height the height of the component.
     */
    private void paintRestoreEnabled(Graphics2D g, JComponent c, int width, int height) {
        // Pure delegation to the shared restore painter's enabled variant.
        restorePainter.paintEnabled(g, c, width, height);
    }
}
public class SVGParser {
    /**
     * Parse the style attributes for an element.
     *
     * Handles three cases per attribute: the "style" attribute (full CSS
     * declaration list), the "class" attribute (CSS class names), and any
     * other presentation attribute, which is applied to the element's base
     * style.
     */
    private void parseAttributesStyle(SvgElementBase obj, Attributes attributes) throws SVGParseException {
        for (int i = 0; i < attributes.getLength(); i++) {
            String val = attributes.getValue(i).trim();
            if (val.length() == 0) {
                // Empty attribute. Ignore it.
                continue;
            }
            // boolean inherit = val.equals("inherit");  // NYI
            switch (SVGAttr.fromString(attributes.getLocalName(i))) {
                case style:
                    // Full "style" attribute: parse as a CSS declaration list.
                    parseStyle(obj, val);
                    break;
                case CLASS:
                    obj.classNames = CSSParser.parseClassAttribute(val);
                    break;
                default:
                    // Any other presentation attribute becomes part of the
                    // element's base style, created lazily on first use.
                    if (obj.baseStyle == null)
                        obj.baseStyle = new Style();
                    processStyleProperty(obj.baseStyle, attributes.getLocalName(i), attributes.getValue(i).trim());
                    break;
            }
        }
    }
}
public class CoverageData {
    /**
     * Get the coverage data values from the tile results scaled to the provided
     * dimensions.
     *
     * Tiles arrive ordered by row then column; edge pixels from the tile to
     * the left and the row above are retained so the interpolation algorithms
     * can sample across tile boundaries.
     *
     * @param tileMatrix tile matrix
     * @param tileResults tile results
     * @param request coverage data request
     * @param tileWidth tile width
     * @param tileHeight tile height
     * @param overlappingPixels overlapping request pixels
     * @return coverage data values, or null when no tile overlapped the request
     */
    private Double[][] getValues(TileMatrix tileMatrix, TileResultSet tileResults,
            CoverageDataRequest request, int tileWidth, int tileHeight, int overlappingPixels) {
        Double[][] values = null;
        // Tiles are ordered by rows and then columns. Track the last column
        // coverage data of the tile to the left and the last rows of the tiles
        // in the row above.
        Double[][] leftLastColumns = null;
        Map<Long, Double[][]> lastRowsByColumn = null;
        Map<Long, Double[][]> previousLastRowsByColumn = null;
        long previousRow = -1;
        long previousColumn = Long.MAX_VALUE;
        // Process each coverage data tile
        while (tileResults.moveToNext()) {
            // Get the next coverage data tile
            TileRow tileRow = tileResults.getRow();
            long currentRow = tileRow.getTileRow();
            long currentColumn = tileRow.getTileColumn();
            // If the row has changed, save off the previous last rows and begin
            // tracking this row. Clear the left last columns.
            if (currentRow > previousRow) {
                previousLastRowsByColumn = lastRowsByColumn;
                lastRowsByColumn = new HashMap<Long, Double[][]>();
                leftLastColumns = null;
            }
            // If there was a previous row, retrieve the top left and top
            // overlapping rows
            Double[][] topLeftRows = null;
            Double[][] topRows = null;
            if (previousLastRowsByColumn != null) {
                topLeftRows = previousLastRowsByColumn.get(currentColumn - 1);
                topRows = previousLastRowsByColumn.get(currentColumn);
            }
            // If the current column is not the column after the previous, clear
            // the left values (the tile to the left is missing).
            if (currentColumn < previousColumn || currentColumn != previousColumn + 1) {
                leftLastColumns = null;
            }
            // Get the bounding box of the coverage data
            BoundingBox tileBoundingBox = TileBoundingBoxUtils.getBoundingBox(
                    coverageBoundingBox, tileMatrix, currentColumn, currentRow);
            // Get the bounding box where the request and coverage data tile
            // overlap
            BoundingBox overlap = request.overlap(tileBoundingBox);
            // Get the gridded tile value for the tile
            GriddedTile griddedTile = getGriddedTile(tileRow.getId());
            // Get the coverage data tile image
            TImage image = createImage(tileRow);
            // If the tile overlaps with the requested box
            if (overlap != null) {
                // Get the rectangle of the tile coverage data with matching
                // values
                ImageRectangleF src = TileBoundingBoxJavaUtils.getFloatRectangle(
                        tileMatrix.getTileWidth(), tileMatrix.getTileHeight(),
                        tileBoundingBox, overlap);
                // Get the rectangle of where to store the results
                ImageRectangleF dest = null;
                if (request.getProjectedBoundingBox().equals(overlap)) {
                    if (request.isPoint()) {
                        // For single points request only a single destination
                        // pixel
                        dest = new ImageRectangleF(0, 0, 0, 0);
                    } else {
                        // The overlap is equal to the request, set as the full
                        // destination size
                        dest = new ImageRectangleF(0, 0, tileWidth, tileHeight);
                    }
                } else {
                    dest = TileBoundingBoxJavaUtils.getFloatRectangle(tileWidth,
                            tileHeight, request.getProjectedBoundingBox(), overlap);
                }
                if (src.isValidAllowEmpty() && dest.isValidAllowEmpty()) {
                    // Create the coverage data array first time through
                    if (values == null) {
                        values = new Double[tileHeight][tileWidth];
                    }
                    // Get the destination widths
                    float destWidth = dest.getRight() - dest.getLeft();
                    float destHeight = dest.getBottom() - dest.getTop();
                    // Get the source widths/heights
                    float srcWidth = src.getRight() - src.getLeft();
                    float srcHeight = src.getBottom() - src.getTop();
                    // Determine the source to destination ratio and how many
                    // destination pixels equal half a source pixel
                    float widthRatio;
                    float halfDestWidthPixel;
                    if (destWidth == 0) {
                        // Point request: avoid division by zero.
                        widthRatio = 0.0f;
                        halfDestWidthPixel = 0.0f;
                    } else {
                        widthRatio = srcWidth / destWidth;
                        halfDestWidthPixel = 0.5f / widthRatio;
                    }
                    float heightRatio;
                    float halfDestHeightPixel;
                    if (destHeight == 0) {
                        heightRatio = 0.0f;
                        halfDestHeightPixel = 0.0f;
                    } else {
                        heightRatio = srcHeight / destHeight;
                        halfDestHeightPixel = 0.5f / heightRatio;
                    }
                    float algorithmDestWidthPixelOverlap = halfDestWidthPixel * overlappingPixels;
                    float algorithmDestHeightPixelOverlap = halfDestHeightPixel * overlappingPixels;
                    // Determine the range of destination values to set,
                    // expanded by the algorithm's overlap and clamped to the
                    // requested dimensions.
                    int minDestY = (int) Math.floor(dest.getTop() - algorithmDestHeightPixelOverlap);
                    int maxDestY = (int) Math.ceil(dest.getBottom() + algorithmDestHeightPixelOverlap);
                    int minDestX = (int) Math.floor(dest.getLeft() - algorithmDestWidthPixelOverlap);
                    int maxDestX = (int) Math.ceil(dest.getRight() + algorithmDestWidthPixelOverlap);
                    minDestY = Math.max(minDestY, 0);
                    minDestX = Math.max(minDestX, 0);
                    maxDestY = Math.min(maxDestY, tileHeight - 1);
                    maxDestX = Math.min(maxDestX, tileWidth - 1);
                    // Read and set the coverage data values
                    for (int y = minDestY; y <= maxDestY; y++) {
                        for (int x = minDestX; x <= maxDestX; x++) {
                            // Only fill pixels not already set by an earlier tile.
                            if (values[y][x] == null) {
                                // Determine the coverage data based upon the
                                // selected algorithm
                                Double value = null;
                                switch (algorithm) {
                                case NEAREST_NEIGHBOR:
                                    value = getNearestNeighborValue(griddedTile, image,
                                            leftLastColumns, topLeftRows, topRows, y, x,
                                            widthRatio, heightRatio, dest.getTop(),
                                            dest.getLeft(), src.getTop(), src.getLeft());
                                    break;
                                case BILINEAR:
                                    value = getBilinearInterpolationValue(griddedTile, image,
                                            leftLastColumns, topLeftRows, topRows, y, x,
                                            widthRatio, heightRatio, dest.getTop(),
                                            dest.getLeft(), src.getTop(), src.getLeft());
                                    break;
                                case BICUBIC:
                                    value = getBicubicInterpolationValue(griddedTile, image,
                                            leftLastColumns, topLeftRows, topRows, y, x,
                                            widthRatio, heightRatio, dest.getTop(),
                                            dest.getLeft(), src.getTop(), src.getLeft());
                                    break;
                                default:
                                    throw new UnsupportedOperationException(
                                            "Algorithm is not supported: " + algorithm);
                                }
                                if (value != null) {
                                    values[y][x] = value;
                                }
                            }
                        }
                    }
                }
            }
            // Determine and store the coverage data of the last columns and
            // rows, even for non-overlapping tiles, so neighbors can sample them.
            leftLastColumns = new Double[overlappingPixels][(int) tileMatrix.getTileHeight()];
            Double[][] lastRows = new Double[overlappingPixels][(int) tileMatrix.getTileWidth()];
            lastRowsByColumn.put(currentColumn, lastRows);
            // For each overlapping pixel
            for (int lastIndex = 0; lastIndex < overlappingPixels; lastIndex++) {
                // Store the last column row coverage data values
                int lastColumnIndex = (int) tileMatrix.getTileWidth() - lastIndex - 1;
                for (int row = 0; row < tileMatrix.getTileHeight(); row++) {
                    Double value = getValue(griddedTile, image, lastColumnIndex, row);
                    leftLastColumns[lastIndex][row] = value;
                }
                // Store the last row column coverage data values
                int lastRowIndex = (int) tileMatrix.getTileHeight() - lastIndex - 1;
                for (int column = 0; column < tileMatrix.getTileWidth(); column++) {
                    Double value = getValue(griddedTile, image, column, lastRowIndex);
                    lastRows[lastIndex][column] = value;
                }
            }
            // Update the previous row and column
            previousRow = currentRow;
            previousColumn = currentColumn;
        }
        return values;
    }
}
public class FindBugs { /** * Configure training databases . * @ param findBugs * the IFindBugsEngine to configure * @ throws IOException */ public static void configureTrainingDatabases ( IFindBugsEngine findBugs ) throws IOException { } }
if ( findBugs . emitTrainingOutput ( ) ) { String trainingOutputDir = findBugs . getTrainingOutputDir ( ) ; if ( ! new File ( trainingOutputDir ) . isDirectory ( ) ) { throw new IOException ( "Training output directory " + trainingOutputDir + " does not exist" ) ; } AnalysisContext . currentAnalysisContext ( ) . setDatabaseOutputDir ( trainingOutputDir ) ; // XXX : hack System . setProperty ( "findbugs.checkreturn.savetraining" , new File ( trainingOutputDir , "checkReturn.db" ) . getPath ( ) ) ; } if ( findBugs . useTrainingInput ( ) ) { String trainingInputDir = findBugs . getTrainingInputDir ( ) ; if ( ! new File ( trainingInputDir ) . isDirectory ( ) ) { throw new IOException ( "Training input directory " + trainingInputDir + " does not exist" ) ; } AnalysisContext . currentAnalysisContext ( ) . setDatabaseInputDir ( trainingInputDir ) ; AnalysisContext . currentAnalysisContext ( ) . loadInterproceduralDatabases ( ) ; // XXX : hack System . setProperty ( "findbugs.checkreturn.loadtraining" , new File ( trainingInputDir , "checkReturn.db" ) . getPath ( ) ) ; } else { AnalysisContext . currentAnalysisContext ( ) . loadDefaultInterproceduralDatabases ( ) ; }
public class WebSocketNativeBridgeHandler { /** * Establishes the websocket connection */ @ Override public synchronized void processConnect ( WebSocketChannel channel , WSURI uri , String [ ] protocols ) { } }
LOG . entering ( CLASS_NAME , "processConnect" , new Object [ ] { uri , protocols } ) ; try { WebSocketNativeChannel nativeChannel = ( WebSocketNativeChannel ) channel ; if ( nativeChannel . getProxy ( ) != null ) { throw new IllegalStateException ( "Bridge proxy previously set" ) ; } Proxy proxy = BridgeUtil . createProxy ( uri . getURI ( ) , this ) ; proxy . setPeer ( channel ) ; nativeChannel . setProxy ( proxy ) ; String [ ] params ; if ( protocols != null ) { String s = "" ; for ( int i = 0 ; i < protocols . length ; i ++ ) { if ( i > 0 ) { s += "," ; } s += protocols [ i ] ; } params = new String [ ] { "WEBSOCKET" , uri . toString ( ) , s , "" } ; } else { params = new String [ ] { "WEBSOCKET" , uri . toString ( ) } ; } proxy . processEvent ( XoaEventKind . CREATE , params ) ; } catch ( Exception e ) { LOG . log ( Level . FINE , "While initializing WebSocket proxy: " + e . getMessage ( ) , e ) ; listener . connectionFailed ( channel , e ) ; }
public class DocumentInputStreamFactory { /** * Method used to instantiate the appropriate input stream reader , * a standard one , or one which can deal with " encrypted " data . * @ param directory directory entry * @ param name file name * @ return new input stream * @ throws IOException */ public InputStream getInstance ( DirectoryEntry directory , String name ) throws IOException { } }
DocumentEntry entry = ( DocumentEntry ) directory . getEntry ( name ) ; InputStream stream ; if ( m_encrypted ) { stream = new EncryptedDocumentInputStream ( entry , m_encryptionCode ) ; } else { stream = new DocumentInputStream ( entry ) ; } return stream ;
public class ExtractorUtils { /** * If necessary , replace the input for the configured repositories to their values * under the current environment . We are not allowing for the input or the value to be empty . */ private static void replaceRepositoryInputForValues ( ArtifactoryClientConfiguration configuration , Run build , String resolverReleaseInput , String resolverSnapshotInput , Map < String , String > env ) { } }
if ( StringUtils . isBlank ( resolverReleaseInput ) || StringUtils . isBlank ( resolverSnapshotInput ) ) { build . setResult ( Result . FAILURE ) ; throw new IllegalStateException ( "Input for resolve repositories cannot be empty." ) ; } String resolveReleaseRepo = Util . replaceMacro ( resolverReleaseInput , env ) ; String resolveSnapshotRepo = Util . replaceMacro ( resolverSnapshotInput , env ) ; if ( StringUtils . isBlank ( resolveReleaseRepo ) || StringUtils . isBlank ( resolveSnapshotRepo ) ) { build . setResult ( Result . FAILURE ) ; throw new IllegalStateException ( "Resolver repository variable cannot be replaces with empty value." ) ; } configuration . resolver . setDownloadSnapshotRepoKey ( resolveSnapshotRepo ) ; configuration . resolver . setRepoKey ( resolveReleaseRepo ) ;
public class FileExtensions { /** * Not yet implemented . Checks if the given file is open . * @ param file * The file to check . * @ return Return true if the file is open otherwise false . * @ throws IOException * Signals that an I / O exception has occurred . */ public static boolean isOpen ( final File file ) throws IOException { } }
boolean open = false ; FileLock lock = null ; try ( RandomAccessFile fileAccess = new RandomAccessFile ( file . getAbsolutePath ( ) , "rw" ) ) { lock = fileAccess . getChannel ( ) . tryLock ( ) ; if ( lock == null ) { open = true ; } else { lock . release ( ) ; } } return open ;
public class TypedIdKey { /** * Retrieves a value with the given type and id from the given map . * @ param < V > the value type * @ param map the map * @ param type the type * @ param id the id * @ return the associated value , if any */ @ SuppressWarnings ( { } }
"unchecked" } ) public static < V > Optional < V > get ( Map < ? , ? > map , Class < V > type , Serializable id ) { return Optional . ofNullable ( ( V ) map . get ( new TypedIdKey < > ( type , id ) ) ) ;
public class ConnectionstateResponse { /** * Returns a textual representation of the status code . * @ return short description of status as string */ public String getStatusString ( ) { } }
switch ( status ) { case ErrorCodes . NO_ERROR : return "connection state is normal" ; case ErrorCodes . CONNECTION_ID : return "server could not find active data connection with specified ID" ; case ErrorCodes . DATA_CONNECTION : return "server detected error concerning the data connection" ; case ErrorCodes . KNX_CONNECTION : return "server detected error concerning the KNX bus/subsystem connection" ; default : return "unknown status" ; }
public class SyncGroupsInner {
    /**
     * Creates or updates a sync group.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param databaseName The name of the database on which the sync group is hosted.
     * @param syncGroupName The name of the sync group.
     * @param parameters The requested sync group resource state.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the SyncGroupInner object
     */
    public Observable<SyncGroupInner> beginCreateOrUpdateAsync(String resourceGroupName, String serverName, String databaseName, String syncGroupName, SyncGroupInner parameters) {
        // Delegate to the ServiceResponse-returning variant and unwrap the body.
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, databaseName, syncGroupName, parameters).map(new Func1<ServiceResponse<SyncGroupInner>, SyncGroupInner>() {
            @Override
            public SyncGroupInner call(ServiceResponse<SyncGroupInner> response) {
                return response.body();
            }
        });
    }
}
public class Benchmark { /** * Rastrigin ' s function */ static public double rastrigin ( double [ ] x ) { } }
double sum = 0.0 ; for ( int i = 0 ; i < x . length ; i ++ ) { sum += ( x [ i ] * x [ i ] ) - ( 10.0 * Math . cos ( PIx2 * x [ i ] ) ) + 10.0 ; } return ( sum ) ;
public class VisitorState { /** * Validates a type string , ensuring it is not generic and not an array type . */ private static void validateTypeStr ( String typeStr ) { } }
if ( typeStr . contains ( "[" ) || typeStr . contains ( "]" ) ) { throw new IllegalArgumentException ( String . format ( "Cannot convert array types (%s), please build them using getType()" , typeStr ) ) ; } if ( typeStr . contains ( "<" ) || typeStr . contains ( ">" ) ) { throw new IllegalArgumentException ( String . format ( "Cannot convert generic types (%s), please build them using getType()" , typeStr ) ) ; }
public class Util { /** * Prepend or postpend the given string with the given character to fill the given field length . * If the given string is equal or greater than the given field length , it will be returned * without modification . * @ param s the given string * @ param fieldLength the desired field length * @ param padChar the desired pad character * @ param postpend if true append the pacCharacters to the end of the string . * @ return prepended or postpended given string with the given character to fill the given field * length . */ public static final String characterPad ( final String s , final int fieldLength , final char padChar , final boolean postpend ) { } }
final char [ ] chArr = s . toCharArray ( ) ; final int sLen = chArr . length ; if ( sLen < fieldLength ) { final char [ ] out = new char [ fieldLength ] ; final int blanks = fieldLength - sLen ; if ( postpend ) { for ( int i = 0 ; i < sLen ; i ++ ) { out [ i ] = chArr [ i ] ; } for ( int i = sLen ; i < fieldLength ; i ++ ) { out [ i ] = padChar ; } } else { // prepend for ( int i = 0 ; i < blanks ; i ++ ) { out [ i ] = padChar ; } for ( int i = blanks ; i < fieldLength ; i ++ ) { out [ i ] = chArr [ i - blanks ] ; } } return String . valueOf ( out ) ; } return s ;
public class Config {
    /**
     * Populates the client {@code Config} from parsed kubeconfig contents:
     * cluster endpoint, TLS settings, and user credentials (token, basic auth,
     * auth-provider token, or an exec credential plugin).
     *
     * The kubeconfig references some assets via relative paths, which are
     * resolved against {@code kubeconfigPath} when provided.
     *
     * @param config the config object to populate
     * @param context kubeconfig context to select, or null for the file's current context
     * @param kubeconfigContents raw kubeconfig text
     * @param kubeconfigPath path of the kubeconfig file, used to absolutify relative asset paths; may be null/empty
     * @return true when a cluster was found and the config was populated, false on missing cluster or any parse/exec failure
     */
    private static boolean loadFromKubeconfig(Config config, String context, String kubeconfigContents, String kubeconfigPath) {
        try {
            io.fabric8.kubernetes.api.model.Config kubeConfig = KubeConfigUtils.parseConfigFromString(kubeconfigContents);
            if (context != null) {
                kubeConfig.setCurrentContext(context);
            }
            Context currentContext = KubeConfigUtils.getCurrentContext(kubeConfig);
            Cluster currentCluster = KubeConfigUtils.getCluster(kubeConfig, currentContext);
            if (currentCluster != null) {
                config.setMasterUrl(currentCluster.getServer());
                config.setNamespace(currentContext.getNamespace());
                // insecure-skip-tls-verify drives both trust-all and hostname verification.
                config.setTrustCerts(currentCluster.getInsecureSkipTlsVerify() != null && currentCluster.getInsecureSkipTlsVerify());
                config.setDisableHostnameVerification(currentCluster.getInsecureSkipTlsVerify() != null && currentCluster.getInsecureSkipTlsVerify());
                config.setCaCertData(currentCluster.getCertificateAuthorityData());
                AuthInfo currentAuthInfo = KubeConfigUtils.getUserAuthInfo(kubeConfig, currentContext);
                if (currentAuthInfo != null) {
                    // Rewrite TLS asset paths if needed: relative paths in the
                    // kubeconfig are resolved against the kubeconfig's location.
                    String caCertFile = currentCluster.getCertificateAuthority();
                    String clientCertFile = currentAuthInfo.getClientCertificate();
                    String clientKeyFile = currentAuthInfo.getClientKey();
                    if (kubeconfigPath != null && !kubeconfigPath.isEmpty()) {
                        caCertFile = absolutify(new File(kubeconfigPath), currentCluster.getCertificateAuthority());
                        clientCertFile = absolutify(new File(kubeconfigPath), currentAuthInfo.getClientCertificate());
                        clientKeyFile = absolutify(new File(kubeconfigPath), currentAuthInfo.getClientKey());
                    }
                    config.setCaCertFile(caCertFile);
                    config.setClientCertFile(clientCertFile);
                    config.setClientCertData(currentAuthInfo.getClientCertificateData());
                    config.setClientKeyFile(clientKeyFile);
                    config.setClientKeyData(currentAuthInfo.getClientKeyData());
                    config.setOauthToken(currentAuthInfo.getToken());
                    config.setUsername(currentAuthInfo.getUsername());
                    config.setPassword(currentAuthInfo.getPassword());
                    // Token fallback chain: explicit token -> auth-provider token -> exec plugin.
                    if (Utils.isNullOrEmpty(config.getOauthToken()) && currentAuthInfo.getAuthProvider() != null && !Utils.isNullOrEmpty(currentAuthInfo.getAuthProvider().getConfig().get(ACCESS_TOKEN))) {
                        config.setOauthToken(currentAuthInfo.getAuthProvider().getConfig().get(ACCESS_TOKEN));
                    } else if (config.getOauthTokenProvider() == null) {
                        // https://kubernetes.io/docs/reference/access-authn-authz/authentication/#client-go-credential-plugins
                        ExecConfig exec = currentAuthInfo.getExec();
                        if (exec != null) {
                            String apiVersion = exec.getApiVersion();
                            if ("client.authentication.k8s.io/v1alpha1".equals(apiVersion) || "client.authentication.k8s.io/v1beta1".equals(apiVersion)) {
                                List<String> argv = new ArrayList<String>();
                                String command = exec.getCommand();
                                if (command.contains("/") && !command.startsWith("/") && kubeconfigPath != null && !kubeconfigPath.isEmpty()) {
                                    // Appears to be a relative path; normalize.
                                    // Spec is vague about how to detect this situation.
                                    command = Paths.get(kubeconfigPath).resolveSibling(command).normalize().toString();
                                }
                                argv.add(command);
                                List<String> args = exec.getArgs();
                                if (args != null) {
                                    argv.addAll(args);
                                }
                                ProcessBuilder pb = new ProcessBuilder(argv);
                                List<ExecEnvVar> env = exec.getEnv();
                                if (env != null) {
                                    Map<String, String> environment = pb.environment();
                                    env.forEach(var -> environment.put(var.getName(), var.getValue()));
                                }
                                // TODO check behavior of tty & stdin
                                Process p = pb.start();
                                // Non-zero exit is logged but the output is still parsed.
                                if (p.waitFor() != 0) {
                                    LOGGER.warn(IOHelpers.readFully(p.getErrorStream()));
                                }
                                ExecCredential ec = Serialization.unmarshal(p.getInputStream(), ExecCredential.class);
                                if (!apiVersion.equals(ec.apiVersion)) {
                                    LOGGER.warn("Wrong apiVersion {} vs. {}", ec.apiVersion, apiVersion);
                                }
                                if (ec.status != null && ec.status.token != null) {
                                    config.setOauthToken(ec.status.token);
                                } else {
                                    LOGGER.warn("No token returned");
                                }
                            } else {
                                // TODO v1beta1?
                                LOGGER.warn("Unsupported apiVersion: {}", apiVersion);
                            }
                        }
                    }
                    // Friendlier error messages for common auth failures.
                    config.getErrorMessages().put(401, "Unauthorized! Token may have expired! Please log-in again.");
                    config.getErrorMessages().put(403, "Forbidden! User " + currentContext.getUser() + " doesn't have permission.");
                }
                return true;
            }
        } catch (Exception e) {
            LOGGER.error("Failed to parse the kubeconfig.", e);
        }
        return false;
    }
}
public class AbstractMetadataPublishingController {
    /**
     * Returns the SAML metadata for the entity, refreshing and re-signing it first
     * if the metadata container reports that an update is required.
     *
     * @param request the HTTP request (only used for logging the caller's address)
     * @param acceptHeader the Accept header value; when it does not mention the SAML
     *        metadata media type, plain {@code application/xml} is returned instead
     * @return an HttpEntity holding the serialized SAML metadata, or an
     *         INTERNAL_SERVER_ERROR response if signing/marshalling fails
     */
    public HttpEntity<byte[]> getMetadata(HttpServletRequest request,
            @RequestHeader(name = "Accept", required = false) String acceptHeader) {
        logger.debug("Request to download metadata from {}", request.getRemoteAddr());
        try {
            // Check if the metadata is up-to-date according to how the container was configured.
            if (this.metadataContainer.updateRequired(true)) {
                logger.debug("Metadata needs to be updated ...");
                this.metadataContainer.update(true);
                logger.debug("Metadata was updated and signed");
            } else {
                logger.debug("Metadata is up-to-date, using cached metadata");
            }
            // Get the DOM for the metadata and serialize it.
            Element dom = this.metadataContainer.marshall();
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            SerializeSupport.writeNode(dom, stream);
            // Assign the HTTP headers: content type depends on what the client accepts.
            HttpHeaders header = new HttpHeaders();
            if (acceptHeader != null && !acceptHeader.contains(APPLICATION_SAML_METADATA)) {
                header.setContentType(MediaType.APPLICATION_XML);
            } else {
                // Default (including a null Accept header) is the SAML metadata media type.
                header.setContentType(MediaType.valueOf(APPLICATION_SAML_METADATA));
            }
            // TODO: turn off caching
            byte[] documentBody = stream.toByteArray();
            header.setContentLength(documentBody.length);
            return new HttpEntity<byte[]>(documentBody, header);
        } catch (SignatureException | MarshallingException e) {
            logger.error("Failed to return valid metadata", e);
            // ResponseEntity is an HttpEntity subclass, so this satisfies the return type.
            return new ResponseEntity<byte[]>(HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }
}
public class RequestArguments { /** * Returns a json object containing all arguments specified to the page . * Useful for redirects and polling . */ protected JsonObject argumentsToJson ( ) { } }
JsonObject result = new JsonObject ( ) ; for ( Argument a : _arguments ) { if ( a . specified ( ) ) result . addProperty ( a . _name , a . originalValue ( ) ) ; } return result ;
public class PersistenceController {
    /**
     * Insert new message statuses obtained from a message query.
     * Runs inside a single store transaction; only "delivered" and "read" statuses
     * are persisted, other status strings are silently skipped.
     *
     * @param conversationId Unique conversation id.
     * @param profileId Profile id from current session details.
     * @param msgStatusList New message statuses.
     * @return Observable emitting the result flag.
     */
    public Observable<Boolean> upsertMessageStatuses(String conversationId, String profileId,
            List<MessageStatusUpdate> msgStatusList) {
        return asObservable(new Executor<Boolean>() {
            @Override
            void execute(ChatStore store, Emitter<Boolean> emitter) {
                store.beginTransaction();
                boolean isSuccess = false;
                for (MessageStatusUpdate statusUpdate : msgStatusList) {
                    for (String messageId : statusUpdate.getMessageIds()) {
                        // Map the wire status string onto the local enum; null means "unsupported".
                        LocalMessageStatus status = null;
                        if (MessageStatus.delivered.name().equals(statusUpdate.getStatus())) {
                            status = LocalMessageStatus.delivered;
                        } else if (MessageStatus.read.name().equals(statusUpdate.getStatus())) {
                            status = LocalMessageStatus.read;
                        }
                        if (status != null) {
                            // NOTE(review): isSuccess is overwritten on every update, so the emitted
                            // value reflects only the LAST update, not all of them — confirm intent.
                            isSuccess = store.update(ChatMessageStatus.builder().populate(conversationId,
                                    messageId, profileId, status,
                                    DateHelper.getUTCMilliseconds(statusUpdate.getTimestamp()), null).build());
                        }
                    }
                }
                store.endTransaction();
                emitter.onNext(isSuccess);
                emitter.onCompleted();
            }
        });
    }
}
public class BlockHeartbeatPRequest { /** * < code > optional . alluxio . grpc . block . BlockHeartbeatPOptions options = 5 ; < / code > */ public alluxio . grpc . BlockHeartbeatPOptions getOptions ( ) { } }
return options_ == null ? alluxio . grpc . BlockHeartbeatPOptions . getDefaultInstance ( ) : options_ ;
public class DateUtils { /** * Takes a byte size and formats it for display with ' friendly ' units . * This involves converting it to the largest unit * ( of B , KiB , MiB , GiB , TiB ) for which the amount will be > 1. * Additionally , at least 2 significant digits are always displayed . * Negative numbers will be returned as ' 0 B ' . * @ param amount the amount of bytes * @ return A string containing the amount , properly formated . */ public static String formatBytesForDisplay ( long amount ) { } }
double displayAmount = ( double ) amount ; int unitPowerOf1024 = 0 ; if ( amount <= 0 ) { return "0 B" ; } while ( displayAmount >= 1024 && unitPowerOf1024 < 4 ) { displayAmount = displayAmount / 1024 ; unitPowerOf1024 ++ ; } final String [ ] units = { " B" , " KiB" , " MiB" , " GiB" , " TiB" } ; // ensure at least 2 significant digits ( # . # ) for small displayValues int fractionDigits = ( displayAmount < 10 ) ? 1 : 0 ; return doubleToString ( displayAmount , fractionDigits , fractionDigits ) + units [ unitPowerOf1024 ] ;
public class Terminals {
    /**
     * Returns a {@link Terminals} object for lexing and parsing the operators with names
     * specified in {@code ops}, and for lexing and parsing the keywords case insensitively.
     *
     * @param wordScanner the scanner that returns a word in the language.
     * @param ops the operator names.
     * @param keywords the keyword names.
     * @return the Terminals instance.
     * @deprecated Use {@code operators(ops).words(wordScanner).caseInsensitiveKeywords(keywords).build()}
     *             instead.
     */
    @Deprecated
    public static Terminals caseInsensitive(Parser<String> wordScanner, String[] ops, String[] keywords) {
        // Pure delegation to the builder chain this method was deprecated in favor of.
        return operators(ops).words(wordScanner).caseInsensitiveKeywords(keywords).build();
    }
}
public class ClientFlakeIdGeneratorConfig { /** * Sets how many IDs are pre - fetched on the background when one call to * { @ link FlakeIdGenerator # newId ( ) } is made . Default is 100. * @ param prefetchCount the desired prefetch count , in the range 1 . . 100,000. * @ return this instance for fluent API */ public ClientFlakeIdGeneratorConfig setPrefetchCount ( int prefetchCount ) { } }
checkTrue ( prefetchCount > 0 && prefetchCount <= MAXIMUM_PREFETCH_COUNT , "prefetch-count must be 1.." + MAXIMUM_PREFETCH_COUNT + ", not " + prefetchCount ) ; this . prefetchCount = prefetchCount ; return this ;
public class AbstractCasWebflowConfigurer { /** * Create mapping to subflow state . * @ param name the name * @ param value the value * @ param required the required * @ param type the type * @ return the default mapping */ public DefaultMapping createMappingToSubflowState ( final String name , final String value , final boolean required , final Class type ) { } }
val parser = this . flowBuilderServices . getExpressionParser ( ) ; val source = parser . parseExpression ( value , new FluentParserContext ( ) ) ; val target = parser . parseExpression ( name , new FluentParserContext ( ) ) ; val mapping = new DefaultMapping ( source , target ) ; mapping . setRequired ( required ) ; val typeConverter = new RuntimeBindingConversionExecutor ( type , this . flowBuilderServices . getConversionService ( ) ) ; mapping . setTypeConverter ( typeConverter ) ; return mapping ;
public class HttpServletResponseImpl {
    /**
     * Return <code>true</code> if the specified URL should be encoded with a session
     * identifier. This is the case only when: the request asked for a valid session,
     * the session ID was not received via a cookie, and URL tracking is enabled.
     *
     * @param location Absolute URL to be validated
     */
    private boolean isEncodeable(final String location) {
        if (location == null)
            return (false);
        // Is this an intra-document reference?
        if (location.startsWith("#"))
            return (false);
        // Are we in a valid session that is not using cookies?
        final HttpServletRequestImpl hreq =
                exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getOriginalRequest();
        // Is URL encoding permitted for this servlet context at all?
        if (!originalServletContext.getEffectiveSessionTrackingModes().contains(SessionTrackingMode.URL)) {
            return false;
        }
        final HttpSession session = hreq.getSession(false);
        if (session == null) {
            // No existing session — nothing to encode.
            return false;
        } else if (hreq.isRequestedSessionIdFromCookie()) {
            // Client already tracks the session via cookie; URL rewriting unnecessary.
            return false;
        } else if (!hreq.isRequestedSessionIdFromURL() && !session.isNew()) {
            // Established session that did not arrive via URL — don't start rewriting now.
            return false;
        }
        // Final check: does the URL point back into this web application?
        return doIsEncodeable(hreq, session, location);
    }
}
public class RunContainer { /** * Return the content of this container as a ShortBuffer . This creates a copy and might be * relatively slow . * @ return the ShortBuffer */ public ShortBuffer toShortBuffer ( ) { } }
ShortBuffer sb = ShortBuffer . allocate ( this . nbrruns * 2 ) ; sb . put ( this . valueslength , 0 , this . nbrruns * 2 ) ; return sb ;
public class BitInputStream {
    /**
     * Reads as many bits as are available (up to the length of {@code bits}) and
     * verifies they match the corresponding prefix of the expected bits.
     *
     * @param bits the expected bits
     * @throws IOException if reading from the underlying stream fails
     */
    public void expect(final Bits bits) throws IOException {
        // Clamp to what the stream can still provide ("availible" is the sibling
        // method's existing spelling — cannot be renamed from this block).
        int size = Math.min(availible(), bits.bitLength);
        Bits read = read(size);
        if (!bits.range(0, size).equals(read)) {
            // NOTE(review): mismatch throws an unchecked RuntimeException even though the
            // method declares IOException — confirm whether callers rely on this distinction.
            throw new RuntimeException(String.format("%s is not expected %s", read, bits));
        }
    }
}
public class AbstractDraweeController {
    /**
     * Sets the hierarchy.
     * <p>The controller should be detached when this method is called.
     *
     * @param hierarchy This must be an instance of {@link SettableDraweeHierarchy},
     *        or null to clear the current hierarchy.
     */
    @Override
    public void setHierarchy(@Nullable DraweeHierarchy hierarchy) {
        if (FLog.isLoggable(FLog.VERBOSE)) {
            FLog.v(TAG, "controller %x %s: setHierarchy: %s",
                    System.identityHashCode(this), mId, hierarchy);
        }
        // Record whether this is an attach or a clear for the event tracker.
        mEventTracker.recordEvent(
                (hierarchy != null) ? Event.ON_SET_HIERARCHY : Event.ON_CLEAR_HIERARCHY);
        // force release in case request was submitted
        if (mIsRequestSubmitted) {
            mDeferredReleaser.cancelDeferredRelease(this);
            release();
        }
        // clear the existing hierarchy (detach its controller overlay first)
        if (mSettableDraweeHierarchy != null) {
            mSettableDraweeHierarchy.setControllerOverlay(null);
            mSettableDraweeHierarchy = null;
        }
        // set the new hierarchy and re-attach the controller overlay
        if (hierarchy != null) {
            Preconditions.checkArgument(hierarchy instanceof SettableDraweeHierarchy);
            mSettableDraweeHierarchy = (SettableDraweeHierarchy) hierarchy;
            mSettableDraweeHierarchy.setControllerOverlay(mControllerOverlay);
        }
    }
}
public class Type { /** * Returns the descriptor corresponding to the given argument and return * types . * @ param returnType * the return type of the method . * @ param argumentTypes * the argument types of the method . * @ return the descriptor corresponding to the given argument and return * types . */ public static String getMethodDescriptor ( final Type returnType , final Type ... argumentTypes ) { } }
StringBuilder sb = new StringBuilder ( ) ; sb . append ( '(' ) ; for ( int i = 0 ; i < argumentTypes . length ; ++ i ) { argumentTypes [ i ] . getDescriptor ( sb ) ; } sb . append ( ')' ) ; returnType . getDescriptor ( sb ) ; return sb . toString ( ) ;
public class ByteArrayUtil { /** * Put the source < i > float < / i > into the destination byte array starting at the given offset * in big endian order . * There is no bounds checking . * @ param array destination byte array * @ param offset destination offset * @ param value source < i > float < / i > */ public static void putFloatBE ( final byte [ ] array , final int offset , final float value ) { } }
putIntBE ( array , offset , Float . floatToRawIntBits ( value ) ) ;
public class ExtensionHook { /** * Adds the given { @ code httpSenderListener } to the extension hook , to be later added to the * { @ link org . parosproxy . paros . network . HttpSender HttpSender } . * By default , the { @ code HttpSenderListener } s added to this extension hook are removed from the { @ code HttpSender } when the * extension is unloaded . * @ param httpSenderListener the HttpSenderListener that will be added to the { @ code HttpSender } * @ throws IllegalArgumentException if the given { @ code httpSenderListener } is { @ code null } . * @ since 2.7.0 */ public void addHttpSenderListener ( HttpSenderListener httpSenderListener ) { } }
if ( httpSenderListener == null ) { throw new IllegalArgumentException ( "Parameter httpSenderListener must not be null." ) ; } if ( httpSenderListeners == null ) { httpSenderListeners = new ArrayList < > ( ) ; } httpSenderListeners . add ( httpSenderListener ) ;
public class OpenAPIImpl { /** * helpers */ @ Override public OpenAPI path ( String name , PathItem path ) { } }
if ( this . paths == null ) { this . paths = new PathsImpl ( ) ; } this . paths . addPathItem ( name , path ) ; return this ;
public class Settings {
    /**
     * Checks whether a feature is enabled in the effective configuration.
     * <p>A feature is identified by a path expression relative to {@link #CONFIG_ROOT},
     * such as {@code context-propagation.executor}. The feature is enabled iff the
     * config element at the requested path has a child element {@code enabled} that is
     * truthy (per the javadoc: {@code true}, {@code on}, or {@code yes} — presumably
     * handled by the underlying config library's boolean parsing; confirm there).
     *
     * @param featurePath the feature's path expression; must be non-null and non-empty
     * @return true, if enabled, otherwise false
     * @since 0.10
     */
    public boolean isEnabled(String featurePath) {
        checkArgument(!Strings.isNullOrEmpty(featurePath));
        // Resolve the sub-config at featurePath, then read its "enabled" flag.
        return config.getConfig(featurePath).getBoolean("enabled");
    }
}
public class GenericMetadataSupport { /** * Registers the type variables for the given type and all of its superclasses and superinterfaces . */ protected void registerAllTypeVariables ( Type classType ) { } }
Queue < Type > typesToRegister = new LinkedList < Type > ( ) ; Set < Type > registeredTypes = new HashSet < Type > ( ) ; typesToRegister . add ( classType ) ; while ( ! typesToRegister . isEmpty ( ) ) { Type typeToRegister = typesToRegister . poll ( ) ; if ( typeToRegister == null || registeredTypes . contains ( typeToRegister ) ) { continue ; } registerTypeVariablesOn ( typeToRegister ) ; registeredTypes . add ( typeToRegister ) ; Class < ? > rawType = extractRawTypeOf ( typeToRegister ) ; typesToRegister . add ( rawType . getGenericSuperclass ( ) ) ; typesToRegister . addAll ( Arrays . asList ( rawType . getGenericInterfaces ( ) ) ) ; }
public class BucketTimer { /** * { @ inheritDoc } */ @ Override public Long getValue ( int pollerIndex ) { } }
final long cnt = getCount ( pollerIndex ) ; return ( cnt == 0 ) ? 0L : totalTime . getValue ( ) . longValue ( ) / cnt ;
public class NullSafe { /** * Convert a list of a given type using converter . * @ param source a list to convert * @ param converter the map function to apply * @ param < S > type to convert from * @ param < R > type to convert to * @ return A converted List with all pre - conversion and post - conversion null values removed . Can * return an empty list . Will return null if source is null . */ public static < S , R > List < R > convert ( List < S > source , Function < S , R > converter ) { } }
return ( source == null ) ? null : source . stream ( ) . filter ( Objects :: nonNull ) . map ( converter ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ;
public class BrokerHelper {
    /**
     * Returns the primary-key values of an Identity, one ValueContainer per PK field,
     * optionally converted to their SQL representation.
     *
     * @param cld the class descriptor supplying the PK field metadata
     * @param oid the identity supplying the raw PK values (positionally aligned with the PK fields)
     * @param convertToSql when true, each value is passed through the field's java-to-SQL conversion
     * @return one ValueContainer (value + JDBC type) per PK field
     * @throws PersistenceBrokerException if any value cannot be produced/converted
     */
    public ValueContainer[] getKeyValues(ClassDescriptor cld, Identity oid, boolean convertToSql)
            throws PersistenceBrokerException {
        FieldDescriptor[] pkFields = cld.getPkFields();
        ValueContainer[] result = new ValueContainer[pkFields.length];
        Object[] pkValues = oid.getPrimaryKeyValues();
        try {
            for (int i = 0; i < result.length; i++) {
                FieldDescriptor fd = pkFields[i];
                Object cv = pkValues[i];
                if (convertToSql) {
                    // BRJ: apply type and value mapping
                    cv = fd.getFieldConversion().javaToSql(cv);
                }
                result[i] = new ValueContainer(cv, fd.getJdbcType());
            }
        } catch (Exception e) {
            // Wrap anything that goes wrong (e.g. conversion failure, length mismatch)
            // in the broker's exception type with the offending identity attached.
            throw new PersistenceBrokerException(
                    "Can't generate primary key values for given Identity " + oid, e);
        }
        return result;
    }
}
public class SimpleConversionService {
    /**
     * Registers all the {@link Converter Converters} provided by {@literal cp-elements}
     * in {@link org.cp.elements.data.conversion.converters}, discovering them from the
     * location (jar or file system) of the canonical converter class.
     *
     * @throws IllegalArgumentException if the resource {@link URL} of the chosen
     *         {@link Converter} {@link Class} cannot be resolved.
     */
    private void registerConverters() {
        String converterClassResourceName = toResourceName(CONVERTER_CLASS);
        URL converterClassResourceLocation = resolveResourceLocation(converterClassResourceName);
        Assert.notNull(converterClassResourceLocation,
                "Could not resolve URL for Converter class [%1$s] having resource name [%2$s]",
                CONVERTER_CLASS.getName(), converterClassResourceName);
        try {
            // Choose the scan strategy based on whether the class lives in a jar or on disk.
            if (isJarFile(converterClassResourceLocation)) {
                registerConvertersFromJarFile(converterClassResourceName, converterClassResourceLocation);
            } else {
                registerConvertersFromFileSystem(converterClassResourceName, converterClassResourceLocation);
            }
        } catch (Exception ignore) {
            // Deliberate best-effort fallback: if location-based scanning fails for any
            // reason, fall back to package-based registration rather than failing startup.
            registerConvertersFromPackage();
        }
    }
}
public class LinkedList {
    /**
     * Get the first entry in the list.
     *
     * @return the first entry in the list, or null if the list is empty.
     */
    public Entry getFirst() {
        // Trace-only bodywork: entry/exit logging around a plain field read.
        if (tc.isEntryEnabled())
            SibTr.entry(tc, "getFirst");
        if (tc.isEntryEnabled())
            SibTr.exit(tc, "getFirst", first);
        // NOTE(review): the javadoc says "Synchronized" but no synchronization is
        // visible in this method — confirm callers hold the appropriate lock.
        return first;
    }
}
public class AdminToolQuartzServiceImpl { /** * / * ( non - Javadoc ) * @ see de . chandre . admintool . quartz . AdminToolQuartzService # stopScheduler ( ) */ @ Override public void stopScheduler ( ) { } }
if ( ! config . isStopSchedulerAllowed ( ) ) { LOGGER . warn ( "not allowed to stop the scheduler" ) ; return ; } if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "stopping scheduler factory" ) ; schedulerFactory . stop ( ) ;
public class CClassLoader { /** * analyse the content of the given jar file * @ param jarFile * the jar to analise */ private final void readDirectories ( final URL jarFile ) { } }
JarInputStream jarIn = null ; try { if ( ! jarFile . getPath ( ) . endsWith ( ".jar" ) ) { return ; } if ( CClassLoader . sl ( CClassLoader . DEBUG ) ) { CClassLoader . log ( "opening jar : " + jarFile . toExternalForm ( ) , CClassLoader . DEBUG ) ; } jarIn = new JarInputStream ( jarFile . openStream ( ) ) ; JarEntry jarEntry = null ; while ( ( jarEntry = jarIn . getNextJarEntry ( ) ) != null ) { if ( jarEntry . isDirectory ( ) ) { continue ; } final URL url = new URL ( "yahpjarloader://" + CBASE64Codec . encode ( jarFile . toExternalForm ( ) . getBytes ( "utf-8" ) ) . replaceAll ( "\n" , "" ) + "/" + jarEntry . getName ( ) ) ; if ( CClassLoader . sl ( CClassLoader . DEBUG ) ) { CClassLoader . log ( "found entry : " + url . toString ( ) , CClassLoader . DEBUG ) ; } if ( jarEntry . getName ( ) . endsWith ( ".class" ) ) { if ( ! this . classesMap . containsKey ( jarEntry . getName ( ) ) ) { if ( ! this . booResourceOnly ) { this . classesMap . put ( jarEntry . getName ( ) , url ) ; } } if ( this . resourcesMap . containsKey ( jarEntry . getName ( ) ) ) { final Object to = this . resourcesMap . get ( jarEntry . getName ( ) ) ; if ( to instanceof URL ) { final URL uo = ( URL ) to ; final List l = new ArrayList ( ) ; l . add ( uo ) ; l . add ( url ) ; this . resourcesMap . put ( jarEntry . getName ( ) , l ) ; } else if ( to instanceof List ) { final List uo = ( List ) to ; uo . add ( url ) ; this . resourcesMap . put ( jarEntry . getName ( ) , uo ) ; } } else { this . resourcesMap . put ( jarEntry . getName ( ) , url ) ; } } else if ( jarEntry . getName ( ) . startsWith ( "native/" ) ) { String system = jarEntry . getName ( ) . substring ( 7 ) ; system = system . substring ( 0 , system . indexOf ( '/' ) ) ; if ( ! this . dllMap . containsKey ( system ) ) { this . dllMap . put ( system , url ) ; } if ( this . resourcesMap . containsKey ( jarEntry . getName ( ) ) ) { final Object to = this . resourcesMap . get ( jarEntry . 
getName ( ) ) ; if ( to instanceof URL ) { final URL uo = ( URL ) to ; final List l = new ArrayList ( ) ; l . add ( uo ) ; l . add ( url ) ; this . resourcesMap . put ( jarEntry . getName ( ) , l ) ; } else if ( to instanceof List ) { final List uo = ( List ) to ; uo . add ( url ) ; this . resourcesMap . put ( jarEntry . getName ( ) , uo ) ; } } else { this . resourcesMap . put ( jarEntry . getName ( ) , url ) ; } } else { if ( this . resourcesMap . containsKey ( jarEntry . getName ( ) ) ) { final Object to = this . resourcesMap . get ( jarEntry . getName ( ) ) ; if ( to instanceof URL ) { final URL uo = ( URL ) to ; final List l = new ArrayList ( ) ; l . add ( uo ) ; l . add ( url ) ; this . resourcesMap . put ( jarEntry . getName ( ) , l ) ; } else if ( to instanceof List ) { final List uo = ( List ) to ; uo . add ( url ) ; this . resourcesMap . put ( jarEntry . getName ( ) , uo ) ; } } else { this . resourcesMap . put ( jarEntry . getName ( ) , url ) ; } } } if ( CClassLoader . sl ( CClassLoader . DEBUG ) ) { CClassLoader . log ( "opening jar : " + jarFile . getFile ( ) . toString ( ) + " done." , CClassLoader . DEBUG ) ; } } catch ( final MalformedURLException mue ) { mue . printStackTrace ( ) ; if ( CClassLoader . sl ( CClassLoader . FATAL ) ) { CClassLoader . log ( mue . getMessage ( ) , CClassLoader . FATAL ) ; } } catch ( final IOException ioe ) { ioe . printStackTrace ( ) ; if ( CClassLoader . sl ( CClassLoader . FATAL ) ) { CClassLoader . log ( ioe . getMessage ( ) , CClassLoader . FATAL ) ; } } catch ( final Exception e ) { e . printStackTrace ( ) ; if ( CClassLoader . sl ( CClassLoader . FATAL ) ) { CClassLoader . log ( e . getMessage ( ) , CClassLoader . FATAL ) ; } } finally { try { jarIn . close ( ) ; jarIn = null ; } catch ( final Exception e ) { } }
public class IdentityAssertionLoginModule {
    /**
     * Initialize this login module. Called by the {@code LoginContext} after this
     * login module is instantiated.
     *
     * @param subject The subject to be authenticated.
     * @param callbackHandler A {@code CallbackHandler} for communicating with the end user;
     *        not used by this module.
     * @param sharedState The state shared with other configured login modules.
     * @param options The options specified in the login configuration for this particular
     *        login module; not used by this module.
     */
    @Override
    public void initialize(Subject subject, CallbackHandler callbackHandler, Map sharedState, Map options) {
        // Only the subject and shared state are retained; callbackHandler and options
        // are intentionally ignored (identity assertion needs no user interaction).
        this.subject = subject;
        this.sharedState = sharedState;
    }
}
public class RoadType {
    /**
     * Gets the value of the genericApplicationPropertyOfRoad property.
     * This accessor returns a reference to the live list, not a snapshot; any
     * modification to the returned list is reflected inside the JAXB object,
     * which is why there is no <CODE>set</CODE> method. To add a new item:
     * <pre>
     *    get_GenericApplicationPropertyOfRoad().add(newItem);
     * </pre>
     * Objects of the following type(s) are allowed in the list:
     * {@link JAXBElement}{@code <}{@link Object}{@code >}
     */
    public List<JAXBElement<Object>> get_GenericApplicationPropertyOfRoad() {
        // JAXB-generated lazy initialization of the backing list.
        if (_GenericApplicationPropertyOfRoad == null) {
            _GenericApplicationPropertyOfRoad = new ArrayList<JAXBElement<Object>>();
        }
        return this._GenericApplicationPropertyOfRoad;
    }
}
public class EJBWrapperProxy {
    /**
     * Adds a definition for the Object.hashCode method for the No-Interface view
     * (LocalBean), which delegates to the proxy field's hashCode.
     *
     * @param cw ASM ClassWriter to add the method to.
     * @param implClassName name of the wrapper class being generated.
     */
    private static void addClassProxyHashCodeMethod(ClassWriter cw, String implClassName) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, INDENT + "adding method : hashCode ()I");
        // public int hashCode()   (note: the original comment wrongly said "equals")
        final String desc = "()I";
        MethodVisitor mv = cw.visitMethod(ACC_PUBLIC, "hashCode", desc, null, null);
        mv.visitCode();
        // return this.ivProxy.hashCode();
        mv.visitVarInsn(ALOAD, 0);
        mv.visitFieldInsn(GETFIELD, implClassName, LOCAL_BEAN_PROXY_FIELD,
                LOCAL_BEAN_PROXY_FIELD_TYPE_NAME);
        mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Object", "hashCode", desc);
        mv.visitInsn(IRETURN);
        // max stack = 1 (the proxy ref / result), max locals = 2 (upper bound incl. `this`).
        mv.visitMaxs(1, 2);
        mv.visitEnd();
    }
}
public class CmsSearchWidgetDialog {
    /**
     * Returns a list of <code>{@link CmsSelectWidgetOption}</code> objects for field
     * list selection, one per configured search field, with macros resolved in the
     * display names.
     *
     * @return a list of <code>{@link CmsSelectWidgetOption}</code> objects (possibly partial
     *         if an error occurs mid-iteration)
     */
    private List<CmsSelectWidgetOption> getFieldList() {
        List<CmsSelectWidgetOption> retVal = new ArrayList<CmsSelectWidgetOption>();
        try {
            Iterator<CmsSearchField> i = getSearchFields().iterator();
            while (i.hasNext()) {
                // NOTE(review): unchecked downcast — a non-Lucene field would throw
                // ClassCastException here, which the catch below silently swallows.
                CmsLuceneField field = (CmsLuceneField) i.next();
                retVal.add(new CmsSelectWidgetOption(field.getName(), true,
                        getMacroResolver().resolveMacros(field.getDisplayName())));
            }
        } catch (Exception e) {
            // noop — deliberately return whatever options were collected so far
        }
        return retVal;
    }
}
public class TextComponentInterceptor { /** * Converts the given component to a < code > JTextComponent < / code > . This can be a * simple cast if the component is already a text component , or an embedded component * ( for example a JSpinner ) . * This method is protected , and can be overridden when necessary . * @ param component the component * @ return a < code > JTextComponent < / code > , or < code > null < / code > */ protected JTextComponent getTextComponent ( JComponent component ) { } }
if ( component instanceof JSpinner ) { JSpinner spinner = ( JSpinner ) component ; if ( spinner . getEditor ( ) instanceof JSpinner . DefaultEditor ) { return ( ( JSpinner . DefaultEditor ) spinner . getEditor ( ) ) . getTextField ( ) ; } else if ( spinner . getEditor ( ) instanceof JTextField ) { return ( JTextField ) spinner . getEditor ( ) ; } else { logger . warn ( "Cannot use JSpinner editor of type " + spinner . getEditor ( ) . getClass ( ) ) ; return null ; } } else if ( component instanceof JTextComponent ) { return ( JTextComponent ) component ; } else { return null ; }
public class CmsJspNavBuilder {
    /**
     * Initializes this bean.
     *
     * @param cms the current cms context
     * @param locale the locale for which properties should be read
     */
    public void init(CmsObject cms, Locale locale) {
        // Delegate to the full init, defaulting the URI to the current request URI.
        init(cms, locale, cms.getRequestContext().getUri());
    }
}
public class Args { /** * Produces a single element list with a key / value pair comma separated string from a { @ link Map } . * @ return { @ code [ " key1 = value1 , key2 = value2 , . . . " ] } or { @ code [ ] } if keyValueMapping = empty / null */ static List < String > keyValueString ( @ Nullable Map < ? , ? > keyValueMapping ) { } }
List < String > result = Lists . newArrayList ( ) ; if ( keyValueMapping != null && keyValueMapping . size ( ) > 0 ) { for ( Map . Entry < ? , ? > entry : keyValueMapping . entrySet ( ) ) { result . add ( entry . getKey ( ) + "=" + entry . getValue ( ) ) ; } Joiner joiner = Joiner . on ( "," ) ; return Collections . singletonList ( joiner . join ( result ) ) ; } return Collections . emptyList ( ) ;
public class GeoJsonReaderDriver {
    /**
     * Reads the geometry object from the JSON stream and stores the parsed geometry
     * into {@code values[0]}. A JSON null geometry leaves {@code values[0]} untouched.
     *
     * @param jp the JSON parser, positioned just before the geometry value
     * @param values row value array; slot 0 receives the geometry
     * @throws IOException on stream/parse failure
     * @throws SQLException on geometry construction failure
     */
    private void setGeometry(JsonParser jp, Object[] values) throws IOException, SQLException {
        if (jp.nextToken() != JsonToken.VALUE_NULL) { // START_OBJECT { in case of null geometry
            jp.nextToken(); // FIELD_NAME type
            jp.nextToken(); // VALUE_STRING Point
            String geometryType = jp.getText();
            // Dispatch on the "type" member (e.g. "Point", "Polygon", ...).
            values[0] = parseGeometry(jp, geometryType);
        }
    }
}
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Lazily resolves and caches the EClass for IfcDerivedUnitElement from the
     * registered Ifc4 package (classifier index 173).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcDerivedUnitElement() {
        if (ifcDerivedUnitElementEClass == null) {
            // EMF-generated lookup: fetch classifier #173 from the global package registry.
            ifcDerivedUnitElementEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(173);
        }
        return ifcDerivedUnitElementEClass;
    }
}
public class GwtRunner {
    /**
     * Generates the output code for each chunk, taking into account the passed
     * {@code flags}: applies per-chunk wrappers (with {@code %basename%} and {@code %s}
     * substitution), optionally produces a source map per chunk, and collects the
     * results into a ChunkOutput.
     */
    private static ChunkOutput writeChunkOutput(Compiler compiler, Flags flags, List<JSModule> chunks) {
        ArrayList<File> outputFiles = new ArrayList<>();
        ChunkOutput output = new ChunkOutput();
        Map<String, String> parsedModuleWrappers =
                parseModuleWrappers(Arrays.asList(getStringArray(flags, "chunkWrapper")), chunks);
        for (JSModule c : chunks) {
            // A fresh source map per chunk when source maps are requested.
            if (flags.createSourceMap != null && !flags.createSourceMap.equals(false)) {
                compiler.resetAndIntitializeSourceMap();
            }
            File file = new File();
            file.path = flags.chunkOutputPathPrefix + c.getName() + ".js";
            String code = compiler.toSource(c);
            // Locate the last path separator (forward slash preferred, backslash fallback).
            int lastSeparatorIndex = file.path.lastIndexOf('/');
            if (lastSeparatorIndex < 0) {
                lastSeparatorIndex = file.path.lastIndexOf('\\');
            }
            // NOTE(review): when a separator IS found, substring starts AT the separator,
            // so baseName keeps a leading '/' or '\\' — confirm that is intended.
            String baseName = file.path.substring(Math.max(0, lastSeparatorIndex));
            String wrapper = parsedModuleWrappers.get(c.getName()).replace("%basename%", baseName);
            StringBuilder out = new StringBuilder();
            int pos = wrapper.indexOf("%s");
            if (pos != -1) {
                String prefix = "";
                if (pos > 0) {
                    prefix = wrapper.substring(0, pos);
                    out.append(prefix);
                }
                out.append(code);
                int suffixStart = pos + "%s".length();
                if (suffixStart != wrapper.length()) {
                    // Something after placeholder?
                    out.append(wrapper, suffixStart, wrapper.length());
                }
                // Make sure we always end output with a line feed.
                out.append('\n');
                // If we have a source map, adjust its offsets to match
                // the code WITHIN the wrapper.
                // NOTE(review): the `compiler != null` check is dead — compiler was already
                // dereferenced above.
                if (compiler != null && compiler.getSourceMap() != null) {
                    compiler.getSourceMap().setWrapperPrefix(prefix);
                }
            } else {
                // No %s placeholder: emit the bare code with a trailing newline.
                out.append(code);
                out.append('\n');
            }
            file.src = out.toString();
            if (flags.createSourceMap != null && !flags.createSourceMap.equals(false)) {
                StringBuilder b = new StringBuilder();
                try {
                    compiler.getSourceMap().appendTo(b, file.path);
                } catch (IOException e) {
                    // ignore
                }
                file.sourceMap = b.toString();
            }
            outputFiles.add(file);
        }
        output.compiledFiles = outputFiles.toArray(new File[0]);
        return output;
    }
}
public class DBRestore {
    /**
     * Restores a single table from a backup archive into the target database.
     *
     * Reads column metadata and row data from the zipped content/content-length
     * streams produced by {@code DBBackup}, optionally transforming the data
     * (adding, skipping, or converting columns) according to the supplied
     * {@link TableTransformationRule}, and inserts the rows in batches.
     * Afterwards, if sequences are in use, the dialect-specific sequence for the
     * item table is re-created/updated to match the restored data.
     *
     * @param storageDir the directory containing the backup files
     * @param jdbcConn the JDBC connection used for inserting data
     * @param tableName the name of the target table
     * @param restoreRule the transformation rule describing source/target mapping
     * @throws IOException if reading the backup content fails
     * @throws SQLException if a database operation fails
     */
    private void restoreTable(File storageDir, Connection jdbcConn, String tableName,
        TableTransformationRule restoreRule) throws IOException, SQLException
    {
        // Need privileges: restoring a repository is a managed operation.
        SecurityManager security = System.getSecurityManager();
        if (security != null)
        {
            security.checkPermission(JCRRuntimePermissions.MANAGE_REPOSITORY_PERMISSION);
        }

        ZipObjectReader contentReader = null;
        ZipObjectReader contentLenReader = null;
        PreparedStatement insertNode = null;
        ResultSet tableMetaData = null;
        Statement stmt = null;

        // switch table name to lower case: PostgreSQL folds unquoted identifiers to lower case
        if (dialect.startsWith(DBConstants.DB_DIALECT_PGSQL))
        {
            tableName = tableName.toLowerCase();
        }

        try
        {
            File contentFile = new File(storageDir, restoreRule.getSrcTableName() + DBBackup.CONTENT_FILE_SUFFIX);

            // check old style backup format, when for every table was dedicated zip file
            if (PrivilegedFileHelper.exists(contentFile))
            {
                contentReader = new ZipObjectReader(PrivilegedFileHelper.zipInputStream(contentFile));
                // old format: single entry per file, just position on it
                contentReader.getNextEntry();

                File contentLenFile =
                    new File(storageDir, restoreRule.getSrcTableName() + DBBackup.CONTENT_LEN_FILE_SUFFIX);
                contentLenReader = new ZipObjectReader(PrivilegedFileHelper.zipInputStream(contentLenFile));
                contentLenReader.getNextEntry();
            }
            else
            {
                // new format: one zip holds all tables; skip entries until the
                // entry named after the source table is reached (empty loop bodies
                // are intentional).
                contentFile = new File(storageDir, DBBackup.CONTENT_ZIP_FILE);
                contentReader = new ZipObjectReader(PrivilegedFileHelper.zipInputStream(contentFile));
                while (!contentReader.getNextEntry().getName().equals(restoreRule.getSrcTableName()));

                File contentLenFile = new File(storageDir, DBBackup.CONTENT_LEN_ZIP_FILE);
                contentLenReader = new ZipObjectReader(PrivilegedFileHelper.zipInputStream(contentLenFile));
                while (!contentLenReader.getNextEntry().getName().equals(restoreRule.getSrcTableName()));
            }

            // get information about source table: column count followed by
            // (type, name) pairs, in the order they were written by the backup.
            int sourceColumnCount = contentReader.readInt();

            List<Integer> columnType = new ArrayList<Integer>();
            List<String> columnName = new ArrayList<String>();

            for (int i = 0; i < sourceColumnCount; i++)
            {
                columnType.add(contentReader.readInt());
                columnName.add(contentReader.readString());
            }

            int targetColumnCount = sourceColumnCount;

            // a transformation rule may add a brand new column at a fixed index
            if (restoreRule.getNewColumnIndex() != null)
            {
                targetColumnCount++;
                columnType.add(restoreRule.getNewColumnIndex(), restoreRule.getNewColumnType());

                String newColumnName =
                    dialect.startsWith(DBConstants.DB_DIALECT_PGSQL) ? restoreRule.getNewColumnName().toLowerCase()
                        : restoreRule.getNewColumnName();
                columnName.add(restoreRule.getNewColumnIndex(), newColumnName);
            }

            // construct statement: build the column list and the matching "?"
            // placeholder list, skipping the column the rule says to drop.
            StringBuilder names = new StringBuilder();
            StringBuilder parameters = new StringBuilder();
            for (int i = 0; i < targetColumnCount; i++)
            {
                if (restoreRule.getSkipColumnIndex() != null && restoreRule.getSkipColumnIndex() == i)
                {
                    continue;
                }
                names.append(columnName.get(i)).append(i == targetColumnCount - 1 ? "" : ",");
                parameters.append("?").append(i == targetColumnCount - 1 ? "" : ",");
            }

            int batchSize = 0;
            insertNode =
                jdbcConn.prepareStatement("INSERT INTO " + tableName + " (" + names + ") VALUES(" + parameters + ")");

            // set data: read rows until the content-length stream is exhausted.
            outer : while (true)
            {
                // i walks the logical (source + injected) columns, targetIndex
                // walks the actual INSERT parameters (skipped columns excluded).
                for (int i = 0, targetIndex = 0; i < columnType.size(); i++, targetIndex++)
                {
                    InputStream stream;
                    long len;

                    if (restoreRule.getNewColumnIndex() != null && restoreRule.getNewColumnIndex() == i)
                    {
                        // injected column: its value is always the destination container name
                        stream =
                            new ByteArrayInputStream(restoreRule.getDstContainerName().getBytes(
                                Constants.DEFAULT_ENCODING));
                        len = ((ByteArrayInputStream)stream).available();
                    }
                    else
                    {
                        try
                        {
                            len = contentLenReader.readLong();
                        }
                        catch (EOFException e)
                        {
                            if (i == 0)
                            {
                                // content length file is empty check content file:
                                // a clean EOF on both streams means all rows were read.
                                try
                                {
                                    contentReader.readByte();
                                }
                                catch (EOFException e1)
                                {
                                    break outer;
                                }
                            }
                            // lengths ended mid-row: the backup is corrupted
                            throw new IOException("Content length file is empty but content still present", e);
                        }
                        // len == -1 encodes SQL NULL; otherwise spool the value
                        stream = len == -1 ? null : spoolInputStream(contentReader, len);
                    }

                    if (restoreRule.getSkipColumnIndex() != null && restoreRule.getSkipColumnIndex() == i)
                    {
                        // value was consumed from the stream but is not inserted
                        targetIndex--;
                        continue;
                    }

                    // set
                    if (stream != null)
                    {
                        if (restoreRule.getConvertColumnIndex() != null && restoreRule.getConvertColumnIndex().contains(i))
                        {
                            // convert column value: rewrite container-name prefixes when
                            // moving between single-db and multi-db layouts.
                            // NOTE(review): ba.read(readBuffer) return values are ignored
                            // throughout; safe for ByteArrayInputStream but worth confirming.
                            ByteArrayInputStream ba = (ByteArrayInputStream)stream;
                            byte[] readBuffer = new byte[ba.available()];
                            ba.read(readBuffer);

                            String currentValue = new String(readBuffer, Constants.DEFAULT_ENCODING);
                            if (currentValue.equals(Constants.ROOT_PARENT_UUID))
                            {
                                // the root parent UUID is container-independent; keep as-is
                                stream = new ByteArrayInputStream(Constants.ROOT_PARENT_UUID.getBytes());
                            }
                            else
                            {
                                if (restoreRule.getDstMultiDb())
                                {
                                    if (!restoreRule.getSrcMultiDb())
                                    {
                                        // single-db -> multi-db: strip the source container prefix
                                        stream =
                                            new ByteArrayInputStream(new String(readBuffer, Constants.DEFAULT_ENCODING)
                                                .substring(restoreRule.getSrcContainerName().length()).getBytes());
                                    }
                                }
                                else
                                {
                                    if (restoreRule.getSrcMultiDb())
                                    {
                                        // multi-db -> single-db: prepend the destination container name
                                        StringBuilder builder = new StringBuilder();
                                        builder.append(restoreRule.getDstContainerName());
                                        builder.append(currentValue);

                                        stream = new ByteArrayInputStream(builder.toString().getBytes());
                                    }
                                    else
                                    {
                                        // single-db -> single-db: swap source prefix for destination prefix
                                        StringBuilder builder = new StringBuilder();
                                        builder.append(restoreRule.getDstContainerName());
                                        builder.append(new String(readBuffer, Constants.DEFAULT_ENCODING)
                                            .substring(restoreRule.getSrcContainerName().length()));

                                        stream = new ByteArrayInputStream(builder.toString().getBytes());
                                    }
                                }
                            }
                            len = ((ByteArrayInputStream)stream).available();
                        }

                        // bind the value using a JDBC setter matching the column's SQL type
                        if (columnType.get(i) == Types.INTEGER || columnType.get(i) == Types.BIGINT
                            || columnType.get(i) == Types.SMALLINT || columnType.get(i) == Types.TINYINT)
                        {
                            ByteArrayInputStream ba = (ByteArrayInputStream)stream;
                            byte[] readBuffer = new byte[ba.available()];
                            ba.read(readBuffer);

                            String value = new String(readBuffer, Constants.DEFAULT_ENCODING);
                            insertNode.setLong(targetIndex + 1, Integer.parseInt(value));
                        }
                        else if (columnType.get(i) == Types.BIT)
                        {
                            ByteArrayInputStream ba = (ByteArrayInputStream)stream;
                            byte[] readBuffer = new byte[ba.available()];
                            ba.read(readBuffer);

                            String value = new String(readBuffer);
                            if (dialect.startsWith(DBConstants.DB_DIALECT_PGSQL))
                            {
                                // PostgreSQL serializes booleans as "t"/"f"
                                insertNode.setBoolean(targetIndex + 1, value.equalsIgnoreCase("t"));
                            }
                            else
                            {
                                insertNode.setBoolean(targetIndex + 1, value.equals("1"));
                            }
                        }
                        else if (columnType.get(i) == Types.BOOLEAN)
                        {
                            ByteArrayInputStream ba = (ByteArrayInputStream)stream;
                            byte[] readBuffer = new byte[ba.available()];
                            ba.read(readBuffer);

                            String value = new String(readBuffer);
                            insertNode.setBoolean(targetIndex + 1, value.equalsIgnoreCase("true"));
                        }
                        else if (columnType.get(i) == Types.VARBINARY || columnType.get(i) == Types.LONGVARBINARY
                            || columnType.get(i) == Types.BLOB || columnType.get(i) == Types.BINARY
                            || columnType.get(i) == Types.OTHER)
                        {
                            insertNode.setBinaryStream(targetIndex + 1, stream, (int)len);
                        }
                        else
                        {
                            // default: treat the value as text
                            // NOTE(review): stream.read(readBuffer) may read fewer than len
                            // bytes for non-ByteArray streams — confirm spoolInputStream's contract.
                            byte[] readBuffer = new byte[(int)len];
                            stream.read(readBuffer);
                            insertNode.setString(targetIndex + 1, new String(readBuffer, Constants.DEFAULT_ENCODING));
                        }
                    }
                    else
                    {
                        insertNode.setNull(targetIndex + 1, columnType.get(i));
                    }
                }

                // add statement to batch; flush and commit every MAXIMUM_BATCH_SIZE rows
                insertNode.addBatch();

                if (++batchSize == MAXIMUM_BATCH_SIZE)
                {
                    insertNode.executeBatch();
                    commitBatch();
                    batchSize = 0;
                }
            }

            // flush the final partial batch
            if (batchSize != 0)
            {
                insertNode.executeBatch();
                commitBatch();
            }

            if (useSequence)
            {
                // re-create or update the per-table sequence so the next generated
                // order number continues after the restored data; DDL differs per dialect.
                batchSize = 0;
                String update = "DROP SEQUENCE " + tableName + "_seq";
                stmt = jdbcConn.createStatement();

                if ((dialect.startsWith(DBConstants.DB_DIALECT_MYSQL) || dialect.startsWith(DBConstants.DB_DIALECT_MSSQL)
                    || dialect.startsWith(DBConstants.DB_DIALECT_SYBASE)) && tableName.equalsIgnoreCase(this.itemTableName))
                {
                    // these dialects emulate sequences with a helper table
                    boolean exist = checkEntry(jdbcConn, tableName + "_SEQ");
                    if (exist)
                    {
                        insertNode =
                            jdbcConn.prepareStatement("UPDATE " + tableName
                                + "_SEQ SET nextVal=? where name='LAST_N_ORDER_NUM'");
                    }
                    else
                    {
                        insertNode =
                            jdbcConn.prepareStatement("INSERT INTO " + tableName
                                + "_SEQ (name, nextVal) VALUES ('LAST_N_ORDER_NUM', ?)");
                    }
                    insertNode.setInt(1, getStartValue(jdbcConn, tableName));
                    insertNode.executeUpdate();
                    batchSize++;
                }
                else if ((dialect.startsWith(DBConstants.DB_DIALECT_PGSQL) || dialect.startsWith(DBConstants.DB_DIALECT_DB2)
                    || dialect.startsWith(DBConstants.DB_DIALECT_HSQLDB)) && (tableName.equalsIgnoreCase(this.itemTableName)))
                {
                    stmt.execute(update);
                    update =
                        "CREATE SEQUENCE " + tableName
                            + "_seq INCREMENT BY 1 MINVALUE -1 NO MAXVALUE NO CYCLE START WITH "
                            + (getStartValue(jdbcConn, tableName) + 1);
                    stmt.execute(update);
                    batchSize++;
                }
                else if (dialect.startsWith(DBConstants.DB_DIALECT_H2)
                    && (tableName.equalsIgnoreCase(this.itemTableName)))
                {
                    stmt.execute(update);
                    update =
                        "CREATE SEQUENCE " + tableName + "_seq INCREMENT BY 1 START WITH "
                            + (getStartValue(jdbcConn, tableName) + 1);
                    stmt.execute(update);
                    batchSize++;
                }
                else if (dialect.startsWith(DBConstants.DB_DIALECT_ORACLE)
                    && tableName.equalsIgnoreCase(this.itemTableName))
                {
                    stmt.execute(update);
                    update =
                        "CREATE SEQUENCE " + tableName
                            + "_seq INCREMENT BY 1 MINVALUE -1 NOMAXVALUE NOCACHE NOCYCLE START WITH "
                            + (getStartValue(jdbcConn, tableName) + 1);
                    stmt.execute(update);
                    batchSize++;
                }

                if (batchSize != 0)
                {
                    commitBatch();
                }
            }
        }
        finally
        {
            // close every resource individually so one failure doesn't leak the rest
            if (contentReader != null)
            {
                contentReader.close();
            }

            if (contentLenReader != null)
            {
                contentLenReader.close();
            }

            if (insertNode != null)
            {
                insertNode.close();
            }

            if (stmt != null)
            {
                stmt.close();
            }

            // delete all temporary files; defer to the file cleaner if deletion fails now
            for (File file : spoolFileList)
            {
                if (!PrivilegedFileHelper.delete(file))
                {
                    fileCleaner.addFile(file);
                }
            }

            if (tableMetaData != null)
            {
                tableMetaData.close();
            }
        }
    }
}
public class ReflectUtil { /** * Test if the given class is a proxy class that implements * non - public interface . Such proxy class may be in a non - restricted * package that bypasses checkPackageAccess . */ public static boolean isNonPublicProxyClass ( Class < ? > cls ) { } }
String name = cls . getName ( ) ; int i = name . lastIndexOf ( '.' ) ; String pkg = ( i != - 1 ) ? name . substring ( 0 , i ) : "" ; // NOTE : Android creates proxies in the " default " package ( and not com . sun . proxy ) , which // makes this check imprecise . However , this function is only ever called if there ' s // a security manager installed ( which is the never case on android ) . return Proxy . isProxyClass ( cls ) && ! pkg . isEmpty ( ) ;
public class CacheConfigurationBuilder { /** * Adds a { @ link ResilienceStrategy } configured through a class and optional constructor arguments to the configured * builder . * @ param resilienceStrategyClass the resilience strategy class * @ param arguments optional constructor arguments * @ return a new builder with the added resilience strategy configuration */ @ SuppressWarnings ( "rawtypes" ) public CacheConfigurationBuilder < K , V > withResilienceStrategy ( Class < ? extends ResilienceStrategy > resilienceStrategyClass , Object ... arguments ) { } }
return addOrReplaceConfiguration ( new DefaultResilienceStrategyConfiguration ( requireNonNull ( resilienceStrategyClass , "Null resilienceStrategyClass" ) , arguments ) ) ;
public class GoogleMapsTileMath { /** * Transforms given lat / lon in WGS84 Datum to XY in Spherical Mercator * EPSG : 3857 * @ param env The envelope to transform * @ return The envelope transformed to EPSG : 3857 */ public Envelope lngLatToMeters ( Envelope env ) { } }
Coordinate min = lngLatToMeters ( env . getMinX ( ) , env . getMinY ( ) ) ; Coordinate max = lngLatToMeters ( env . getMaxX ( ) , env . getMaxY ( ) ) ; Envelope result = new Envelope ( min . x , max . x , min . y , max . y ) ; return result ;
public class NaaccrXmlDictionaryUtils {
    /**
     * Merges the given base dictionary and user dictionaries into one dictionary.
     * <br/><br/>
     * Sort order of the items is based on start column, items without a start column go to the end.
     *
     * @param baseDictionary base dictionary, required
     * @param userDictionaries user dictionaries, optional
     * @return a new merged dictionary containing the items of all the provided dictionaries.
     */
    public static NaaccrDictionary mergeDictionaries(NaaccrDictionary baseDictionary, NaaccrDictionary... userDictionaries) {
        if (baseDictionary == null)
            throw new RuntimeException("Base dictionary is required");

        // The merged dictionary inherits all metadata from the base dictionary;
        // only the URI is decorated to mark it as a merged result.
        NaaccrDictionary result = new NaaccrDictionary();
        result.setNaaccrVersion(baseDictionary.getNaaccrVersion());
        result.setDictionaryUri(baseDictionary.getDictionaryUri() + "[merged]");
        result.setSpecificationVersion(baseDictionary.getSpecificationVersion());
        result.setDescription(baseDictionary.getDescription());

        // Collect base items first, then append the items of every user dictionary.
        List<NaaccrDictionaryItem> items = new ArrayList<>(baseDictionary.getItems());
        for (NaaccrDictionary userDictionary : userDictionaries)
            items.addAll(userDictionary.getItems());
        // NOTE(review): the javadoc says the sort is by start column (items without
        // one going last), but the code sorts by NAACCR ID — confirm which is intended.
        items.sort(Comparator.comparing(NaaccrDictionaryItem::getNaaccrId));
        result.setItems(items);

        // Grouped items are merged and sorted the same way.
        List<NaaccrDictionaryGroupedItem> groupedItems = new ArrayList<>(baseDictionary.getGroupedItems());
        for (NaaccrDictionary userDictionary : userDictionaries)
            groupedItems.addAll(userDictionary.getGroupedItems());
        groupedItems.sort(Comparator.comparing(NaaccrDictionaryItem::getNaaccrId));
        result.setGroupedItems(groupedItems);

        return result;
    }
}
public class Cob2AvroJob {
    /**
     * Sets the job input key mainframe record type.
     *
     * @param job the job to configure
     * @param cobolType the input key mainframe record type (a CobolComplexType subclass)
     */
    public static void setInputKeyRecordType(Job job, Class<? extends CobolComplexType> cobolType) {
        // Store the class in the job configuration so it can be instantiated later;
        // the third argument enforces that the class is assignable to CobolComplexType.
        job.getConfiguration().setClass(CONF_INPUT_KEY_RECORD_TYPE, cobolType, CobolComplexType.class);
    }
}
public class ApiOvhDomain { /** * List of whois obfuscators * REST : GET / domain / { serviceName } / owo * @ param field [ required ] Filter the value of field property ( = ) * @ param serviceName [ required ] The internal name of your domain */ public ArrayList < OvhWhoisObfuscatorFieldsEnum > serviceName_owo_GET ( String serviceName , OvhWhoisObfuscatorFieldsEnum field ) throws IOException { } }
String qPath = "/domain/{serviceName}/owo" ; StringBuilder sb = path ( qPath , serviceName ) ; query ( sb , "field" , field ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t7 ) ;
public class FitByMeanStatistics { /** * Computes the mean and standard deviation of the points from the model */ private void computeMean ( ) { } }
meanError = 0 ; int size = allPoints . size ( ) ; for ( PointIndex < Point > inlier : allPoints ) { Point pt = inlier . data ; meanError += modelError . computeDistance ( pt ) ; } meanError /= size ;
public class MementoUtil { /** * Use this entity as the new * @ param entity * the entity to be used as the new */ public void addCurrentEntity ( Object entity ) { } }
Object identifier = identifier ( entity ) ; Object o = memento . get ( identifier ) ; if ( o == null ) { inserted . add ( entity ) ; } else { potentiallyChanged . put ( identifier , entity ) ; }
public class Document { /** * Saves a new revision by letting the caller update the existing properties . * This method handles conflicts by retrying ( calling the block again ) . * The DocumentUpdater implementation should modify the properties of * the new revision and return YES to save or NO to cancel . * Be careful : the DocumentUpdater can be called multiple times if there is a conflict ! * @ param updater the callback DocumentUpdater implementation . Will be called on each * attempt to save . Should update the given revision ' s properties and then * return YES , or just return NO to cancel . * @ return The new saved revision , or null on cancellation . * @ throws CouchbaseLiteException */ @ InterfaceAudience . Public public SavedRevision update ( DocumentUpdater updater ) throws CouchbaseLiteException { } }
int lastErrorCode = Status . UNKNOWN ; do { // if there is a conflict error , get the latest revision from db instead of cache if ( lastErrorCode == Status . CONFLICT ) { forgetCurrentRevision ( ) ; } UnsavedRevision newRev = createRevision ( ) ; if ( updater . update ( newRev ) == false ) { break ; } try { SavedRevision savedRev = newRev . save ( ) ; if ( savedRev != null ) { return savedRev ; } } catch ( CouchbaseLiteException e ) { lastErrorCode = e . getCBLStatus ( ) . getCode ( ) ; if ( lastErrorCode != Status . CONFLICT ) throw e ; } } while ( lastErrorCode == Status . CONFLICT ) ; return null ;
public class Links { /** * Returns a Stream of links . * @ return Stream of Links */ @ SuppressWarnings ( "rawtypes" ) public Stream < Link > stream ( ) { } }
return links . values ( ) . stream ( ) . map ( obj -> { if ( obj instanceof List ) { return ( List ) obj ; } else { return singletonList ( obj ) ; } } ) . flatMap ( Collection :: stream ) ;
public class MapMakerInternalMap { /** * Gets the value from an entry . Returns { @ code null } if the entry is invalid , * partially - collected , computing , or expired . Unlike { @ link Segment # getLiveValue } this method * does not attempt to clean up stale entries . */ V getLiveValue ( ReferenceEntry < K , V > entry ) { } }
if ( entry . getKey ( ) == null ) { return null ; } V value = entry . getValueReference ( ) . get ( ) ; if ( value == null ) { return null ; } if ( expires ( ) && isExpired ( entry ) ) { return null ; } return value ;
public class CmsVfsTabHandler { /** * This method is called when a folder is selected or deselected in the VFS tab . < p > * @ param folder the folder which is selected or deselected * @ param selected true if the folder has been selected , false if it has been deselected */ public void onSelectFolder ( String folder , boolean selected ) { } }
if ( selected ) { m_controller . addFolder ( folder ) ; } else { m_controller . removeFolder ( folder ) ; }
public class Marc { /** * Run XML stream parser over an XML input stream , with an XML event consumer . * @ param consumer the XML event consumer * @ throws XMLStreamException if parsing fails */ public void parseEvents ( MarcXchangeEventConsumer consumer ) throws XMLStreamException { } }
Objects . requireNonNull ( consumer ) ; if ( builder . getMarcListeners ( ) != null ) { for ( Map . Entry < String , MarcListener > entry : builder . getMarcListeners ( ) . entrySet ( ) ) { consumer . setMarcListener ( entry . getKey ( ) , entry . getValue ( ) ) ; } } XMLInputFactory inputFactory = XMLInputFactory . newInstance ( ) ; XMLEventReader xmlEventReader = inputFactory . createXMLEventReader ( builder . getInputStream ( ) ) ; while ( xmlEventReader . hasNext ( ) ) { consumer . add ( xmlEventReader . nextEvent ( ) ) ; } xmlEventReader . close ( ) ;
public class UIMetricUtils { /** * Extract MetricName from ' CC @ SequenceTest4-1-1439469823 @ Merge @ 0 @ @ sys @ Emitted ' , which is ' Emitted ' */ public static String extractMetricName ( String [ ] strs ) { } }
if ( strs . length < 6 ) return null ; return strs [ strs . length - 1 ] ;
public class DefaultTaskFinder { /** * this method is not intended to be called by clients * @ since 2.12 */ @ Inject ( optional = true ) protected Pattern setEndTag ( @ Named ( AbstractMultiLineCommentProvider . END_TAG ) final String endTag ) { } }
return this . endTagPattern = Pattern . compile ( ( endTag + "\\z" ) ) ;
public class AbstractColorPickerPreference { /** * Obtains the format , which should be used to print a textual representation of the * preference ' s color , from a specific typed array . * @ param typedArray * The typed array , the color format should be obtained from , as an instance of the * class { @ link TypedArray } . The typed array may not be null */ private void obtainColorFormat ( @ NonNull final TypedArray typedArray ) { } }
int defaultValue = getContext ( ) . getResources ( ) . getInteger ( R . integer . color_picker_preference_default_color_format ) ; setColorFormat ( ColorFormat . fromValue ( typedArray . getInteger ( R . styleable . AbstractColorPickerPreference_colorFormat , defaultValue ) ) ) ;
public class PutMailboxPermissionsRequest { /** * The permissions granted to the grantee . SEND _ AS allows the grantee to send email as the owner of the mailbox ( the * grantee is not mentioned on these emails ) . SEND _ ON _ BEHALF allows the grantee to send email on behalf of the owner * of the mailbox ( the grantee is not mentioned as the physical sender of these emails ) . FULL _ ACCESS allows the * grantee full access to the mailbox , irrespective of other folder - level permissions set on the mailbox . * @ param permissionValues * The permissions granted to the grantee . SEND _ AS allows the grantee to send email as the owner of the * mailbox ( the grantee is not mentioned on these emails ) . SEND _ ON _ BEHALF allows the grantee to send email on * behalf of the owner of the mailbox ( the grantee is not mentioned as the physical sender of these emails ) . * FULL _ ACCESS allows the grantee full access to the mailbox , irrespective of other folder - level permissions * set on the mailbox . * @ see PermissionType */ public void setPermissionValues ( java . util . Collection < String > permissionValues ) { } }
if ( permissionValues == null ) { this . permissionValues = null ; return ; } this . permissionValues = new java . util . ArrayList < String > ( permissionValues ) ;
public class MetaPurge { /** * Returns a scanner to run over the UID table starting at the given row * @ return A scanner configured for the entire table * @ throws HBaseException if something goes boom */ private Scanner getScanner ( final byte [ ] table ) throws HBaseException { } }
short metric_width = TSDB . metrics_width ( ) ; final byte [ ] start_row = Arrays . copyOfRange ( Bytes . fromLong ( start_id ) , 8 - metric_width , 8 ) ; final byte [ ] end_row = Arrays . copyOfRange ( Bytes . fromLong ( end_id ) , 8 - metric_width , 8 ) ; final Scanner scanner = tsdb . getClient ( ) . newScanner ( table ) ; scanner . setStartKey ( start_row ) ; scanner . setStopKey ( end_row ) ; scanner . setFamily ( NAME_FAMILY ) ; return scanner ;
public class HttpPipelineCallContext {
    /**
     * Stores a key-value pair in the context.
     *
     * @param key the key
     * @param value the value
     */
    public void setData(String key, Object value) {
        // addData returns the updated data container, which replaces the current one.
        this.data = this.data.addData(key, value);
    }
}
public class ServletSupport { /** * Retrieves a value from a cookie * @ param request * @ param key * @ return */ public static String getCookieValue ( ServletRequest request , String key ) { } }
Cookie [ ] cookies = ( ( HttpServletRequest ) request ) . getCookies ( ) ; if ( cookies != null ) { for ( int i = 0 ; i < cookies . length ; i ++ ) { if ( cookies [ i ] . getName ( ) . equals ( key ) ) { return cookies [ i ] . getValue ( ) ; } } } return null ;
public class CmsVfsTab { /** * Un - checks the check boxes for each folder passed in the < code > folders < / code > parameter . < p > * @ param folders the folders for which the check boxes should be unchecked */ public void uncheckFolders ( Collection < String > folders ) { } }
for ( String folder : folders ) { CmsLazyTreeItem item = m_itemsByPath . get ( folder ) ; if ( ( item != null ) && ( item . getCheckBox ( ) != null ) ) { item . getCheckBox ( ) . setChecked ( false ) ; } }
public class CPDefinitionPersistenceImpl {
    /**
     * Caches the cp definition in the entity cache if it is enabled.
     *
     * @param cpDefinition the cp definition
     */
    @Override
    public void cacheResult(CPDefinition cpDefinition) {
        // Primary-key lookup cache.
        entityCache.putResult(CPDefinitionModelImpl.ENTITY_CACHE_ENABLED,
            CPDefinitionImpl.class, cpDefinition.getPrimaryKey(), cpDefinition);

        // Finder cache entry for the unique (uuid, groupId) lookup.
        finderCache.putResult(FINDER_PATH_FETCH_BY_UUID_G,
            new Object[] { cpDefinition.getUuid(), cpDefinition.getGroupId() },
            cpDefinition);

        // Finder cache entry for the unique (companyId, externalReferenceCode) lookup.
        finderCache.putResult(FINDER_PATH_FETCH_BY_C_ERC,
            new Object[] { cpDefinition.getCompanyId(), cpDefinition.getExternalReferenceCode() },
            cpDefinition);

        // Mark the cached snapshot as the baseline for change tracking.
        cpDefinition.resetOriginalValues();
    }
}
public class ModelsImpl {
    /**
     * Updates the composite entity extractor.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param cEntityId The composite entity extractor ID.
     * @param compositeModelUpdateObject A model object containing the new entity extractor name and children.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the OperationStatus object if successful.
     */
    public OperationStatus updateCompositeEntity(UUID appId, String versionId, UUID cEntityId, CompositeEntityModel compositeModelUpdateObject) {
        // Synchronous wrapper: block on the async service call and unwrap the response body.
        return updateCompositeEntityWithServiceResponseAsync(appId, versionId, cEntityId, compositeModelUpdateObject).toBlocking().single().body();
    }
}
public class Projections {
    /**
     * Create a Bean populating projection for the given type and bindings.
     *
     * @param <T> type of projection
     * @param type type of the projection
     * @param bindings property bindings
     * @return factory expression
     */
    public static <T> QBean<T> bean(Path<? extends T> type, Map<String, ? extends Expression<?>> bindings) {
        // The QBean is constructed against the path's declared Java type.
        return new QBean<T>(type.getType(), bindings);
    }
}
public class CarbonModule { /** * Get the ModuleData element ( if present ) * @ return */ public CarbonModuleData getModuleData ( ) { } }
final Element e = element . getChild ( MODULE_DATA_ELEMENT ) ; if ( e != null ) return new CarbonModuleData ( e ) ; else return null ;
public class QueryChemObject { /** * This should be triggered by an method that changes the content of an object * to that the registered listeners can react to it . */ @ Override public void notifyChanged ( ) { } }
if ( getNotification ( ) && getListenerCount ( ) > 0 ) { List < IChemObjectListener > listeners = lazyChemObjectListeners ( ) ; for ( Object listener : listeners ) { ( ( IChemObjectListener ) listener ) . stateChanged ( new QueryChemObjectChangeEvent ( this ) ) ; } }
public class QueryManagerImpl {
    /**
     * {@inheritDoc}
     */
    public Query getQuery(Node node) throws InvalidQueryException, RepositoryException {
        // Verify this manager is still usable before delegating.
        sanityCheck();
        return searchMgr.createQuery(session, itemMgr, node);
    }
}
public class IntStream {
    /**
     * Returns a {@code Stream} consisting of the results of applying the given
     * function to the elements of this stream.
     *
     * <p>This is an intermediate operation.
     *
     * @param <R> the type result
     * @param mapper the mapper function used to apply to each element
     * @return the new {@code Stream}
     */
    @NotNull
    public <R> Stream<R> mapToObj(@NotNull final IntFunction<? extends R> mapper) {
        // Wrap the current iterator in a mapping adapter, carrying the stream params over.
        return new Stream<R>(params, new IntMapToObj<R>(iterator, mapper));
    }
}
public class JobQueueDetailMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param jobQueueDetail the object to marshall, must not be null
     * @param protocolMarshaller the target protocol marshaller
     */
    public void marshall(JobQueueDetail jobQueueDetail, ProtocolMarshaller protocolMarshaller) {
        if (jobQueueDetail == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // Marshall each model field with its binding descriptor.
            protocolMarshaller.marshall(jobQueueDetail.getJobQueueName(), JOBQUEUENAME_BINDING);
            protocolMarshaller.marshall(jobQueueDetail.getJobQueueArn(), JOBQUEUEARN_BINDING);
            protocolMarshaller.marshall(jobQueueDetail.getState(), STATE_BINDING);
            protocolMarshaller.marshall(jobQueueDetail.getStatus(), STATUS_BINDING);
            protocolMarshaller.marshall(jobQueueDetail.getStatusReason(), STATUSREASON_BINDING);
            protocolMarshaller.marshall(jobQueueDetail.getPriority(), PRIORITY_BINDING);
            protocolMarshaller.marshall(jobQueueDetail.getComputeEnvironmentOrder(), COMPUTEENVIRONMENTORDER_BINDING);
        } catch (Exception e) {
            // Wrap any failure so callers deal with a single SDK exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AbstractIndexWriter { /** * Get the index label for navigation bar . * @ return a content tree for the tree label */ @ Override protected Content getNavLinkIndex ( ) { } }
Content li = HtmlTree . LI ( HtmlStyle . navBarCell1Rev , contents . indexLabel ) ; return li ;
public class SpotFleetLaunchSpecification { /** * The tags to apply during creation . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setTagSpecifications ( java . util . Collection ) } or { @ link # withTagSpecifications ( java . util . Collection ) } if * you want to override the existing values . * @ param tagSpecifications * The tags to apply during creation . * @ return Returns a reference to this object so that method calls can be chained together . */ public SpotFleetLaunchSpecification withTagSpecifications ( SpotFleetTagSpecification ... tagSpecifications ) { } }
if ( this . tagSpecifications == null ) { setTagSpecifications ( new com . amazonaws . internal . SdkInternalList < SpotFleetTagSpecification > ( tagSpecifications . length ) ) ; } for ( SpotFleetTagSpecification ele : tagSpecifications ) { this . tagSpecifications . add ( ele ) ; } return this ;
public class FileWatcher {
    /**
     * Start watching file path and notify watcher for updates on that file.
     *
     * @param file the file path to watch
     * @param watcher the watcher to be notified
     */
    @Deprecated
    public void addWatcher(Path file, Watcher watcher) {
        // Deprecated shim: delegates to the Listener-based overload
        // (the cast implies Watcher is usable as a Listener — confirm the type hierarchy).
        addWatcher(file, (Listener) watcher);
    }
}
public class AbstractDrawerItem { /** * This method returns the ViewHolder for our item , using the provided View . * @ param parent * @ return the ViewHolder for this Item */ @ Override public VH getViewHolder ( ViewGroup parent ) { } }
return getViewHolder ( LayoutInflater . from ( parent . getContext ( ) ) . inflate ( getLayoutRes ( ) , parent , false ) ) ;
public class JFAPCommunicator {
    /**
     * Actually performs the handshake.
     *
     * Builds the outbound handshake frame (a type byte followed by a sequence of
     * id/length/value fields), exchanges it with the peer via jfapExchange, then
     * parses the peer's reply, accepting any values the peer negotiated down.
     *
     * @param serverMode Set to true if this is a handshake from ME-ME, otherwise false.
     * @param usageType indicates how the conversation over which the handshake will be performed is to be used.
     * @throws SIConnectionLostException if an error occurs.
     * @throws SIConnectionDroppedException if the connection is knackered.
     */
    protected void initiateCommsHandshakingImpl(final boolean serverMode, final ConversationUsageType usageType)
        throws SIConnectionLostException, SIConnectionDroppedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "initiateCommsHandshakingImpl", new Object[] { Boolean.valueOf(serverMode), usageType });

        // Get the conversation state and the Handshake group object from it.
        // This group will hold all the information that is negotiated.
        final CATHandshakeProperties handshakeProperties = new CATHandshakeProperties();

        // Get the connection info so we can return that in any NLS messages
        String connectionInfo = "Unknown";
        CommsConnection cConn = getCommsConnection();
        if (cConn != null)
            connectionInfo = cConn.getConnectionInfo();

        // At this point we can work out the client capabilities
        short capabilities = getClientCapabilities();

        // Check the heartbeats are not being overridden by the SIB properties file
        short heartBeatInterval = (short) CommsUtils.getRuntimeIntProperty(CommsConstants.HEARTBEAT_INTERVAL_KEY,
                                                                           "" + getConversation().getHeartbeatInterval());
        short heartBeatTimeout = (short) CommsUtils.getRuntimeIntProperty(CommsConstants.HEARTBEAT_TIMEOUT_KEY,
                                                                          "" + getConversation().getHeartbeatTimeout());

        // Get the current FAP level, overriding from SIB properties if required
        short currentFapLevel = (short) CommsUtils.getRuntimeIntProperty(CommsConstants.CLIENT_FAP_LEVEL_KEY,
                                                                         "" + CommsConstants.CURRENT_FAP_VERSION);

        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, "Client is FAP Level: " + currentFapLevel);

        // Build Handshaking Message consisting of:
        //   Handshake Type   0x01 Client
        //                    0x02 ME-ME
        // Then follows named fields which have a BIT16 ID, BIT16 Length and then data.
        // Each field requires 4 + length of storage. Not all fields are sent up as part
        // of the client/server handshake.
        //
        // Storage | Details
        //    6    | Product Version    id = 0x0001
        //    6    | FAP Level          id = 0x0002
        //   12    | MaximumMessageSize id = 0x0003
        //    8    | MaximumTXSize      id = 0x0004
        //    6    | HeartbeatInterval  id = 0x0005
        //    6    | HeartbeatTimeout   id = 0x000D (FAP 3 and higher only)
        //    6    | Capabilities       id = 0x0007  0x0001 Transactions are supported
        //         |                                 0x0002 Reliable messages supported
        //         |                                 0x0004 Assured messages supported
        //         |                                 0xFFF8 Reserved
        //    6    | Product Id         id = 0x000B  0x0001 Jetstream
        //         |                                 0x0002 WebSphere MQ
        //         |                                 0x0002 .NET
        //   36    | Supported FAPS     id = 0x000C  32 byte bit map with a 1 bit denoting a
        //         |                    supported level of the FAP. Big-endian with the LSB
        //         |                    of the final byte = FAP level 1 (see the encoding
        //         |                    loop below). This is only present on FAP
        //         |                    level 3 and higher.
        //    4    | Usage Type         id = 0x000E  Optional field which indicates how the conversation which initiated this handshake is to be used.
        // 4 + len | Cell Name          id = 0x000F  The name of the WAS cell associated with this process if it is running in a server environment. Not sent if null.
        // 4 + len | Node Name          id = 0x0010  The name of the WAS node associated with this process if it is running in a server environment. Not sent if null.
        // 4 + len | Server Name        id = 0x0011  The name of the WAS server associated with this process if it is running in a server environment. Not sent if null.
        // 4 + len | Cluster Name       id = 0x0012  The name of the WAS cluster associated with this process if it is running in a server environment. Not sent if null.

        CommsByteBuffer hBuf = getCommsByteBuffer();

        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, "----- Sending the following handshake data ------");

        // Handshake type
        if (serverMode) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, " Handshake type : ME");
            hBuf.put(CommsConstants.HANDSHAKE_ME);
        } else {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, " Handshake type : CLIENT");
            hBuf.put(CommsConstants.HANDSHAKE_CLIENT);
        }

        // Product Version (two single bytes: major then minor)
        byte productMajor = (byte) SIMPConstants.API_MAJOR_VERSION;
        byte productMinor = (byte) SIMPConstants.API_MINOR_VERSION;
        hBuf.putShort(CommsConstants.FIELDID_PRODUCT_VERSION);
        hBuf.putShort(2);
        hBuf.put(productMajor);
        hBuf.put(productMinor);
        handshakeProperties.setMajorVersion(productMajor);
        handshakeProperties.setMinorVersion(productMinor);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, " Product Version : " + productMajor + "." + productMinor);

        // FAP version
        hBuf.putShort(CommsConstants.FIELDID_FAP_LEVEL);
        hBuf.putShort(2);
        hBuf.putShort(currentFapLevel);
        handshakeProperties.setFapLevel(currentFapLevel);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, " FAP Version : " + currentFapLevel);

        // Max Message Size (a long, hence length 8)
        hBuf.putShort(CommsConstants.FIELDID_MAX_MESSAGE_SIZE);
        hBuf.putShort(8);
        hBuf.putLong(CommsConstants.MAX_MESSAGE_SIZE);
        handshakeProperties.setMaxMessageSize(CommsConstants.MAX_MESSAGE_SIZE);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, " Max Msg Size : " + CommsConstants.MAX_MESSAGE_SIZE);

        // Max transmission size (an int, hence length 4)
        hBuf.putShort(CommsConstants.FIELDID_MAX_TRANSMISSION_SIZE);
        hBuf.putShort(4);
        hBuf.putInt(CommsConstants.MAX_TRANSMISSION_SIZE);
        handshakeProperties.setMaxTransmissionSize(CommsConstants.MAX_TRANSMISSION_SIZE);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, " Max Tx Size : " + CommsConstants.MAX_TRANSMISSION_SIZE);

        // Heartbeat Interval
        hBuf.putShort(CommsConstants.FIELDID_HEARTBEAT_INTERVAL);
        hBuf.putShort(2);
        hBuf.putShort(heartBeatInterval);
        handshakeProperties.setHeartbeatInterval(heartBeatInterval);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, " Heartbeat Interval: " + heartBeatInterval);

        // Heartbeat timeout - only sent at sufficiently high FAP levels.
        // NOTE(review): the original comment said "FAP 4 and above" but the code
        // gates on FAP_VERSION_5 - confirm which is intended before relying on it.
        if (currentFapLevel >= JFapChannelConstants.FAP_VERSION_5) {
            hBuf.putShort(CommsConstants.FIELDID_HEARTBEAT_TIMEOUT);
            hBuf.putShort(2);
            hBuf.putShort(heartBeatTimeout);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, " Heartbeat Timeout : " + heartBeatTimeout);
        }
        // Always set it in the handshake properties even if we didn't negotiate it
        handshakeProperties.setHeartbeatTimeout(heartBeatTimeout);

        // Put the capabilities
        hBuf.putShort(CommsConstants.FIELDID_CAPABILITIES);
        hBuf.putShort(2);
        hBuf.putShort(capabilities);
        handshakeProperties.setCapabilites(capabilities);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, " Capabilities : 0x" + Integer.toHexString(capabilities));

        // Product Id
        hBuf.putShort(CommsConstants.FIELDID_PRODUCT_ID);
        hBuf.putShort(2);
        hBuf.putShort(CommsConstants.PRODUCT_ID_JETSTREAM);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, " Product Id : 0x" + Integer.toHexString(CommsConstants.PRODUCT_ID_JETSTREAM));

        // Supported FAP versions. This is a bitmap of the supported FAP levels
        // with a 1 bit indicating support. Big-endian (Java) byte ordering is
        // used and the LSB corresponds to FAP version 1.
        if (currentFapLevel >= JFapChannelConstants.FAP_VERSION_3) {
            byte[] fapBitmap = new byte[32];
            StringBuffer supportedFapLevels = null;
            // i counts bit positions 255..0; bit i represents FAP level (i + 1),
            // so FAP level 1 lands in the lowest bit of the last byte.
            for (int i = 255; i >= 0; --i) {
                // Determine if the bit is on or off
                boolean bitIsOn = CommsConstants.isFapLevelSupported(i + 1);
                if (bitIsOn) {
                    // Find the index into the byte array bitmap where the bit is located.
                    int byteOffset = (255 - i) / 8;
                    // Find the bit within this byte.
                    int bitOffset = i % 8;
                    // Work out the bit we want to set
                    byte bitSet = (byte) (0x01 << bitOffset);
                    // Add this bit into the array
                    fapBitmap[byteOffset] |= bitSet;
                    // Update the text string buffer (first entry has no separator)
                    if (supportedFapLevels == null) {
                        supportedFapLevels = new StringBuffer();
                        supportedFapLevels.append(i + 1);
                    } else {
                        supportedFapLevels.append(", ");
                        supportedFapLevels.append(i + 1);
                    }
                }
            }
            hBuf.putShort(CommsConstants.FIELDID_SUPORTED_FAPS);
            hBuf.putShort(fapBitmap.length);
            hBuf.put(fapBitmap);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, " Supported FAP's : " + supportedFapLevels);
        }

        // If this isn't a 'basic' JFAP handshake then indicate that this is the case.
        if (usageType != ConversationUsageType.JFAP) {
            hBuf.putShort(CommsConstants.FIELDID_CONVERSATION_USAGE_TYPE);
            hBuf.putShort(4);
            usageType.serialize(hBuf);
        }

        // If the cell name is not null or too big, send it.
        if (cellName != null && cellNameLengthInBuffer <= Short.MAX_VALUE) {
            hBuf.putShort(CommsConstants.FIELDID_CELL_NAME);
            hBuf.putShort(cellNameLengthInBuffer);
            hBuf.putString(cellName);
        }
        // If the node name is not null or too big, send it.
        if (nodeName != null && nodeNameLengthInBuffer <= Short.MAX_VALUE) {
            hBuf.putShort(CommsConstants.FIELDID_NODE_NAME);
            hBuf.putShort(nodeNameLengthInBuffer);
            hBuf.putString(nodeName);
        }
        // If the server name is not null or too big, send it.
        if (serverName != null && serverNameLengthInBuffer <= Short.MAX_VALUE) {
            hBuf.putShort(CommsConstants.FIELDID_SERVER_NAME);
            hBuf.putShort(serverNameLengthInBuffer);
            hBuf.putString(serverName);
        }
        // If the cluster name is not null or too big, send it.
        if (clusterName != null && clusterNameLengthInBuffer <= Short.MAX_VALUE) {
            hBuf.putShort(CommsConstants.FIELDID_CLUSTER_NAME);
            hBuf.putShort(clusterNameLengthInBuffer);
            hBuf.putString(clusterName);
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, "----- End of handshake data ---------------------");

        // Pass on call to server (synchronous exchange: blocks until the reply arrives)
        CommsByteBuffer buf = jfapExchange(hBuf, JFapChannelConstants.SEG_HANDSHAKE,
                                           JFapChannelConstants.PRIORITY_MEDIUM, true);
        try {
            short err = buf.getCommandCompletionCode(JFapChannelConstants.SEG_HANDSHAKE);
            if (err != CommsConstants.SI_NO_EXCEPTION) {
                checkFor_SIConnectionDroppedException(buf, err);
                checkFor_SIConnectionLostException(buf, err);
                defaultChecker(buf, err);
            }

            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "----- Received the following handshake data -----");

            // First get the connection type - it must mirror the type we sent.
            byte connectionType = buf.get();
            if ((serverMode && connectionType != CommsConstants.HANDSHAKE_ME)
                || (!serverMode && connectionType != CommsConstants.HANDSHAKE_CLIENT)) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Unexpected connection type returned!", Byte.toString(connectionType));
                throw new SIConnectionLostException(
                    nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                            new Object[] { connectionInfo }, null) // d192293
                );
            }
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                if (serverMode)
                    SibTr.debug(this, tc, " Handshake type : ME");
                else
                    SibTr.debug(this, tc, " Handshake type : CLIENT");
            }

            // Now loop round through the data and get any overridden values.
            // Note that we could get nothing back here which would indicate that
            // the server is happy with our proposed values.
            while (buf.hasRemaining()) {
                short fieldId = buf.getShort();
                switch (fieldId) {
                    case CommsConstants.FIELDID_PRODUCT_VERSION:
                        short productVersionFieldLength = buf.getShort();
                        if (productVersionFieldLength != 2) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "Invalid product version length: " + productVersionFieldLength);
                            throw new SIConnectionLostException(
                                nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                                        new Object[] { connectionInfo }, null));
                        }
                        byte upperProductVersion = buf.get();
                        byte lowerProductVersion = buf.get();
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(this, tc, " Product Version : " + upperProductVersion + "." + lowerProductVersion);
                        // If the server replies saying that it has a higher major version,
                        // we need to save it away as in future version we may need to
                        // know that we are talking to a different server version
                        handshakeProperties.setMajorVersion(upperProductVersion);
                        handshakeProperties.setMinorVersion(lowerProductVersion);
                        break;
                    case CommsConstants.FIELDID_FAP_LEVEL:
                        short fapLevelFieldLength = buf.getShort();
                        if (fapLevelFieldLength != 2) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "Invalid FAP Field length: " + fapLevelFieldLength);
                            throw new SIConnectionLostException(
                                nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                                        new Object[] { connectionInfo }, null));
                        }
                        short fapLevel = buf.getShort();
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(this, tc, " FAP Version : " + fapLevel);
                        // If the server replies with a higher FAP level, we cannot continue.
                        // Note this should not happen unless the server malfunctions.
                        if (fapLevel > currentFapLevel) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "FAP level returned (" + fapLevel +
                                                      ") is greater than the requested FAP level (" + currentFapLevel + ")");
                            throw new SIConnectionLostException(
                                nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                                        new Object[] { connectionInfo }, null) // d192293
                            );
                        }
                        // Is the FAP level in the supported table of FAP levels?
                        if (!CommsConstants.isFapLevelSupported(fapLevel)) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "FAP level returned (" + fapLevel + ") is not supported");
                            throw new SIConnectionLostException(
                                nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                                        new Object[] { connectionInfo }, null) // d192293
                            );
                        }
                        handshakeProperties.setFapLevel(fapLevel);
                        break;
                    case CommsConstants.FIELDID_MAX_MESSAGE_SIZE:
                        short maxMessageFieldLength = buf.getShort();
                        if (maxMessageFieldLength != 8) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "Invalid Max Message Field length: " + maxMessageFieldLength);
                            throw new SIConnectionLostException(
                                nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                                        new Object[] { connectionInfo }, null));
                        }
                        long maxMessageSize = buf.getLong();
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(this, tc, " Max Msg Size : " + maxMessageSize);
                        // Keep whichever limit is smaller - ours or the server's.
                        handshakeProperties.setMaxMessageSize(Math.min(maxMessageSize, CommsConstants.MAX_MESSAGE_SIZE));
                        break;
                    case CommsConstants.FIELDID_MAX_TRANSMISSION_SIZE:
                        short maxTransmissionLength = buf.getShort();
                        if (maxTransmissionLength != 4) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "Invalid Max Transmission Field length: " + maxTransmissionLength);
                            throw new SIConnectionLostException(
                                nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                                        new Object[] { connectionInfo }, null));
                        }
                        int maxTransmissionSize = buf.getInt();
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(this, tc, " Max Tx Size : " + maxTransmissionSize);
                        // Keep whichever limit is smaller - ours or the server's.
                        handshakeProperties.setMaxTransmissionSize(Math.min(maxTransmissionSize, CommsConstants.MAX_TRANSMISSION_SIZE));
                        break;
                    case CommsConstants.FIELDID_HEARTBEAT_INTERVAL:
                        short heartbeatFieldLength = buf.getShort();
                        if (heartbeatFieldLength != 2) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "Invalid Heartbeat Interval Field length: " + heartbeatFieldLength);
                            throw new SIConnectionLostException(
                                nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                                        new Object[] { connectionInfo }, null));
                        }
                        short heartbeatInterval = buf.getShort();
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(this, tc, " Heartbeat Interval: " + heartbeatInterval);
                        handshakeProperties.setHeartbeatInterval(heartbeatInterval);
                        break;
                    case CommsConstants.FIELDID_HEARTBEAT_TIMEOUT:
                        short heartbeatTimeoutFieldLength = buf.getShort();
                        if (heartbeatTimeoutFieldLength != 2) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "Invalid Heartbeat Timeout Field length: " + heartbeatTimeoutFieldLength);
                            throw new SIConnectionLostException(
                                nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                                        new Object[] { connectionInfo }, null));
                        }
                        short heartbeatTimeout = buf.getShort();
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(this, tc, " Heartbeat Timeout : " + heartbeatTimeout);
                        handshakeProperties.setHeartbeatTimeout(heartbeatTimeout);
                        break;
                    case CommsConstants.FIELDID_CAPABILITIES:
                        short capabilityInterval = buf.getShort();
                        if (capabilityInterval != 2) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "Invalid Capability Field length: " + capabilityInterval);
                            throw new SIConnectionLostException(
                                nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                                        new Object[] { connectionInfo }, null));
                        }
                        short capability = buf.getShort();
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(this, tc, " Capabilities : " + capability);
                        handshakeProperties.setCapabilites(capability);
                        break;
                    case CommsConstants.FIELDID_PRODUCT_ID:
                        short productIdLength = buf.getShort();
                        if (productIdLength != 2) {
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "Invalid Product Id Field length: " + productIdLength);
                            throw new SIConnectionLostException(
                                nls.getFormattedMessage("UNABLE_TO_NEGOTIATE_CONNECTION_SICO1023",
                                                        new Object[] { connectionInfo }, null));
                        }
                        short productId = buf.getShort();
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(this, tc, " Server Product Id : " + productId);
                        handshakeProperties.setPeerProductId(productId);
                        break;
                    default:
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(this, tc, " ** Unknown Parameter received: **");
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(this, tc, "Field Id: " + fieldId);
                        // We can throw this here as we have told the server what FAP version we
                        // are, and therefore it should not negotiate with us any values that we
                        // do not know about.
                        SIConnectionLostException e = new SIConnectionLostException(
                            nls.getFormattedMessage("INVALID_PROP_SICO8009", new Object[] { "" + fieldId }, null));
                        FFDCFilter.processException(e, CLASS_NAME + ".initiateHandshaking",
                                                    CommsConstants.JFAPCOMMUNICATOR_INITIATEHANDSHAKING_03, this);
                        throw e;
                }
            }

            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "----- End of handshake data ---------------------");
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "Handshake properties:", handshakeProperties);

            // Now we have completed handshaking, set the heartbeating values to whatever were
            // agreed upon
            getConversation().setHeartbeatInterval(handshakeProperties.getHeartbeatInterval());
            getConversation().setHeartbeatTimeout(handshakeProperties.getHeartbeatTimeout());
        } catch (SIException e) {
            FFDCFilter.processException(e, CLASS_NAME + ".initiateHandshaking",
                                        CommsConstants.JFAPCOMMUNICATOR_INITIATEHANDSHAKING_02, this);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "Caught an SIException", e);
            throw new SIConnectionLostException(
                nls.getFormattedMessage("UNKNOWN_CORE_EXCP_SICO8002", new Object[] { e }, null), e);
        } finally {
            // Release pooled objects
            if (buf != null)
                buf.release();
        }

        // Only set handshake properties if the usageType requires it.
        if (usageType.requiresNormalHandshakeProcessing()) {
            getConversation().setHandshakeProperties(handshakeProperties);
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "initiateCommsHandshakingImpl");
    }
}
public class IPv6AddressPool { /** * Private helper method to perform the allocation of a subnet within one of the free ranges . * @ param toAllocate subnet to allocate * @ param rangeToAllocateFrom free range to allocate from * @ return resulting pool */ private IPv6AddressPool doAllocate ( final IPv6Network toAllocate , final IPv6AddressRange rangeToAllocateFrom ) { } }
assert freeRanges . contains ( rangeToAllocateFrom ) ; assert rangeToAllocateFrom . contains ( toAllocate ) ; final TreeSet < IPv6AddressRange > newFreeRanges = new TreeSet < IPv6AddressRange > ( this . freeRanges ) ; // remove range from free ranges newFreeRanges . remove ( rangeToAllocateFrom ) ; // from the range , remove the allocated subnet final List < IPv6AddressRange > newRanges = rangeToAllocateFrom . remove ( toAllocate ) ; // and add the resulting ranges as new free ranges newFreeRanges . addAll ( newRanges ) ; return new IPv6AddressPool ( underlyingRange , allocationSubnetSize , newFreeRanges , toAllocate ) ;