signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class JmsJcaActivationSpecImpl { /** * Set the AutoStopSequentialMessageFailure property
* @ param autoStopSequentialMessageFailure The maximum number of failed messages before stopping the MDB */
@ Override public void setAutoStopSequentialMessageFailure ( final String autoStopSequentialMessageFailure ) { } } | _autoStopSequentialMessageFailure = ( autoStopSequentialMessageFailure == null ? null : Integer . valueOf ( autoStopSequentialMessageFailure ) ) ; |
public class PropertiesManagers { /** * Create a new , default executor for use in a new { @ link PropertiesManager }
* . This executor uses cached , daemon threads so it will expand as
* necessary , but it will not block JVM shutdown . The executor will be
* automatically shutdown on JVM exit .
* @ return the newly created executor */
public static ExecutorService createExecutor ( ) { } } | ExecutorService executor = Executors . newCachedThreadPool ( new DaemonThreadFactory ( ) ) ; AUTO_GENERATED_EXECUTORS . add ( executor ) ; return executor ; |
public class IoUtils { /** * Copies all data from an InputStream to an OutputStream .
* @ return the number of bytes copied
* @ throws IOException if an I / O error occurs */
public static long copy ( InputStream input , OutputStream output ) throws IOException { } } | byte [ ] buffer = new byte [ 2 * 1024 ] ; long total = 0 ; int count ; while ( ( count = input . read ( buffer ) ) != - 1 ) { output . write ( buffer , 0 , count ) ; total += count ; } return total ; |
public class ParallelOracleBuilders { /** * Convenience method for { @ link # newStaticParallelOracle ( Collection ) } .
* @ param firstOracle
* the first ( mandatory ) oracle
* @ param otherOracles
* further ( optional ) oracles to be used by other threads
* @ param < I >
* input symbol type
* @ param < D >
* output domain type
* @ return a preconfigured oracle builder */
@ Nonnull @ SafeVarargs public static < I , D > StaticParallelOracleBuilder < I , D > newStaticParallelOracle ( MembershipOracle < I , D > firstOracle , MembershipOracle < I , D > ... otherOracles ) { } } | return newStaticParallelOracle ( Lists . asList ( firstOracle , otherOracles ) ) ; |
public class DListImpl { /** * Inserts the specified element at the specified position in this list
* ( optional operation ) . Shifts the element currently at that position
* ( if any ) and any subsequent elements to the right ( adds one to their
* indices ) .
* @ param index index at which the specified element is to be inserted .
* @ param element element to be inserted .
* @ throws UnsupportedOperationException if the < tt > add < / tt > method is not
* supported by this list .
* @ throws ClassCastException if the class of the specified element
* prevents it from being added to this list .
* @ throws IllegalArgumentException if some aspect of the specified
* element prevents it from being added to this list .
* @ throws IndexOutOfBoundsException if the index is out of range
* ( index & lt ; 0 | | index & gt ; size ( ) ) . */
public void add ( int index , Object element ) { } } | DListEntry entry = prepareEntry ( element ) ; elements . add ( index , entry ) ; // if we are in a transaction : acquire locks !
TransactionImpl tx = getTransaction ( ) ; if ( checkForOpenTransaction ( tx ) ) { RuntimeObject rt = new RuntimeObject ( this , tx ) ; List regList = tx . getRegistrationList ( ) ; tx . lockAndRegister ( rt , Transaction . WRITE , false , regList ) ; rt = new RuntimeObject ( element , tx ) ; tx . lockAndRegister ( rt , Transaction . READ , regList ) ; rt = new RuntimeObject ( entry , tx , true ) ; tx . lockAndRegister ( rt , Transaction . WRITE , false , regList ) ; } // changing the position markers of entries :
int offset = 0 ; try { offset = ( ( DListEntry ) elements . get ( index - 1 ) ) . getPosition ( ) ; } catch ( Exception ignored ) { } for ( int i = offset ; i < elements . size ( ) ; i ++ ) { entry = ( DListEntry ) elements . get ( i ) ; entry . setPosition ( i ) ; } |
public class ApiOvhHorizonView { /** * Add a domain user to add your desktop in your Active Directory
* REST : POST / horizonView / { serviceName } / domainTrust / { domainTrustId } / addDomainUserOnComposer
* @ param password [ required ] Password of the user
* @ param domain [ required ] Name of your Domain ( example : domain . local )
* @ param username [ required ] Name of the User who is going to add the Desktop in your Active Directory
* @ param serviceName [ required ] Domain of the service
* @ param domainTrustId [ required ] Domain trust id */
public OvhTask serviceName_domainTrust_domainTrustId_addDomainUserOnComposer_POST ( String serviceName , Long domainTrustId , String domain , String password , String username ) throws IOException { } } | String qPath = "/horizonView/{serviceName}/domainTrust/{domainTrustId}/addDomainUserOnComposer" ; StringBuilder sb = path ( qPath , serviceName , domainTrustId ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "domain" , domain ) ; addBody ( o , "password" , password ) ; addBody ( o , "username" , username ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhTask . class ) ; |
public class XDBHandler { /** * Receive notification of character data inside an element . */
@ Override public void characters ( char ch [ ] , int start , int len ) { } } | while ( len > 0 && Character . isWhitespace ( ch [ start ] ) ) { ++ start ; -- len ; } while ( len > 0 && Character . isWhitespace ( ch [ start + len - 1 ] ) ) { -- len ; } if ( _text . length ( ) > 0 ) { _text . append ( ' ' ) ; } _text . append ( ch , start , len ) ; |
public class MjdbcPoolBinder { /** * Returns new Pooled { @ link DataSource } implementation
* In case this function won ' t work - use { @ link # createDataSource ( java . util . Properties ) }
* @ param url Database connection url
* @ param userName Database user name
* @ param password Database user password
* @ return new Pooled { @ link DataSource } implementation
* @ throws SQLException */
public static DataSource createDataSource ( String url , String userName , String password ) throws SQLException { } } | assertNotNull ( url ) ; assertNotNull ( userName ) ; assertNotNull ( password ) ; BasicDataSource ds = new BasicDataSource ( ) ; ds . setUrl ( url ) ; ds . setUsername ( userName ) ; ds . setPassword ( password ) ; return ds ; |
public class MyZipUtils { /** * Compress a directory into a zip file
* @ param dir Directory
* @ param zipFile ZIP file to create
* @ throws IOException I / O Error */
public static void compress ( File dir , File zipFile ) throws IOException { } } | FileOutputStream fos = new FileOutputStream ( zipFile ) ; ZipOutputStream zos = new ZipOutputStream ( fos ) ; recursiveAddZip ( dir , zos , dir ) ; zos . finish ( ) ; zos . close ( ) ; |
public class Instrumented { /** * Get a { @ link org . apache . gobblin . metrics . MetricContext } to be used by an object needing instrumentation .
* This method will read the property " metrics . context . name " from the input State , and will attempt
* to find a MetricContext with that name in the global instance of { @ link org . apache . gobblin . metrics . GobblinMetricsRegistry } .
* If it succeeds , the generated MetricContext will be a child of the retrieved Context , otherwise it will
* be a parent - less context .
* The method will automatically add two tags to the context :
* < ul >
* < li > construct will contain the name of the { @ link org . apache . gobblin . Constructs } that klazz represents . < / li >
* < li > class will contain the canonical name of the input class . < / li >
* < / ul >
* @ param state { @ link org . apache . gobblin . configuration . State } used to find the parent MetricContext .
* @ param klazz Class of the object needing instrumentation .
* @ param tags Additional tags to add to the returned context .
* @ return A { @ link org . apache . gobblin . metrics . MetricContext } with the appropriate tags and parent . */
public static MetricContext getMetricContext ( State state , Class < ? > klazz , List < Tag < ? > > tags ) { } } | int randomId = RAND . nextInt ( Integer . MAX_VALUE ) ; List < Tag < ? > > generatedTags = Lists . newArrayList ( ) ; Constructs construct = null ; if ( Converter . class . isAssignableFrom ( klazz ) ) { construct = Constructs . CONVERTER ; } else if ( ForkOperator . class . isAssignableFrom ( klazz ) ) { construct = Constructs . FORK_OPERATOR ; } else if ( RowLevelPolicy . class . isAssignableFrom ( klazz ) ) { construct = Constructs . ROW_QUALITY_CHECKER ; } else if ( Extractor . class . isAssignableFrom ( klazz ) ) { construct = Constructs . EXTRACTOR ; } else if ( DataWriter . class . isAssignableFrom ( klazz ) ) { construct = Constructs . WRITER ; } if ( construct != null ) { generatedTags . add ( new Tag < > ( GobblinMetricsKeys . CONSTRUCT_META , construct . toString ( ) ) ) ; } if ( ! klazz . isAnonymousClass ( ) ) { generatedTags . add ( new Tag < > ( GobblinMetricsKeys . CLASS_META , klazz . getCanonicalName ( ) ) ) ; } Optional < GobblinMetrics > gobblinMetrics = state . contains ( METRIC_CONTEXT_NAME_KEY ) ? GobblinMetricsRegistry . getInstance ( ) . get ( state . getProp ( METRIC_CONTEXT_NAME_KEY ) ) : Optional . < GobblinMetrics > absent ( ) ; MetricContext . Builder builder = gobblinMetrics . isPresent ( ) ? gobblinMetrics . get ( ) . getMetricContext ( ) . childBuilder ( klazz . getCanonicalName ( ) + "." + randomId ) : MetricContext . builder ( klazz . getCanonicalName ( ) + "." + randomId ) ; return builder . addTags ( generatedTags ) . addTags ( tags ) . build ( ) ; |
public class appfwfieldtype { /** * Use this API to add appfwfieldtype resources . */
public static base_responses add ( nitro_service client , appfwfieldtype resources [ ] ) throws Exception { } } | base_responses result = null ; if ( resources != null && resources . length > 0 ) { appfwfieldtype addresources [ ] = new appfwfieldtype [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { addresources [ i ] = new appfwfieldtype ( ) ; addresources [ i ] . name = resources [ i ] . name ; addresources [ i ] . regex = resources [ i ] . regex ; addresources [ i ] . priority = resources [ i ] . priority ; addresources [ i ] . comment = resources [ i ] . comment ; } result = add_bulk_request ( client , addresources ) ; } return result ; |
public class RandomFloat { /** * Updates ( drifts ) a float value within specified range defined
* @ param value a float value to drift .
* @ param range ( optional ) a range . Default : 10 % of the value
* @ return updated random float value . */
public static float updateFloat ( float value , float range ) { } } | range = range == 0 ? ( float ) ( 0.1 * value ) : range ; float min = value - range ; float max = value + range ; return nextFloat ( min , max ) ; |
public class Option { /** * Set the specified sub - text to the option .
* @ param subtext */
public void setSubtext ( final String subtext ) { } } | if ( subtext != null ) attrMixin . setAttribute ( SUBTEXT , subtext ) ; else attrMixin . removeAttribute ( SUBTEXT ) ; |
public class ListOTAUpdatesRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ListOTAUpdatesRequest listOTAUpdatesRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( listOTAUpdatesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listOTAUpdatesRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listOTAUpdatesRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listOTAUpdatesRequest . getOtaUpdateStatus ( ) , OTAUPDATESTATUS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class GroupServiceClient { /** * Creates a new group .
* < p > Sample code :
* < pre > < code >
* try ( GroupServiceClient groupServiceClient = GroupServiceClient . create ( ) ) {
* ProjectName name = ProjectName . of ( " [ PROJECT ] " ) ;
* Group group = Group . newBuilder ( ) . build ( ) ;
* Group response = groupServiceClient . createGroup ( name . toString ( ) , group ) ;
* < / code > < / pre >
* @ param name The project in which to create the group . The format is
* ` " projects / { project _ id _ or _ number } " ` .
* @ param group A group definition . It is an error to define the ` name ` field because the system
* assigns the name .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final Group createGroup ( String name , Group group ) { } } | CreateGroupRequest request = CreateGroupRequest . newBuilder ( ) . setName ( name ) . setGroup ( group ) . build ( ) ; return createGroup ( request ) ; |
public class AnnotationVisitor { /** * Visit annotation on a class , field or method
* @ param annotationClass
* class of annotation
* @ param map
* map from names to values
* @ param runtimeVisible
* true if annotation is runtime visible */
public void visitAnnotation ( @ DottedClassName String annotationClass , Map < String , ElementValue > map , boolean runtimeVisible ) { } } | if ( DEBUG ) { System . out . println ( "Annotation: " + annotationClass ) ; for ( Map . Entry < String , ElementValue > e : map . entrySet ( ) ) { System . out . println ( " " + e . getKey ( ) ) ; System . out . println ( " -> " + e . getValue ( ) ) ; } } |
public class AlertPanel { /** * This method initializes treeAlert
* @ return javax . swing . JTree */
JTree getTreeAlert ( ) { } } | if ( treeAlert == null ) { treeAlert = new JTree ( ) { private static final long serialVersionUID = 1L ; @ Override public Point getPopupLocation ( final MouseEvent event ) { if ( event != null ) { // Select item on right click
TreePath tp = treeAlert . getPathForLocation ( event . getX ( ) , event . getY ( ) ) ; if ( tp != null ) { // Only select a new item if the current item is not
// already selected - this is to allow multiple items
// to be selected
if ( ! treeAlert . getSelectionModel ( ) . isPathSelected ( tp ) ) { treeAlert . getSelectionModel ( ) . setSelectionPath ( tp ) ; } } } return super . getPopupLocation ( event ) ; } } ; treeAlert . setName ( ALERT_TREE_PANEL_NAME ) ; treeAlert . setShowsRootHandles ( true ) ; treeAlert . setBorder ( javax . swing . BorderFactory . createEmptyBorder ( 0 , 0 , 0 , 0 ) ) ; treeAlert . setComponentPopupMenu ( new JPopupMenu ( ) { private static final long serialVersionUID = 1L ; @ Override public void show ( Component invoker , int x , int y ) { final int countSelectedNodes = treeAlert . getSelectionCount ( ) ; final ArrayList < HistoryReference > uniqueHistoryReferences = new ArrayList < > ( countSelectedNodes ) ; if ( countSelectedNodes > 0 ) { SortedSet < Integer > historyReferenceIdsAdded = new TreeSet < > ( ) ; for ( TreePath path : treeAlert . getSelectionPaths ( ) ) { final AlertNode node = ( AlertNode ) path . getLastPathComponent ( ) ; final Object userObject = node . getUserObject ( ) ; if ( userObject instanceof Alert ) { HistoryReference historyReference = ( ( Alert ) userObject ) . getHistoryRef ( ) ; if ( historyReference != null && ! historyReferenceIdsAdded . contains ( historyReference . getHistoryId ( ) ) ) { historyReferenceIdsAdded . add ( historyReference . getHistoryId ( ) ) ; uniqueHistoryReferences . add ( historyReference ) ; } } } uniqueHistoryReferences . trimToSize ( ) ; } SelectableHistoryReferencesContainer messageContainer = new DefaultSelectableHistoryReferencesContainer ( treeAlert . getName ( ) , treeAlert , Collections . < HistoryReference > emptyList ( ) , uniqueHistoryReferences ) ; view . getPopupMenu ( ) . show ( messageContainer , x , y ) ; } } ) ; treeAlert . addMouseListener ( new java . awt . event . MouseAdapter ( ) { @ Override public void mouseClicked ( java . awt . event . MouseEvent e ) { if ( SwingUtilities . isLeftMouseButton ( e ) && e . getClickCount ( ) > 1 ) { // Its a double click - edit the alert
editSelectedAlert ( ) ; } } } ) ; treeAlert . addTreeSelectionListener ( new javax . swing . event . TreeSelectionListener ( ) { @ Override public void valueChanged ( javax . swing . event . TreeSelectionEvent e ) { DefaultMutableTreeNode node = ( DefaultMutableTreeNode ) treeAlert . getLastSelectedPathComponent ( ) ; if ( node != null && node . getUserObject ( ) != null ) { Object obj = node . getUserObject ( ) ; if ( obj instanceof Alert ) { Alert alert = ( Alert ) obj ; setMessage ( alert . getMessage ( ) , alert . getEvidence ( ) ) ; treeAlert . requestFocusInWindow ( ) ; getAlertViewPanel ( ) . displayAlert ( alert ) ; } else { getAlertViewPanel ( ) . clearAlert ( ) ; } } else { getAlertViewPanel ( ) . clearAlert ( ) ; } } } ) ; treeAlert . setCellRenderer ( new AlertTreeCellRenderer ( ) ) ; treeAlert . setExpandsSelectedPaths ( true ) ; String deleteAlertKey = "zap.delete.alert" ; treeAlert . getInputMap ( ) . put ( view . getDefaultDeleteKeyStroke ( ) , deleteAlertKey ) ; treeAlert . getActionMap ( ) . put ( deleteAlertKey , new AbstractAction ( ) { private static final long serialVersionUID = 1L ; @ Override public void actionPerformed ( ActionEvent e ) { Set < Alert > alerts = getSelectedAlerts ( ) ; if ( alerts . size ( ) > 1 && View . getSingleton ( ) . showConfirmDialog ( Constant . messages . getString ( "scanner.delete.confirm" ) ) != JOptionPane . OK_OPTION ) { return ; } for ( Alert alert : alerts ) { extension . deleteAlert ( alert ) ; } } } ) ; } return treeAlert ; |
public class KeyManagementServiceClient { /** * Returns metadata for a given [ KeyRing ] [ google . cloud . kms . v1 . KeyRing ] .
* < p > Sample code :
* < pre > < code >
* try ( KeyManagementServiceClient keyManagementServiceClient = KeyManagementServiceClient . create ( ) ) {
* KeyRingName name = KeyRingName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ KEY _ RING ] " ) ;
* KeyRing response = keyManagementServiceClient . getKeyRing ( name . toString ( ) ) ;
* < / code > < / pre >
* @ param name The [ name ] [ google . cloud . kms . v1 . KeyRing . name ] of the
* [ KeyRing ] [ google . cloud . kms . v1 . KeyRing ] to get .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final KeyRing getKeyRing ( String name ) { } } | GetKeyRingRequest request = GetKeyRingRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return getKeyRing ( request ) ; |
public class WordNet { /** * 清理from属性 */
public void clean ( ) { } } | for ( List < Vertex > vertexList : vertexes ) { for ( Vertex vertex : vertexList ) { vertex . from = null ; } } |
public class InternalSARLParser { /** * $ ANTLR start synpred53 _ InternalSARL */
public final void synpred53_InternalSARL_fragment ( ) throws RecognitionException { } } | // InternalSARL . g : 15000:6 : ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) )
// InternalSARL . g : 15000:7 : ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) )
{ // InternalSARL . g : 15000:7 : ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) )
// InternalSARL . g : 15001:7 : ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) )
{ // InternalSARL . g : 15001:7 : ( )
// InternalSARL . g : 15002:7:
{ } // InternalSARL . g : 15003:7 : ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ?
int alt406 = 2 ; int LA406_0 = input . LA ( 1 ) ; if ( ( LA406_0 == RULE_ID || ( LA406_0 >= 44 && LA406_0 <= 45 ) || ( LA406_0 >= 92 && LA406_0 <= 95 ) ) ) { alt406 = 1 ; } switch ( alt406 ) { case 1 : // InternalSARL . g : 15004:8 : ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) *
{ // InternalSARL . g : 15004:8 : ( ( ruleJvmFormalParameter ) )
// InternalSARL . g : 15005:9 : ( ruleJvmFormalParameter )
{ // InternalSARL . g : 15005:9 : ( ruleJvmFormalParameter )
// InternalSARL . g : 15006:10 : ruleJvmFormalParameter
{ pushFollow ( FOLLOW_134 ) ; ruleJvmFormalParameter ( ) ; state . _fsp -- ; if ( state . failed ) return ; } } // InternalSARL . g : 15009:8 : ( ' , ' ( ( ruleJvmFormalParameter ) ) ) *
loop405 : do { int alt405 = 2 ; int LA405_0 = input . LA ( 1 ) ; if ( ( LA405_0 == 32 ) ) { alt405 = 1 ; } switch ( alt405 ) { case 1 : // InternalSARL . g : 15010:9 : ' , ' ( ( ruleJvmFormalParameter ) )
{ match ( input , 32 , FOLLOW_75 ) ; if ( state . failed ) return ; // InternalSARL . g : 15011:9 : ( ( ruleJvmFormalParameter ) )
// InternalSARL . g : 15012:10 : ( ruleJvmFormalParameter )
{ // InternalSARL . g : 15012:10 : ( ruleJvmFormalParameter )
// InternalSARL . g : 15013:11 : ruleJvmFormalParameter
{ pushFollow ( FOLLOW_134 ) ; ruleJvmFormalParameter ( ) ; state . _fsp -- ; if ( state . failed ) return ; } } } break ; default : break loop405 ; } } while ( true ) ; } break ; } // InternalSARL . g : 15018:7 : ( ( ' | ' ) )
// InternalSARL . g : 15019:8 : ( ' | ' )
{ // InternalSARL . g : 15019:8 : ( ' | ' )
// InternalSARL . g : 15020:9 : ' | '
{ match ( input , 97 , FOLLOW_2 ) ; if ( state . failed ) return ; } } } } |
public class BaseTemplate { /** * This is the main method responsible for writing a tag and its attributes .
* The arguments may be :
* < ul >
* < li > a closure < / li > in which case the closure is rendered inside the tag body
* < li > a string < / li > , in which case the string is rendered as the tag body
* < li > a map of attributes < / li > in which case the attributes are rendered inside the opening tag
* < / ul >
* < p > or a combination of ( attributes , string ) , ( attributes , closure ) < / p >
* @ param tagName the name of the tag
* @ param args tag generation arguments
* @ return this template instance
* @ throws IOException */
public Object methodMissing ( String tagName , Object args ) throws IOException { } } | Object o = model . get ( tagName ) ; if ( o instanceof Closure ) { if ( args instanceof Object [ ] ) { yieldUnescaped ( ( ( Closure ) o ) . call ( ( Object [ ] ) args ) ) ; return this ; } yieldUnescaped ( ( ( Closure ) o ) . call ( args ) ) ; return this ; } else if ( args instanceof Object [ ] ) { final Writer wrt = out ; TagData tagData = new TagData ( args ) . invoke ( ) ; Object body = tagData . getBody ( ) ; writeIndent ( ) ; wrt . write ( '<' ) ; wrt . write ( tagName ) ; writeAttributes ( tagData . getAttributes ( ) ) ; if ( body != null ) { wrt . write ( '>' ) ; writeBody ( body ) ; writeIndent ( ) ; wrt . write ( "</" ) ; wrt . write ( tagName ) ; wrt . write ( '>' ) ; } else { if ( configuration . isExpandEmptyElements ( ) ) { wrt . write ( "></" ) ; wrt . write ( tagName ) ; wrt . write ( '>' ) ; } else { wrt . write ( "/>" ) ; } } } return this ; |
public class HttpFields { /** * Destroy the header .
* Help the garbage collector by null everything that we can . */
public void destroy ( ) { } } | for ( int i = _fields . size ( ) ; i -- > 0 ; ) { Field field = ( Field ) _fields . get ( i ) ; if ( field != null ) field . destroy ( ) ; } _fields = null ; _index = null ; _dateBuffer = null ; _calendar = null ; _dateReceive = null ; |
public class FieldAccessor { /** * Determines whether the field can be modified using reflection .
* @ return Whether or not the field can be modified reflectively . */
public boolean canBeModifiedReflectively ( ) { } } | if ( field . isSynthetic ( ) ) { return false ; } int modifiers = field . getModifiers ( ) ; if ( Modifier . isFinal ( modifiers ) && Modifier . isStatic ( modifiers ) ) { return false ; } return true ; |
public class DServer { public void add_obj_polling ( final DevVarLongStringArray argin , final boolean with_db_upd ) throws DevFailed { } } | Util . out4 . println ( "In add_obj_polling command" ) ; for ( final String value : argin . svalue ) { Util . out4 . println ( "Input string = " + value ) ; } for ( final int value : argin . lvalue ) { Util . out4 . println ( "Input long = " + value ) ; } // Check that parameters number is correct
if ( argin . svalue . length != 3 || argin . lvalue . length != 1 ) { Except . throw_exception ( "API_WrongNumberOfArgs" , "Incorrect number of inout arguments" , "DServer.add_obj_polling" ) ; } // Find the device
final Util tg = Util . instance ( ) ; DeviceImpl dev = null ; try { dev = tg . get_device_by_name ( argin . svalue [ 0 ] ) ; } catch ( final DevFailed e ) { Except . re_throw_exception ( e , "API_DeviceNotFound" , "Device " + argin . svalue [ 0 ] + " not found" , "DServer.add_obj_polling" ) ; } // Check that the command ( or the attribute ) exists .
// For command , also checks that it does not need input value .
final String obj_type = argin . svalue [ 1 ] . toLowerCase ( ) ; final String obj_name = argin . svalue [ 2 ] . toLowerCase ( ) ; int type = Tango_POLL_CMD ; assert dev != null ; if ( obj_type . equals ( Tango_PollCommand ) ) { dev . check_command_exists ( obj_name ) ; type = Tango_POLL_CMD ; } else if ( obj_type . equals ( Tango_PollAttribute ) ) { dev . get_device_attr ( ) . get_attr_by_name ( obj_name ) ; type = Tango_POLL_ATTR ; } else { Except . throw_exception ( "API_NotSupported" , "Object type " + obj_type + " not supported" , "DServer.add_obj_polling" ) ; } // If it ' s for the Init command , refuse to poll it
if ( type == Tango_POLL_CMD ) { if ( obj_name . equals ( "Init" ) ) { Except . throw_exception ( "API_NotSupported" , "It's not possible to poll the Init command!" , "DServer.add_obj_polling" ) ; } } // Check if the object is not already polled
final Vector poll_list = dev . get_poll_obj_list ( ) ; for ( int i = 0 ; i < poll_list . size ( ) ; i ++ ) { final PollObj poll_obj = ( PollObj ) poll_list . elementAt ( i ) ; if ( poll_obj . get_type ( ) == type ) { if ( poll_obj . get_name ( ) . equals ( obj_name ) ) { String s ; if ( type == Tango_POLL_CMD ) { s = "Command " ; } else { s = "Attribute " ; } Except . throw_exception ( "API_AlreadyPolled" , s + " " + obj_name + " already polled" , "DServer.add_obj_polling" ) ; } } } // Check that the update period is not to small
final int upd = argin . lvalue [ 0 ] ; if ( upd < Tango_MIN_POLL_PERIOD && upd != 0 ) { Except . throw_exception ( "API_NotSupported" , upd + " is below the min authorized period (100 mS)" , "DServer.add_obj_polling" ) ; } // Create a new PollObj instance for this object
poll_list . add ( new PollObj ( dev , type , obj_name , upd ) ) ; // Send command to the polling thread but wait in case of previous cmd
// still not executed
Util . out4 . println ( "Sending cmd to polling thread" ) ; final TangoMonitor mon = tg . get_poll_monitor ( ) ; final PollThCmd shared_cmd = tg . get_poll_shared_cmd ( ) ; if ( shared_cmd . cmd_pending == true ) { mon . wait_it ( ) ; } shared_cmd . cmd_pending = true ; shared_cmd . cmd_code = Tango_POLL_ADD_OBJ ; shared_cmd . dev = dev ; shared_cmd . index = poll_list . size ( ) - 1 ; mon . signal ( ) ; Util . out4 . println ( "Cmd sent to polling thread" ) ; // Wait for thread to execute command
boolean interupted ; while ( shared_cmd . cmd_pending == true ) { interupted = mon . wait_it ( Tango_DEFAULT_TIMEOUT ) ; if ( shared_cmd . cmd_pending == true && interupted == false ) { Util . out4 . println ( "TIME OUT" ) ; poll_list . remove ( poll_list . size ( ) - 1 ) ; Except . throw_exception ( "API_CommandTimedOut" , "Polling thread blocked !!!" , "DServer.add_obj_polling" ) ; } } Util . out4 . println ( "Thread cmd normally executed" ) ; // Update polling parameters in database ( if wanted and possible )
// If the property is already there ( it should not but . . . ) ,
// only update its polling period
if ( with_db_upd && Util . _UseDb ) { final String upd_str = "" + upd ; boolean found = false ; final DbDatum db_info = new DbDatum ( "polled_cmd" ) ; if ( type == Tango_POLL_CMD ) { final Vector non_auto_list = dev . get_non_auto_polled_cmd ( ) ; for ( int i = 0 ; i < non_auto_list . size ( ) ; i ++ ) { final String s = ( String ) non_auto_list . elementAt ( i ) ; if ( s . equals ( obj_name ) ) { non_auto_list . remove ( i ) ; db_info . name = "non_auto_polled_cmd" ; db_info . insert ( stringVect2StringArray ( non_auto_list ) ) ; found = true ; break ; } } if ( found == false ) { final Vector cmd_list = dev . get_polled_cmd ( ) ; int i ; for ( i = 0 ; i < cmd_list . size ( ) ; i = i + 2 ) { final String s = ( String ) cmd_list . elementAt ( i ) ; if ( s . equals ( obj_name ) ) { cmd_list . remove ( i + 1 ) ; cmd_list . insertElementAt ( upd_str , i + 1 ) ; break ; } } if ( i == cmd_list . size ( ) ) { cmd_list . add ( obj_name ) ; cmd_list . add ( upd_str ) ; } db_info . insert ( stringVect2StringArray ( cmd_list ) ) ; } } else { final Vector non_auto_list = dev . get_non_auto_polled_attr ( ) ; for ( int i = 0 ; i < non_auto_list . size ( ) ; i ++ ) { final String s = ( String ) non_auto_list . elementAt ( i ) ; if ( s . equals ( obj_name ) ) { non_auto_list . remove ( i ) ; db_info . name = "non_auto_polled_attr" ; db_info . insert ( stringVect2StringArray ( non_auto_list ) ) ; found = true ; break ; } } if ( found == false ) { db_info . name = "polled_attr" ; final Vector attr_list = dev . get_polled_attr ( ) ; int i ; for ( i = 0 ; i < attr_list . size ( ) ; i = i + 2 ) { final String s = ( String ) attr_list . elementAt ( i ) ; if ( s . equals ( obj_name ) ) { attr_list . remove ( i + 1 ) ; attr_list . insertElementAt ( upd_str , i + 1 ) ; break ; } } if ( i == attr_list . size ( ) ) { attr_list . add ( obj_name ) ; attr_list . add ( upd_str ) ; } db_info . 
insert ( stringVect2StringArray ( attr_list ) ) ; } } final DbDatum [ ] send_data = new DbDatum [ 1 ] ; send_data [ 0 ] = db_info ; dev . get_db_device ( ) . put_property ( send_data ) ; Util . out4 . println ( "Polling properties updated" ) ; } // Mark the device as polled
dev . is_polled ( true ) ; |
public class MessageProcessor { /** * Set up a connection to the message processor for any internal components
* that require the creation of producers / consumer . */
private void createSystemConnection ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "createSystemConnection" ) ; Subject subject = _authorisationUtils . getSIBServerSubject ( ) ; try { _connectionToMP = ( MPCoreConnection ) createConnection ( subject , true , null ) ; } catch ( SIResourceException e ) { // FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.MessageProcessor.createSystemConnection" , "1:2529:1.445" , this ) ; // Won ' t ever be thrown
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "createSystemConnection" , "SIErrorException " + e ) ; throw new SIErrorException ( e ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "createSystemConnection" ) ; |
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getCPI ( ) { } } | if ( cpiEClass == null ) { cpiEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 230 ) ; } return cpiEClass ; |
public class AbstractRule { /** * @ see java . lang . Comparable # compareTo ( Object )
* We need this because we sort by the number of conditions on the rule
* The comparison is done in reverse because we want the high conditions at the top */
public final int compareTo ( AbstractRule o ) { } } | Integer us = new Integer ( this . getSpecificity ( ) ) ; Integer them = new Integer ( o . getSpecificity ( ) ) ; return them . compareTo ( us ) ; |
public class TableFactorBuilder { /** * Increments the weight of each assignment in { @ code this } by its weight in
* { @ code factor } .
* @ param factor */
public void incrementWeight ( DiscreteFactor factor ) { } } | Preconditions . checkArgument ( factor . getVars ( ) . equals ( getVars ( ) ) ) ; weightBuilder . increment ( factor . getWeights ( ) ) ; |
public class RootConsole { /** * Starts monitoring the input file . */
public void start ( ) { } } | System . out . println ( new LogEntry ( "starting root console" ) ) ; try { openFileReader ( ) ; } catch ( IOException ioe ) { throw new ResourceException ( "can not open file reader" , ioe ) ; } thread = new Thread ( this ) ; isRunning = true ; thread . start ( ) ; |
public class JsonMappingDataDictionary { /** * Walks through the Json object structure and translates values based on element path if necessary .
* @ param jsonData
* @ param jsonPath
* @ param context */
private void traverseJsonData ( JSONObject jsonData , String jsonPath , TestContext context ) { } } | for ( Iterator it = jsonData . entrySet ( ) . iterator ( ) ; it . hasNext ( ) ; ) { Map . Entry jsonEntry = ( Map . Entry ) it . next ( ) ; if ( jsonEntry . getValue ( ) instanceof JSONObject ) { traverseJsonData ( ( JSONObject ) jsonEntry . getValue ( ) , ( StringUtils . hasText ( jsonPath ) ? jsonPath + "." + jsonEntry . getKey ( ) : jsonEntry . getKey ( ) . toString ( ) ) , context ) ; } else if ( jsonEntry . getValue ( ) instanceof JSONArray ) { JSONArray jsonArray = ( JSONArray ) jsonEntry . getValue ( ) ; for ( int i = 0 ; i < jsonArray . size ( ) ; i ++ ) { if ( jsonArray . get ( i ) instanceof JSONObject ) { traverseJsonData ( ( JSONObject ) jsonArray . get ( i ) , String . format ( ( StringUtils . hasText ( jsonPath ) ? jsonPath + "." + jsonEntry . getKey ( ) : jsonEntry . getKey ( ) . toString ( ) ) + "[%s]" , i ) , context ) ; } else { jsonArray . set ( i , translate ( String . format ( ( StringUtils . hasText ( jsonPath ) ? jsonPath + "." + jsonEntry . getKey ( ) : jsonEntry . getKey ( ) . toString ( ) ) + "[%s]" , i ) , jsonArray . get ( i ) , context ) ) ; } } } else { jsonEntry . setValue ( translate ( ( StringUtils . hasText ( jsonPath ) ? jsonPath + "." + jsonEntry . getKey ( ) : jsonEntry . getKey ( ) . toString ( ) ) , jsonEntry . getValue ( ) != null ? jsonEntry . getValue ( ) : null , context ) ) ; } } |
public class OpenCensusSleuthSpan {
    /**
     * Builds an OpenCensus {@link SpanContext} from a Sleuth span: the 128-bit
     * trace id is assembled from the high/low longs, the 64-bit span id from the
     * span id, and the sampled flag from {@code isExportable()}.
     * TODO: upgrade to new SpanContext.create() once it has been released.
     */
    @SuppressWarnings("deprecation")
    private static SpanContext fromSleuthSpan(org.springframework.cloud.sleuth.Span span) {
        return SpanContext.create(
                // High long first, then low long, to form the 16-byte trace id.
                TraceId.fromBytes(ByteBuffer.allocate(TraceId.SIZE).putLong(span.getTraceIdHigh()).putLong(span.getTraceId()).array()),
                SpanId.fromBytes(ByteBuffer.allocate(SpanId.SIZE).putLong(span.getSpanId()).array()),
                // isExportable() may be null; only an explicit TRUE maps to "sampled".
                Boolean.TRUE.equals(span.isExportable()) ? sampledOptions : notSampledOptions);
    }
}
public class InstrumentedInvokerFactory {
    /**
     * Factory method for TimedInvoker: registers one {@link Timer} per
     * {@code @Timed}-annotated method and wraps the invoker with the
     * method-name-to-timer map.
     * NOTE(review): timers are keyed by the simple method name, so overloaded
     * annotated methods would produce duplicate keys and make
     * {@code ImmutableMap.Builder#build()} throw -- confirm overloads cannot
     * occur here.
     */
    private Invoker timed(Invoker invoker, List<Method> timedMethods) {
        ImmutableMap.Builder<String, Timer> timers = new ImmutableMap.Builder<>();
        for (Method m : timedMethods) {
            Timed annotation = m.getAnnotation(Timed.class);
            final String name = chooseName(annotation.name(), annotation.absolute(), m);
            Timer timer = metricRegistry.timer(name);
            timers.put(m.getName(), timer);
        }
        return new InstrumentedInvokers.TimedInvoker(invoker, timers.build());
    }
}
public class HadoopFileReader { /** * Loads template as InputStreams
* @ param fileName filename of template ( full URI / path ) to load
* @ return InputStream of the template
* @ throws java . io . IOException in case of issues loading a file */
public InputStream loadTemplate ( String fileName ) throws IOException { } } | Path currentPath = new Path ( fileName ) ; return openFile ( currentPath ) ; |
public class ParsedValue { /** * called by format processors */
void put ( ChronoElement < ? > element , Object v ) { } } | if ( element == null ) { throw new NullPointerException ( ) ; } if ( v == null ) { // removal
if ( this . map != null ) { this . map . remove ( element ) ; if ( this . map . isEmpty ( ) ) { this . map = null ; } } } else { Map < ChronoElement < ? > , Object > m = this . map ; if ( m == null ) { m = new HashMap < > ( ) ; this . map = m ; } m . put ( element , v ) ; } |
public class JarURLConnection {
    /**
     * get the specs for a given url out of the cache, and compute and cache them
     * if they're not there. Splits "protocol:inner!/entry" into the inner jar
     * file URL and the (UTF-8 decoded) entry name.
     * @throws MalformedURLException if the spec contains no '!'
     */
    private void parseSpecs(URL url) throws MalformedURLException {
        String spec = url.getFile();
        int separator = spec.indexOf('!');
        /* REMIND: we don't handle nested JAR URLs */
        if (separator == -1) {
            throw new MalformedURLException("no ! found in url spec:" + spec);
        }
        // Get the protocol
        String protocol = spec.substring(0, spec.indexOf(":"));
        // This is the important part: we use a URL Stream Handler Factory to find
        // the URL Stream handler to use for the nested protocol.
        // (separator++ leaves 'separator' pointing just past the '!'.)
        this.jarFileURL = new URL(null, spec.substring(0, separator++), this.handlerFactory.createURLStreamHandler(protocol));
        this.entryName = null;
        /* if ! is the last letter of the innerURL, entryName is null */
        // ++separator now skips the '/' of the "!/" delimiter as well.
        if (++separator != spec.length()) {
            this.entryName = spec.substring(separator, spec.length());
            try {
                // Note: we decode using UTF8 since it's the W3C recommendation.
                // See http://www.w3.org/TR/html40/appendix/notes.html#non-ascii-chars
                this.entryName = URLDecoder.decode(this.entryName, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                // Not supporting UTF-8 as a valid encoding for some reasons. We
                // consider XWiki cannot work without that encoding.
                throw new RuntimeException("Failed to URL decode [" + this.entryName + "] using UTF-8.", e);
            }
        }
    }
}
public class DownloadRunner { /** * Print usage options */
private static void usage ( Options options ) { } } | HelpFormatter formatter = new HelpFormatter ( ) ; formatter . printHelp ( DownloadRunner . class . getSimpleName ( ) , options ) ; |
public class OpenWatcomCompiler {
    /**
     * Get undefine switch; delegates to the shared {@link OpenWatcomProcessor}
     * helper which appends the switch text.
     * @param buffer StringBuffer argument destination
     * @param define String preprocessor macro
     */
    @Override
    protected final void getUndefineSwitch(final StringBuffer buffer, final String define) {
        OpenWatcomProcessor.getUndefineSwitch(buffer, define);
    }
}
public class RejoinTaskBuffer {
    /**
     * Generate the byte array in preparation of moving over a message bus.
     * Idempotent, but not thread-safe. Also changes state to immutable.
     */
    public void compile() {
        // First call only: record the written size, flip the buffer for reading
        // and account for the bytes with the allocator. A non-zero compiledSize
        // marks the buffer as already compiled (hence the idempotence).
        if (compiledSize == 0) {
            ByteBuffer bb = m_container.b();
            compiledSize = bb.position();
            bb.flip();
            m_allocator.track(compiledSize);
        }
        if (log.isTraceEnabled()) {
            StringBuilder sb = new StringBuilder("Compiling buffer: ");
            // bDR() presumably yields a duplicate for reading so the compiled
            // buffer's own position is untouched -- TODO confirm.
            ByteBuffer dup = m_container.bDR();
            while (dup.hasRemaining()) {
                sb.append(" ").append(dup.get());
            }
            log.trace(sb.toString());
        }
    }
}
public class BasicEvaluationCtx {
    /**
     * Returns attribute value(s) from the subject section of the request
     * that have no issuer.
     * @param type the type of the attribute value(s) to find
     * @param id the id of the attribute value(s) to find
     * @param category the category the attribute value(s) must be in
     * @return a result containing a bag either empty because no values were
     * found or containing at least one value, or status associated with an
     * Indeterminate result
     */
    public EvaluationResult getSubjectAttribute(URI type, URI id, URI category) {
        // Delegates to the full lookup with a null issuer.
        return getSubjectAttribute(type, id, null, category);
    }
}
public class FreeMarkerTag { /** * Returns a map of all variables in scope .
* @ return map of all variables in scope . */
protected Map getAllVariables ( ) { } } | try { Iterator names = FreeMarkerTL . getEnvironment ( ) . getKnownVariableNames ( ) . iterator ( ) ; Map vars = new HashMap ( ) ; while ( names . hasNext ( ) ) { Object name = names . next ( ) ; vars . put ( name , get ( name . toString ( ) ) ) ; } return vars ; } catch ( Exception e ) { throw new ViewException ( e ) ; } |
public class LogLevelMapping { /** * Normalizes the given level to one of those supported by Selenium .
* @ param level log level to normalize
* @ return the selenium supported corresponding log level */
public static Level normalize ( Level level ) { } } | if ( levelMap . containsKey ( level . intValue ( ) ) ) { return levelMap . get ( level . intValue ( ) ) ; } else if ( level . intValue ( ) >= Level . SEVERE . intValue ( ) ) { return Level . SEVERE ; } else if ( level . intValue ( ) >= Level . WARNING . intValue ( ) ) { return Level . WARNING ; } else if ( level . intValue ( ) >= Level . INFO . intValue ( ) ) { return Level . INFO ; } else { return Level . FINE ; } |
public class SortedCursor {
    /**
     * Convenience method to create a comparator which orders storables by the
     * given order-by properties. The property names may be prefixed with '+'
     * or '-' to indicate ascending or descending order. If the prefix is
     * omitted, ascending order is assumed.
     * @param type type of storable to create comparator for
     * @param orderProperties list of properties to order by
     * @throws IllegalArgumentException if any property is null or not a member
     * of storable type
     */
    public static <S> Comparator<S> createComparator(Class<S> type, String... orderProperties) {
        BeanComparator bc = BeanComparator.forClass(type);
        if (Storable.class.isAssignableFrom(type)) {
            // Storable types: OrderedProperty.parse understands the '+'/'-' prefix.
            StorableInfo info = StorableIntrospector.examine((Class) type);
            for (String property : orderProperties) {
                bc = orderBy(bc, OrderedProperty.parse(info, property));
            }
        } else {
            // Plain beans: strip any '+'/'-' prefix only to resolve the property
            // type; the original (possibly prefixed) name is handed to orderBy.
            for (String property : orderProperties) {
                Class propertyType;
                {
                    String name = property;
                    if (name.startsWith("+") || name.startsWith("-")) {
                        name = name.substring(1);
                    }
                    propertyType = propertyType(type, name);
                }
                bc = orderBy(bc, property, propertyType, Direction.ASCENDING);
            }
        }
        return bc;
    }
}
public class Selectable { /** * Selector that matches any descendant element that satisfies the specified constraints . If no
* constraints are provided , accepts all descendant elements .
* @ param constraints element constraints
* @ return element selector */
public final ElementSelector < T > descendant ( ElementConstraint ... constraints ) { } } | return new DescendantSelector < T > ( getContext ( ) , getCurrentSelector ( ) , Arrays . asList ( constraints ) ) ; |
public class CouchURIHelper { /** * Returns URI for { @ code documentId } with { @ code query } . */
public URI documentUri ( String documentId , Map < String , Object > query ) { } } | String base_uri = String . format ( "%s/%s" , this . rootUriString , this . encodeId ( documentId ) ) ; String uri = appendQueryString ( base_uri , query ) ; return uriFor ( uri ) ; |
public class Math { /** * Standardizes each column of a matrix to 0 mean and unit variance . */
public static void standardize ( double [ ] [ ] x ) { } } | int n = x . length ; int p = x [ 0 ] . length ; double [ ] center = colMeans ( x ) ; for ( int i = 0 ; i < n ; i ++ ) { for ( int j = 0 ; j < p ; j ++ ) { x [ i ] [ j ] = x [ i ] [ j ] - center [ j ] ; } } double [ ] scale = new double [ p ] ; for ( int j = 0 ; j < p ; j ++ ) { for ( int i = 0 ; i < n ; i ++ ) { scale [ j ] += Math . sqr ( x [ i ] [ j ] ) ; } scale [ j ] = Math . sqrt ( scale [ j ] / ( n - 1 ) ) ; if ( ! Math . isZero ( scale [ j ] ) ) { for ( int i = 0 ; i < n ; i ++ ) { x [ i ] [ j ] /= scale [ j ] ; } } } |
public class IOCAFunctionSetIdentificationImpl {
    /**
     * Sets the CATEGORY attribute and emits an EMF SET notification when
     * adapters require it.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setCATEGORY(Integer newCATEGORY) {
        Integer oldCATEGORY = category;
        category = newCATEGORY;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.IOCA_FUNCTION_SET_IDENTIFICATION__CATEGORY, oldCATEGORY, category));
    }
}
public class DeprecatedTypesafeEnumPattern {
    /**
     * implements the visitor to look for classes compiled with 1.5 or better
     * that have all constructors that are private
     * @param context the currently parsed class context object
     */
    @Override
    public void visitClassContext(ClassContext context) {
        try {
            JavaClass cls = context.getJavaClass();
            // Only non-enum classes compiled for Java 5 or later are candidates.
            if (!cls.isEnum() && (cls.getMajor() >= Const.MAJOR_1_5)) {
                Method[] methods = cls.getMethods();
                for (Method m : methods) {
                    // A single non-private constructor disqualifies the class.
                    if (Values.CONSTRUCTOR.equals(m.getName()) && !m.isPrivate()) {
                        return;
                    }
                }
                // Reset per-class detector state before visiting the class body.
                firstEnumPC = 0;
                enumCount = 0;
                enumConstNames = new HashSet<String>(10);
                super.visitClassContext(context);
            }
        } finally {
            // Always release per-class state, including on the early return above.
            enumConstNames = null;
        }
    }
}
public class AbstractServerPredicate {
    /**
     * Referenced from RoundRobinRule.
     * Inspired by the implementation of {@link AtomicInteger#incrementAndGet()}.
     * @param modulo The modulo to bound the value of the counter.
     * @return The next value.
     */
    private int incrementAndGetModulo(int modulo) {
        // CAS loop: advance nextIndex to (current + 1) % modulo and return the
        // pre-increment value. The extra "current < modulo" guard re-loops when a
        // stale value >= modulo was read (presumably after the bound shrank --
        // TODO confirm), so the returned index is always valid for this modulo.
        for (;;) {
            int current = nextIndex.get();
            int next = (current + 1) % modulo;
            if (nextIndex.compareAndSet(current, next) && current < modulo)
                return current;
        }
    }
}
public class CombineFileInputFormat {
    /**
     * Create a new pool and add the filters to it.
     * A split cannot have files from different pools.
     * @param conf the job configuration (not used by this implementation)
     * @param filters the path filters combined into the new pool
     */
    protected void createPool(JobConf conf, List<PathFilter> filters) {
        pools.add(new MultiPathFilter(filters));
    }
}
public class CmsADESessionCache {
    /**
     * Adds the formatter id to the recently used list for the given type.<p>
     * The id is moved (or inserted) to the front of the per-type list, which is
     * capped at RECENT_FORMATTERS_SIZE entries.
     * @param resType the resource type
     * @param formatterId the formatter id
     */
    public void addRecentFormatter(String resType, CmsUUID formatterId) {
        List<CmsUUID> formatterIds = m_recentFormatters.get(resType);
        if (formatterIds == null) {
            formatterIds = new ArrayList<CmsUUID>();
            m_recentFormatters.put(resType, formatterIds);
        }
        // Remove a previous occurrence so the id is not listed twice.
        formatterIds.remove(formatterId);
        // Drop the oldest (last) entry when the list is at capacity.
        if (formatterIds.size() >= (RECENT_FORMATTERS_SIZE)) {
            formatterIds.remove(RECENT_FORMATTERS_SIZE - 1);
        }
        formatterIds.add(0, formatterId);
    }
}
public class BaseUpdateableRegressor { /** * Performs training on an updateable classifier by going over the whole
* data set in random order one observation at a time , multiple times .
* @ param dataSet the data set to train from
* @ param toTrain the classifier to train
* @ param epochs the number of passes through the data set */
public static void trainEpochs ( RegressionDataSet dataSet , UpdateableRegressor toTrain , int epochs ) { } } | if ( epochs < 1 ) throw new IllegalArgumentException ( "epochs must be positive" ) ; toTrain . setUp ( dataSet . getCategories ( ) , dataSet . getNumNumericalVars ( ) ) ; IntList randomOrder = new IntList ( dataSet . size ( ) ) ; ListUtils . addRange ( randomOrder , 0 , dataSet . size ( ) , 1 ) ; for ( int epoch = 0 ; epoch < epochs ; epoch ++ ) { Collections . shuffle ( randomOrder ) ; for ( int i : randomOrder ) toTrain . update ( dataSet . getDataPoint ( i ) , dataSet . getWeight ( i ) , dataSet . getTargetValue ( i ) ) ; } |
public class AgentPoolsInner {
    /**
     * Gets the agent pool.
     * Gets the details of the agent pool by managed cluster and resource group.
     * @param resourceGroupName The name of the resource group.
     * @param managedClusterName The name of the managed cluster resource.
     * @param agentPoolName The name of the agent pool.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AgentPoolInner object
     */
    public Observable<AgentPoolInner> getAsync(String resourceGroupName, String managedClusterName, String agentPoolName) {
        // Unwrap the ServiceResponse envelope and emit only its body.
        return getWithServiceResponseAsync(resourceGroupName, managedClusterName, agentPoolName).map(new Func1<ServiceResponse<AgentPoolInner>, AgentPoolInner>() {
            @Override
            public AgentPoolInner call(ServiceResponse<AgentPoolInner> response) {
                return response.body();
            }
        });
    }
}
public class TenantService {
    /**
     * Get the {@link TenantDefinition} of the tenant with the given name. If the given
     * tenant name is the default tenant, this method calls {@link #getDefaultTenantDef()}.
     * Otherwise, the DBService must be initialized and the tenant definition is read
     * from the database.
     * @param tenantName Candidate tenant name; must not be null.
     * @return Definition of tenant if it exists, otherwise null.
     */
    public TenantDefinition getTenantDefinition(String tenantName) {
        // Allow access to default tenant if DBService is not yet initialized.
        if (tenantName.equals(m_defaultTenantName)) {
            return getDefaultTenantDef();
        }
        // Non-default tenants require a running service before the DB lookup.
        checkServiceState();
        return getTenantDef(tenantName);
    }
}
public class JBBPDslBuilder { /** * Add named fixed signed short array which size calculated through expression .
* @ param name name of the field , if null then anonymous
* @ param sizeExpression expression to be used to calculate size , must not be null
* @ return the builder instance , must not be null */
public JBBPDslBuilder ShortArray ( final String name , final String sizeExpression ) { } } | final Item item = new Item ( BinType . SHORT_ARRAY , name , this . byteOrder ) ; item . sizeExpression = assertExpressionChars ( sizeExpression ) ; this . addItem ( item ) ; return this ; |
public class ThrowableNoCauseMatcher { /** * ( non - Javadoc )
* @ see org . hamcrest . Matcher # matches ( java . lang . Object ) */
@ Override public boolean matches ( Object obj ) { } } | if ( ! ( obj instanceof Throwable ) ) return false ; Throwable throwable = ( Throwable ) obj ; return throwable . getCause ( ) == null ; |
public class ModifyVpcEndpointConnectionNotificationRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < ModifyVpcEndpointConnectionNotificationRequest > getDryRunRequest ( ) { } } | Request < ModifyVpcEndpointConnectionNotificationRequest > request = new ModifyVpcEndpointConnectionNotificationRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ; |
public class MenuPanel { /** * Adds an example to the recent subMenu .
* @ param text the text to display .
* @ param data the example data instance
* @ param select should the menuItem be selected */
private void addRecentExample ( final String text , final ExampleData data , final boolean select ) { } } | WMenuItem item = new WMenuItem ( text , new SelectExampleAction ( ) ) ; item . setCancel ( true ) ; menu . add ( item ) ; item . setActionObject ( data ) ; if ( select ) { menu . setSelectedMenuItem ( item ) ; } |
public class CommerceNotificationAttachmentUtil {
    /**
     * Removes the commerce notification attachment with the primary key from the
     * database. Also notifies the appropriate model listeners. Delegates to the
     * persistence layer.
     * @param commerceNotificationAttachmentId the primary key of the commerce notification attachment
     * @return the commerce notification attachment that was removed
     * @throws NoSuchNotificationAttachmentException if a commerce notification attachment with the primary key could not be found
     */
    public static CommerceNotificationAttachment remove(long commerceNotificationAttachmentId) throws com.liferay.commerce.notification.exception.NoSuchNotificationAttachmentException {
        return getPersistence().remove(commerceNotificationAttachmentId);
    }
}
public class BufferUtil {
    /**
     * Creates a new Buffer from the given character sequence.
     * @param data the character data to encode
     * @param charset the charset used to encode {@code data}
     * @return {@link ByteBuffer} wrapping the encoded bytes
     * @since 4.5.0
     */
    public static ByteBuffer create(CharSequence data, Charset charset) {
        return create(StrUtil.bytes(data, charset));
    }
}
public class RtmpClient {
    /**
     * Send a remote procedure call via the default "my-rtmps" endpoint.
     * Note that due to varargs ambiguity, this method will not work if the first
     * argument to the call is a string. In that case, use the explicit
     * {@link #sendRpcWithEndpoint(String, String, String, Object...)} with
     * endpoint "my-rtmps" instead, or use
     * {@link #sendRpcToDefault(String, String, Object...)}
     * @param service The service handling the call
     * @param method The method to call
     * @param args Optional arguments to the call
     * @return The invoke id callback
     * @deprecated Use the explicit {@link #sendRpcToDefault(String, String, Object...)} instead
     */
    @Deprecated
    public int sendRpc(String service, String method, Object... args) {
        return sendRpc("my-rtmps", service, method, args);
    }
}
public class ScopeService { /** * Updates a scope . If the scope does not exists , returns an error .
* @ param req http request
* @ return String message that will be returned in the response */
public String updateScope ( FullHttpRequest req , String scopeName ) throws OAuthException { } } | String contentType = ( req . headers ( ) != null ) ? req . headers ( ) . get ( HttpHeaderNames . CONTENT_TYPE ) : null ; // check Content - Type
if ( contentType != null && contentType . contains ( ResponseBuilder . APPLICATION_JSON ) ) { try { Scope scope = InputValidator . validate ( req . content ( ) . toString ( CharsetUtil . UTF_8 ) , Scope . class ) ; if ( scope . validForUpdate ( ) ) { Scope foundScope = DBManagerFactory . getInstance ( ) . findScope ( scopeName ) . blockingGet ( ) ; if ( foundScope == null ) { LOG . error ( "scope does not exist" ) ; throw new OAuthException ( SCOPE_NOT_EXIST , HttpResponseStatus . BAD_REQUEST ) ; } else { setScopeEmptyValues ( scope , foundScope ) ; DBManagerFactory . getInstance ( ) . storeScope ( scope ) ; } } else { LOG . error ( "scope is not valid" ) ; throw new OAuthException ( MANDATORY_SCOPE_ERROR , HttpResponseStatus . BAD_REQUEST ) ; } } catch ( Exception e ) { LOG . error ( "cannot handle scope request" , e ) ; throw new OAuthException ( e , null , HttpResponseStatus . BAD_REQUEST ) ; } } else { throw new OAuthException ( ResponseBuilder . UNSUPPORTED_MEDIA_TYPE , HttpResponseStatus . BAD_REQUEST ) ; } return SCOPE_UPDATED_OK_MESSAGE ; |
public class BoundedLocalCache {
    /**
     * Adapts the eviction policy towards the optimal recency/frequency
     * configuration: computes the next adjustment step, demotes entries from
     * the main protected region, then grows or shrinks the admission window by
     * the signed adjustment amount.
     */
    @GuardedBy("evictionLock")
    void climb() {
        // Only size-based eviction uses the adaptive window.
        if (!evicts()) {
            return;
        }
        determineAdjustment();
        demoteFromMainProtected();
        long amount = adjustment();
        if (amount == 0) {
            return;
        } else if (amount > 0) {
            increaseWindow();
        } else {
            decreaseWindow();
        }
    }
}
public class CmsCategoryService {
    /**
     * Adds a resource identified by the given resource name to the given category.<p>
     * The resource has to be locked.<p>
     * @param cms the current cms context
     * @param resourceName the site relative path to the resource to add
     * @param category the category to add the resource to
     * @throws CmsException if something goes wrong
     */
    public void addResourceToCategory(CmsObject cms, String resourceName, CmsCategory category) throws CmsException {
        // Nothing to do if the resource is already in this category.
        if (readResourceCategories(cms, cms.readResource(resourceName, CmsResourceFilter.IGNORE_EXPIRATION)).contains(category)) {
            return;
        }
        // Record the relation using the category's site-relative path.
        String sitePath = cms.getRequestContext().removeSiteRoot(category.getRootPath());
        cms.addRelationToResource(resourceName, sitePath, CmsRelationType.CATEGORY.getName());
        String parentCatPath = category.getPath();
        // recursively add to higher level categories
        if (parentCatPath.endsWith("/")) {
            parentCatPath = parentCatPath.substring(0, parentCatPath.length() - 1);
        }
        if (parentCatPath.lastIndexOf('/') > 0) {
            addResourceToCategory(cms, resourceName, parentCatPath.substring(0, parentCatPath.lastIndexOf('/') + 1));
        }
    }
}
public class AbstractAlpineQueryManager { /** * Wrapper around { @ link Query # executeWithMap ( Map ) } that adds transparent support for
* pagination and ordering of results via { @ link # decorate ( Query ) } .
* @ param query the JDO Query object to execute
* @ param parameters the < code > Map < / code > containing all of the parameters .
* @ return a PaginatedResult object
* @ since 1.0.0 */
public PaginatedResult execute ( final Query query , final Map parameters ) { } } | final long count = getCount ( query , parameters ) ; decorate ( query ) ; return new PaginatedResult ( ) . objects ( query . executeWithMap ( parameters ) ) . total ( count ) ; |
public class ElementsExceptionsFactory {
    /**
     * Constructs and initializes a new {@link TypeNotFoundException} with the given {@link Throwable cause}
     * and {@link String message} formatted with the given {@link Object[] arguments}.
     * @param cause {@link Throwable} identified as the reason this {@link TypeNotFoundException} was thrown.
     * @param message {@link String} describing the {@link TypeNotFoundException exception}.
     * @param args {@link Object[] arguments} used to replace format placeholders in the {@link String message}.
     * @return a new {@link TypeNotFoundException} with the given {@link Throwable cause} and {@link String message}.
     * @see org.cp.elements.lang.TypeNotFoundException
     */
    public static TypeNotFoundException newTypeNotFoundException(Throwable cause, String message, Object... args) {
        // The message is formatted before the exception is constructed.
        return new TypeNotFoundException(format(message, args), cause);
    }
}
public class RibbonizerExtension { /** * utilities */
public Consumer < BufferedImage > grayScaleFilter ( ApplicationVariant variant , File iconFile ) { } } | return new GrayScaleBuilder ( ) . apply ( variant , iconFile ) ; |
public class ParsingValidator { /** * Validates an instance against the schema using a pre - configured { @ link SAXParserFactory } .
* < p > The factory given will be configured to be namespace aware and validating . < / p >
* @ param s the instance document
* @ param factory the factory to use , must not be null
* @ since XMLUnit 2.6.0 */
public ValidationResult validateInstance ( Source s , SAXParserFactory factory ) { } } | if ( factory == null ) { throw new IllegalArgumentException ( "factory must not be null" ) ; } try { factory . setNamespaceAware ( true ) ; factory . setValidating ( true ) ; SAXParser parser = factory . newSAXParser ( ) ; if ( Languages . W3C_XML_SCHEMA_NS_URI . equals ( language ) ) { parser . setProperty ( Properties . SCHEMA_LANGUAGE , Languages . W3C_XML_SCHEMA_NS_URI ) ; } final Source [ ] source = getSchemaSources ( ) ; Handler handler = new Handler ( ) ; if ( source . length != 0 ) { if ( Languages . W3C_XML_SCHEMA_NS_URI . equals ( language ) ) { InputSource [ ] schemaSource = new InputSource [ source . length ] ; for ( int i = 0 ; i < source . length ; i ++ ) { schemaSource [ i ] = Convert . toInputSource ( source [ i ] ) ; } parser . setProperty ( Properties . SCHEMA_SOURCE , schemaSource ) ; } else if ( source . length == 1 ) { handler . setSchemaSystemId ( source [ 0 ] . getSystemId ( ) ) ; } } InputSource input = Convert . toInputSource ( s ) ; try { parser . parse ( input , handler ) ; } catch ( SAXParseException e ) { handler . error ( ( SAXParseException ) e ) ; } catch ( SAXException e ) { throw new XMLUnitException ( e ) ; } return handler . getResult ( ) ; } catch ( ParserConfigurationException ex ) { throw new ConfigurationException ( ex ) ; } catch ( SAXNotRecognizedException ex ) { throw new ConfigurationException ( ex ) ; } catch ( SAXNotSupportedException ex ) { throw new ConfigurationException ( ex ) ; } catch ( SAXException ex ) { throw new XMLUnitException ( ex ) ; } catch ( java . io . IOException ex ) { throw new XMLUnitException ( ex ) ; } |
public class GetClientCertificateRequestMarshaller {
    /**
     * Marshall the given parameter object: writes the client certificate id
     * under its protocol binding.
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetClientCertificateRequest getClientCertificateRequest, ProtocolMarshaller protocolMarshaller) {
        if (getClientCertificateRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getClientCertificateRequest.getClientCertificateId(), CLIENTCERTIFICATEID_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class EConv {
    /**
     * make_replacement: lazily chooses the replacement bytes used for
     * unconvertible input. UTF-8 destinations get U+FFFD (EF BF BD); everything
     * else gets a plain '?'. Returns 0 on success (already made counts as made).
     */
    public int makeReplacement() {
        if (replacementString != null) return 0;
        byte[] insEnc = encodingToInsertOutput();
        final byte[] replEnc;
        final int len;
        final byte[] replacement;
        if (insEnc.length != 0) {
            // Transcoding transcoding = lastTranscoding;
            // Transcoder transcoder = transcoding.transcoder;
            // Encoding enc = EncodingDB.getEncodings().get(transcoder.destination).getEncoding();
            // get_replacement_character
            if (caseInsensitiveEquals(insEnc, "UTF-8".getBytes())) {
                // U+FFFD REPLACEMENT CHARACTER encoded in UTF-8.
                len = 3;
                replEnc = "UTF-8".getBytes();
                replacement = new byte[]{(byte) 0xEF, (byte) 0xBF, (byte) 0xBD};
            } else {
                len = 1;
                replEnc = "US-ASCII".getBytes();
                replacement = new byte[]{'?'};
            }
        } else {
            // No insert-output encoding: fall back to '?' with no encoding name.
            len = 1;
            replEnc = NULL_STRING;
            replacement = new byte[]{'?'};
        }
        replacementString = replacement;
        replacementLength = len;
        replacementEncoding = replEnc;
        return 0;
    }
}
public class Log { /** * Check that the ranges and sizes add up , otherwise we have lost some data somewhere */
private void validateSegments ( List < LogSegment > segments ) { } } | synchronized ( lock ) { for ( int i = 0 ; i < segments . size ( ) - 1 ; i ++ ) { LogSegment curr = segments . get ( i ) ; LogSegment next = segments . get ( i + 1 ) ; if ( curr . start ( ) + curr . size ( ) != next . start ( ) ) { throw new IllegalStateException ( "The following segments don't validate: " + curr . getFile ( ) . getAbsolutePath ( ) + ", " + next . getFile ( ) . getAbsolutePath ( ) ) ; } } } |
public class IfcSystemImpl {
    /**
     * Returns the SERVICES_BUILDINGS relation list via the generated EMF
     * reflective getter (resolved on access).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<IfcRelServicesBuildings> getServicesBuildings() {
        return (EList<IfcRelServicesBuildings>) eGet(Ifc4Package.Literals.IFC_SYSTEM__SERVICES_BUILDINGS, true);
    }
}
public class MailChimpMethod { /** * Get the MailChimp API method meta - info .
* @ throws IllegalArgumentException if neither this class nor any of its superclasses
* are annotated with { @ link Method } . */
public final Method getMetaInfo ( ) { } } | for ( Class < ? > c = getClass ( ) ; c != null ; c = c . getSuperclass ( ) ) { Method a = c . getAnnotation ( Method . class ) ; if ( a != null ) { return a ; } } throw new IllegalArgumentException ( "Neither " + getClass ( ) + " nor its superclasses are annotated with " + Method . class ) ; |
public class DeletionInfo { /** * Purge every tombstones that are older than { @ code gcbefore } .
* @ param gcBefore timestamp ( in seconds ) before which tombstones should be purged */
public void purge ( int gcBefore ) { } } | topLevel = topLevel . localDeletionTime < gcBefore ? DeletionTime . LIVE : topLevel ; if ( ranges != null ) { ranges . purge ( gcBefore ) ; if ( ranges . isEmpty ( ) ) ranges = null ; } |
public class MsBuildParser { /** * Determines the name of the file that is cause of the warning .
* @ param matcher
* the matcher to get the matches from
* @ return the name of the file with a warning */
private String determineFileName ( final Matcher matcher ) { } } | String fileName ; if ( StringUtils . isNotBlank ( matcher . group ( 3 ) ) ) { fileName = matcher . group ( 3 ) ; } else if ( StringUtils . isNotBlank ( matcher . group ( 7 ) ) ) { fileName = matcher . group ( 7 ) ; } else { fileName = matcher . group ( 1 ) ; } if ( StringUtils . isBlank ( fileName ) ) { fileName = StringUtils . substringBetween ( matcher . group ( 6 ) , "'" ) ; } if ( StringUtils . isBlank ( fileName ) ) { fileName = "unknown.file" ; } return fileName ; |
public class AbstractPlanNode { /** * Does the ( sub ) plan guarantee an identical result / effect when " replayed "
* against the same database state , such as during replication or CL recovery .
* @ return */
public boolean isOrderDeterministic ( ) { } } | // Leaf nodes need to re - implement this test .
assert ( m_children != null ) ; for ( AbstractPlanNode child : m_children ) { if ( ! child . isOrderDeterministic ( ) ) { m_nondeterminismDetail = child . m_nondeterminismDetail ; return false ; } } return true ; |
public class Generators { /** * constructs a generator that yields the specified seconds in increasing
* order for each minute .
* @ param seconds values in [ 0-59]
* @ param dtStart non null */
static Generator bySecondGenerator ( int [ ] seconds , final DateValue dtStart ) { } } | seconds = Util . uniquify ( seconds ) ; if ( seconds . length == 0 ) { seconds = new int [ ] { dtStart instanceof TimeValue ? ( ( TimeValue ) dtStart ) . second ( ) : 0 } ; } final int [ ] useconds = seconds ; if ( useconds . length == 1 ) { final int second = useconds [ 0 ] ; return new SingleValueGenerator ( ) { int year ; int month ; int day ; int hour ; int minute ; @ Override boolean generate ( DTBuilder builder ) { if ( ( year != builder . year ) || ( month != builder . month ) || ( day != builder . day ) || ( hour != builder . hour ) || ( minute != builder . minute ) ) { year = builder . year ; month = builder . month ; day = builder . day ; hour = builder . hour ; minute = builder . minute ; builder . second = second ; return true ; } return false ; } @ Override int getValue ( ) { return second ; } @ Override public String toString ( ) { return "bySecondGenerator:" + second ; } } ; } return new Generator ( ) { int i ; int year = dtStart . year ( ) ; int month = dtStart . month ( ) ; int day = dtStart . day ( ) ; int hour = dtStart instanceof TimeValue ? ( ( TimeValue ) dtStart ) . hour ( ) : 0 ; int minute = dtStart instanceof TimeValue ? ( ( TimeValue ) dtStart ) . minute ( ) : 0 ; { int second = dtStart instanceof TimeValue ? ( ( TimeValue ) dtStart ) . second ( ) : 0 ; while ( i < useconds . length && useconds [ i ] < second ) { ++ i ; } } @ Override boolean generate ( DTBuilder builder ) { if ( ( year != builder . year ) || ( month != builder . month ) || ( day != builder . day ) || ( hour != builder . hour ) || ( minute != builder . minute ) ) { i = 0 ; year = builder . year ; month = builder . month ; day = builder . day ; hour = builder . hour ; minute = builder . minute ; } if ( i >= useconds . length ) { return false ; } builder . second = useconds [ i ++ ] ; return true ; } @ Override public String toString ( ) { return "bySecondGenerator:" + Arrays . 
toString ( useconds ) ; } } ; |
public class ClientFactory { /** * Configure a linked data client suitable for use with a Fedora Repository .
* @ param endpoints additional endpoints to enable on the client
* @ param providers additional providers to enable on the client
* @ return a configuration for use with an LDClient */
public static ClientConfiguration createClient ( final List < Endpoint > endpoints , final List < DataProvider > providers ) { } } | return createClient ( null , null , endpoints , providers ) ; |
public class AbstractTermExtractor { /** * Executes the TermSpec returning a list of stringified terms . This method does not use the TermSpec ' s { @ link
* com . davidbracewell . hermes . ml . feature . ValueCalculator }
* @ param hString the HString to process
* @ return a list of term counts */
public List < String > collect ( @ NonNull HString hString ) { } } | return stream ( hString ) . collect ( Collectors . toList ( ) ) ; |
public class AmazonCloudFrontClient { /** * List all field - level encryption configurations that have been created in CloudFront for this account .
* @ param listFieldLevelEncryptionConfigsRequest
* @ return Result of the ListFieldLevelEncryptionConfigs operation returned by the service .
* @ throws InvalidArgumentException
* The argument is invalid .
* @ sample AmazonCloudFront . ListFieldLevelEncryptionConfigs
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloudfront - 2018-11-05 / ListFieldLevelEncryptionConfigs "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ListFieldLevelEncryptionConfigsResult listFieldLevelEncryptionConfigs ( ListFieldLevelEncryptionConfigsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListFieldLevelEncryptionConfigs ( request ) ; |
public class SortableArrayList { /** * Inserts the specified item into the list into a position that
* preserves the sorting of the list according to the supplied { @ link
* Comparator } . The list must be sorted ( via the supplied comparator )
* prior to the call to this method ( an empty list built up entirely
* via calls to { @ link # insertSorted } will be properly sorted ) .
* @ return the index at which the element was inserted . */
@ SuppressWarnings ( "unchecked" ) public int insertSorted ( T value , Comparator < ? super T > comp ) { } } | int ipos = binarySearch ( value , comp ) ; if ( ipos < 0 ) { ipos = - ( ipos + 1 ) ; } _elements = ( T [ ] ) ListUtil . insert ( _elements , ipos , value ) ; _size ++ ; return ipos ; |
public class DBWrapperFactory { /** * Create a wrapper around a collection of entities .
* @ param collection The collection to be wrapped .
* @ param entityClass The class of the entity that the collection contains .
* @ param isRevisionCollection Whether or not the collection is a collection of revision entities .
* @ param < T > The wrapper class that is returned .
* @ return The Wrapper around the collection of entities . */
@ SuppressWarnings ( { } } | "unchecked" , "rawtypes" } ) public < T extends BaseWrapper < T > , U > CollectionWrapper < T > createCollection ( final Collection < U > collection , final Class < U > entityClass , boolean isRevisionCollection ) { if ( collection == null ) { return null ; } // Create the key
final DBWrapperKey key = new DBWrapperKey ( collection , entityClass ) ; // Check to see if a wrapper has already been cached for the key
final DBCollectionWrapper cachedWrapper = wrapperCache . getCollection ( key ) ; if ( cachedWrapper != null ) { return cachedWrapper ; } final DBCollectionWrapper wrapper ; if ( entityClass == Locale . class ) { // LOCALE
wrapper = new DBLocaleCollectionWrapper ( this , ( Collection < Locale > ) collection , isRevisionCollection ) ; } else if ( entityClass == TranslationServer . class ) { // TRANSLATION SERVER
throw new UnsupportedOperationException ( "A return class needs to be specified for TranslationServer entities." ) ; } else if ( entityClass == Topic . class ) { // TOPIC
wrapper = new DBTopicCollectionWrapper ( this , ( Collection < Topic > ) collection , isRevisionCollection ) ; } else if ( entityClass == TopicSourceUrl . class ) { // TOPIC SOURCE URL
wrapper = new DBTopicSourceURLCollectionWrapper ( this , ( Collection < TopicSourceUrl > ) collection , isRevisionCollection ) ; } else if ( entityClass == TranslatedTopicData . class ) { // TRANSLATED TOPIC
wrapper = new DBTranslatedTopicDataCollectionWrapper ( this , ( Collection < TranslatedTopicData > ) collection , isRevisionCollection ) ; } else if ( entityClass == TranslatedTopicString . class ) { // TRANSLATED TOPIC STRING
wrapper = new DBTranslatedTopicStringCollectionWrapper ( this , ( Collection < TranslatedTopicString > ) collection , isRevisionCollection ) ; } else if ( entityClass == Tag . class ) { // TAG
wrapper = new DBTagCollectionWrapper ( this , ( Collection < Tag > ) collection , isRevisionCollection ) ; } else if ( entityClass == Category . class ) { // CATEGORY
wrapper = new DBCategoryCollectionWrapper ( this , ( Collection < Category > ) collection , isRevisionCollection ) ; } else if ( entityClass == TagToCategory . class ) { throw new UnsupportedOperationException ( "A return class needs to be specified for TagToCategory entities." ) ; } else if ( entityClass == PropertyTagToPropertyTagCategory . class ) { // PROPERTY TAGS
wrapper = new DBPropertyTagInPropertyCategoryCollectionWrapper ( this , ( Collection < PropertyTagToPropertyTagCategory > ) collection , isRevisionCollection ) ; } else if ( entityClass == PropertyTag . class ) { wrapper = new DBPropertyTagCollectionWrapper ( this , ( Collection < PropertyTag > ) collection , isRevisionCollection ) ; } else if ( entityClass == TopicToPropertyTag . class ) { wrapper = new DBTopicToPropertyTagCollectionWrapper ( this , ( Collection < TopicToPropertyTag > ) collection , isRevisionCollection ) ; } else if ( entityClass == TagToPropertyTag . class ) { wrapper = new DBTagToPropertyTagCollectionWrapper ( this , ( Collection < TagToPropertyTag > ) collection , isRevisionCollection ) ; } else if ( entityClass == ContentSpecToPropertyTag . class ) { wrapper = new DBContentSpecToPropertyTagCollectionWrapper ( this , ( Collection < ContentSpecToPropertyTag > ) collection , isRevisionCollection ) ; } else if ( entityClass == BlobConstants . class ) { // BLOB CONSTANT
wrapper = new DBBlobConstantCollectionWrapper ( this , ( Collection < BlobConstants > ) collection , isRevisionCollection ) ; } else if ( entityClass == StringConstants . class ) { // STRING CONSTANT
wrapper = new DBStringConstantCollectionWrapper ( this , ( Collection < StringConstants > ) collection , isRevisionCollection ) ; } else if ( entityClass == File . class ) { // FILE
wrapper = new DBFileCollectionWrapper ( this , ( Collection < File > ) collection , isRevisionCollection ) ; } else if ( entityClass == LanguageFile . class ) { // LANGUAGE IMAGE
wrapper = new DBLanguageFileCollectionWrapper ( this , ( Collection < LanguageFile > ) collection , isRevisionCollection ) ; } else if ( entityClass == ImageFile . class ) { // IMAGE
wrapper = new DBImageCollectionWrapper ( this , ( Collection < ImageFile > ) collection , isRevisionCollection ) ; } else if ( entityClass == LanguageImage . class ) { // LANGUAGE IMAGE
wrapper = new DBLanguageImageCollectionWrapper ( this , ( Collection < LanguageImage > ) collection , isRevisionCollection ) ; } else if ( entityClass == User . class ) { // USER
wrapper = new DBUserCollectionWrapper ( this , ( Collection < User > ) collection , isRevisionCollection ) ; } else if ( entityClass == ContentSpec . class ) { // CONTENT SPEC
wrapper = new DBContentSpecCollectionWrapper ( this , ( Collection < ContentSpec > ) collection , isRevisionCollection ) ; } else if ( entityClass == CSNode . class ) { // CONTENT SPEC NODE
wrapper = new DBCSNodeCollectionWrapper ( this , ( Collection < CSNode > ) collection , isRevisionCollection ) ; } else if ( entityClass == CSNodeToCSNode . class ) { wrapper = new DBCSRelatedNodeCollectionWrapper ( this , ( Collection < CSNodeToCSNode > ) collection , isRevisionCollection ) ; } else if ( entityClass == CSNode . class ) { // CONTENT SPEC INFO NODE
wrapper = new DBCSInfoNodeCollectionWrapper ( this , ( Collection < CSInfoNode > ) collection , isRevisionCollection ) ; } else if ( entityClass == TranslatedCSNode . class ) { // CONTENT SPEC TRANSLATED NODE
wrapper = new DBTranslatedCSNodeCollectionWrapper ( this , ( Collection < TranslatedCSNode > ) collection , isRevisionCollection ) ; } else if ( entityClass == TranslatedCSNodeString . class ) { // CONTENT SPEC TRANSLATED NODE STRING
wrapper = new DBTranslatedCSNodeStringCollectionWrapper ( this , ( Collection < TranslatedCSNodeString > ) collection , isRevisionCollection ) ; } else if ( entityClass == CSTranslationDetail . class ) { // CONTENT SPEC TRANSLATION DETAIL
wrapper = new DBCSTranslationDetailCollectionWrapper ( this , ( Collection < CSTranslationDetail > ) collection , isRevisionCollection ) ; } else if ( entityClass == UndefinedEntity . class ) { // UNDEFINED APPLICATION ENTITY
wrapper = new DBServerUndefinedEntityCollectionWrapper ( this , ( Collection < UndefinedEntity > ) collection , isRevisionCollection ) ; } else if ( entityClass == UndefinedSetting . class ) { // UNDEFINED APPLICATION SETTING
wrapper = new DBServerUndefinedSettingCollectionWrapper ( this , ( Collection < UndefinedSetting > ) collection , isRevisionCollection ) ; } else { throw new IllegalArgumentException ( "Failed to create a Collection Wrapper instance as there is no wrapper available for the Collection." ) ; } // Add the wrapper to the cache
wrapperCache . putCollection ( key , wrapper ) ; return wrapper ; |
public class DateTimeParserBucket { /** * Sorts elements [ 0 , high ) . Calling java . util . Arrays isn ' t always the right
* choice since it always creates an internal copy of the array , even if it
* doesn ' t need to . If the array slice is small enough , an insertion sort
* is chosen instead , but it doesn ' t need a copy !
* This method has a modified version of that insertion sort , except it
* doesn ' t create an unnecessary array copy . If high is over 10 , then
* java . util . Arrays is called , which will perform a merge sort , which is
* faster than insertion sort on large lists .
* The end result is much greater performance when computeMillis is called .
* Since the amount of saved fields is small , the insertion sort is a
* better choice . Additional performance is gained since there is no extra
* array allocation and copying . Also , the insertion sort here does not
* perform any casting operations . The version in java . util . Arrays performs
* casts within the insertion sort loop . */
private static void sort ( SavedField [ ] array , int high ) { } } | if ( high > 10 ) { Arrays . sort ( array , 0 , high ) ; } else { for ( int i = 0 ; i < high ; i ++ ) { for ( int j = i ; j > 0 && ( array [ j - 1 ] ) . compareTo ( array [ j ] ) > 0 ; j -- ) { SavedField t = array [ j ] ; array [ j ] = array [ j - 1 ] ; array [ j - 1 ] = t ; } } } |
public class DefaultSentryClientFactory { /** * Whether or not buffering is enabled .
* @ param dsn Sentry server DSN which may contain options .
* @ return Whether or not buffering is enabled . */
protected boolean getBufferEnabled ( Dsn dsn ) { } } | String bufferEnabled = Lookup . lookup ( BUFFER_ENABLED_OPTION , dsn ) ; if ( bufferEnabled != null ) { return Boolean . parseBoolean ( bufferEnabled ) ; } return BUFFER_ENABLED_DEFAULT ; |
public class Matrix3x2f { /** * / * ( non - Javadoc )
* @ see org . joml . Matrix3x2fc # normalizedPositiveX ( org . joml . Vector2f ) */
public Vector2f normalizedPositiveX ( Vector2f dir ) { } } | dir . x = m11 ; dir . y = - m01 ; return dir ; |
public class CapacityCommand { /** * Gets the formatted tier values of a worker .
* @ param map the map to get worker tier values from
* @ param workerName name of the worker
* @ return the formatted tier values of the input worker name */
private static String getWorkerFormattedTierValues ( Map < String , Map < String , String > > map , String workerName ) { } } | return map . values ( ) . stream ( ) . map ( ( tierMap ) -> ( String . format ( "%-14s" , tierMap . getOrDefault ( workerName , "-" ) ) ) ) . collect ( Collectors . joining ( "" ) ) ; |
public class DataSet { /** * Emits a DataSet using an { @ link OutputFormat } . This method adds a data sink to the program .
* Programs may have multiple data sinks . A DataSet may also have multiple consumers ( data sinks
* or transformations ) at the same time .
* @ param outputFormat The OutputFormat to process the DataSet .
* @ return The DataSink that processes the DataSet .
* @ see OutputFormat
* @ see DataSink */
public DataSink < T > output ( OutputFormat < T > outputFormat ) { } } | Preconditions . checkNotNull ( outputFormat ) ; // configure the type if needed
if ( outputFormat instanceof InputTypeConfigurable ) { ( ( InputTypeConfigurable ) outputFormat ) . setInputType ( getType ( ) , context . getConfig ( ) ) ; } DataSink < T > sink = new DataSink < > ( this , outputFormat , getType ( ) ) ; this . context . registerDataSink ( sink ) ; return sink ; |
public class PositionUtil { /** * Return the Excel / OO / LO address of a range , preceeded by the table name
* @ param row1 the first row
* @ param col1 the first col
* @ param row2 the last row
* @ param col2 the last col
* @ param table the table
* @ return the Excel / OO / LO address */
public String toRangeAddress ( final Table table , final int row1 , final int col1 , final int row2 , final int col2 ) { } } | return this . toCellAddress ( table , row1 , col1 ) + ":" + this . toCellAddress ( table , row2 , col2 ) ; |
public class FilteringScore { /** * Finds covering matches from the remainder . */
private List < Filter < S > > findCoveringMatches ( ) { } } | List < Filter < S > > coveringFilters = null ; boolean check = ! mRemainderFilters . isEmpty ( ) && ( mIdentityFilters . size ( ) > 0 || mRangeStartFilters . size ( ) > 0 || mRangeEndFilters . size ( ) > 0 ) ; if ( check ) { // Any remainder property which is provided by the index is a covering match .
for ( Filter < S > subFilter : mRemainderFilters ) { if ( isProvidedByIndex ( subFilter ) ) { if ( coveringFilters == null ) { coveringFilters = new ArrayList < Filter < S > > ( ) ; } coveringFilters . add ( subFilter ) ; } } } return prepareList ( coveringFilters ) ; |
public class Bytes { /** * Retrieve a < b > short < / b > from a byte array in a given byte order */
public static int toShort ( byte [ ] b , int off , boolean littleEndian ) { } } | if ( littleEndian ) { return ( ( b [ off ] & 0xFF ) | ( ( b [ off + 1 ] & 0xFF ) << 8 ) ) ; } return ( ( ( b [ off ] & 0xFF ) << 8 ) | ( b [ off + 1 ] & 0xFF ) ) ; |
public class Snappy { /** * Uncompress the input [ offset , offset + length ) as a String
* @ param input
* @ param offset
* @ param length
* @ return the uncompressed data
* @ throws IOException */
public static String uncompressString ( byte [ ] input , int offset , int length ) throws IOException { } } | try { return uncompressString ( input , offset , length , "UTF-8" ) ; } catch ( UnsupportedEncodingException e ) { throw new IllegalStateException ( "UTF-8 decoder is not found" ) ; } |
public class IpAccessControlListReader { /** * Make the request to the Twilio API to perform the read .
* @ param client TwilioRestClient with which to make the request
* @ return IpAccessControlList ResourceSet */
@ Override public ResourceSet < IpAccessControlList > read ( final TwilioRestClient client ) { } } | return new ResourceSet < > ( this , client , firstPage ( client ) ) ; |
public class Process { /** * Finds one work transition for this process matching the specified parameters
* @ param fromId
* @ param eventType
* @ param completionCode
* @ return the work transition value object ( or null if not found ) */
public Transition getTransition ( Long fromId , Integer eventType , String completionCode ) { } } | Transition ret = null ; for ( Transition transition : getTransitions ( ) ) { if ( transition . getFromId ( ) . equals ( fromId ) && transition . match ( eventType , completionCode ) ) { if ( ret == null ) ret = transition ; else { throw new IllegalStateException ( "Multiple matching work transitions when one expected:\n" + " processId: " + getId ( ) + " fromId: " + fromId + " eventType: " + eventType + "compCode: " + completionCode ) ; } } } return ret ; |
public class AwsSecurityFindingFilters { /** * A finding ' s title .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setTitle ( java . util . Collection ) } or { @ link # withTitle ( java . util . Collection ) } if you want to override the
* existing values .
* @ param title
* A finding ' s title .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AwsSecurityFindingFilters withTitle ( StringFilter ... title ) { } } | if ( this . title == null ) { setTitle ( new java . util . ArrayList < StringFilter > ( title . length ) ) ; } for ( StringFilter ele : title ) { this . title . add ( ele ) ; } return this ; |
public class Link { /** * Connect a device to the link
* @ param device
* @ throws TooManyConnectionException */
public void connectDevice ( Device device ) throws ShanksException { } } | if ( this . linkedDevices . size ( ) < deviceCapacity ) { if ( ! this . linkedDevices . contains ( device ) ) { this . linkedDevices . add ( device ) ; device . connectToLink ( this ) ; logger . finer ( "Link " + this . getID ( ) + " has Device " + device . getID ( ) + " in its linked device list." ) ; } else { logger . finer ( "Link " + this . getID ( ) + " already has Device " + device . getID ( ) + " in its linked device list." ) ; } } else { if ( ! this . linkedDevices . contains ( device ) ) { logger . warning ( "Link " + this . getID ( ) + " is full of its capacity. Device " + device . getID ( ) + " was not included in its linked device list." ) ; throw new TooManyConnectionException ( this ) ; } else { logger . finer ( "Link " + this . getID ( ) + " already has Device " + device . getID ( ) + " in its linked device list." ) ; } } |
public class NumberFormat { /** * Sets the maximum number of digits allowed in the integer portion of a
* number . This must be & gt ; = minimumIntegerDigits . If the
* new value for maximumIntegerDigits is less than the current value
* of minimumIntegerDigits , then minimumIntegerDigits will also be set to
* the new value .
* @ param newValue the maximum number of integer digits to be shown ; if
* less than zero , then zero is used . Subclasses might enforce an
* upper limit to this value appropriate to the numeric type being formatted .
* @ see # getMaximumIntegerDigits */
public void setMaximumIntegerDigits ( int newValue ) { } } | maximumIntegerDigits = Math . max ( 0 , newValue ) ; if ( minimumIntegerDigits > maximumIntegerDigits ) minimumIntegerDigits = maximumIntegerDigits ; |
public class FreePool { /** * Return a mcWrapper from the free pool . */
protected MCWrapper getFreeConnection ( ManagedConnectionFactory managedConnectionFactory , Subject subject , ConnectionRequestInfo cri , int hashCode ) throws ResourceAllocationException { } } | final boolean isTracingEnabled = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTracingEnabled && tc . isEntryEnabled ( ) ) { Tr . entry ( this , tc , "getFreeConnection" , gConfigProps . cfName ) ; } MCWrapper mcWrapper = null ; MCWrapper mcWrapperTemp1 = null ; MCWrapper mcWrapperTemp2 = null ; int mcwlSize = 0 ; int mcwlIndex = 0 ; synchronized ( freeConnectionLockObject ) { mcwlSize = mcWrapperList . size ( ) ; if ( mcwlSize > 0 ) { mcwlIndex = mcwlSize - 1 ; // Remove the first mcWrapper from the list ( Optimistic )
mcWrapperTemp1 = ( MCWrapper ) mcWrapperList . remove ( mcwlIndex ) ; mcWrapperTemp1 . setPoolState ( 0 ) ; } } /* * At this point we may have a mcWrapper and we can release the free pool lock . This is a
* very optimistic view of the world . We assume that the connection will match most of the time to
* have released the lock . */
if ( mcWrapperTemp1 != null ) { if ( hashCode == mcWrapperTemp1 . getSubjectCRIHashCode ( ) ) { mcWrapper = getMCWrapperFromMatch ( subject , cri , managedConnectionFactory , mcWrapperTemp1 ) ; if ( ( ( com . ibm . ejs . j2c . MCWrapper ) mcWrapperTemp1 ) . do_not_reuse_mcw ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "Connection error occurred for this mcw " + mcWrapperTemp1 + ", mcw will not be reuse" ) ; } synchronized ( pm . waiterFreePoolLock ) { cleanupAndDestroyMCWrapper ( mcWrapperTemp1 ) ; synchronized ( freeConnectionLockObject ) { -- numberOfConnectionsAssignedToThisFreePool ; } pm . totalConnectionCount . decrementAndGet ( ) ; if ( ( pm . waiterCount > 0 ) && ( pm . waiterCount > pm . mcWrapperWaiterList . size ( ) ) ) { pm . waiterFreePoolLock . notify ( ) ; } } } } if ( mcWrapper == null ) { /* * In the 5.0.1 release , we had heavier locking which allowed a resource adapter
* to call connection error occurred during mc . matchManagedConnection . I am not
* 100 % sure if this worked correctly in all cases , but is going to make sure it
* works . Any call to getMCWrapperFromMatch with calls mc . matchManagedConnection must
* be in a synchronized pm . waterFreePoolLock if its in a synchronized freeConnectionLockObject .
* Adding the pm . waiterFreePoolLock . */
synchronized ( pm . waiterFreePoolLock ) { synchronized ( freeConnectionLockObject ) { // We need to look through the list , since we didn ' t find a matching connection at
// the end of the list , we need to use get and only remove if we find a matching
// connection .
mcwlSize = mcWrapperList . size ( ) ; if ( mcwlSize > 0 ) { // set this to 0 since mcWrappterTemp1 has already been removed .
mcwlIndex = mcwlSize - 1 ; for ( int i = mcwlIndex ; i >= 0 ; -- i ) { mcWrapperTemp2 = ( MCWrapper ) mcWrapperList . get ( i ) ; if ( hashCode == mcWrapperTemp2 . getSubjectCRIHashCode ( ) ) { mcWrapper = getMCWrapperFromMatch ( subject , cri , managedConnectionFactory , mcWrapperTemp2 ) ; if ( ( ( com . ibm . ejs . j2c . MCWrapper ) mcWrapperTemp2 ) . do_not_reuse_mcw ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "Connection error occurred for this mcw " + mcWrapperTemp2 + ", mcw will not be reuse" ) ; } mcWrapperList . remove ( i ) ; cleanupAndDestroyMCWrapper ( mcWrapperTemp2 ) ; synchronized ( freeConnectionLockObject ) { -- numberOfConnectionsAssignedToThisFreePool ; } pm . totalConnectionCount . decrementAndGet ( ) ; if ( ( pm . waiterCount > 0 ) && ( pm . waiterCount > pm . mcWrapperWaiterList . size ( ) ) ) { pm . waiterFreePoolLock . notify ( ) ; } } } if ( mcWrapper != null ) { mcWrapperList . remove ( i ) ; mcWrapper . setPoolState ( 0 ) ; break ; } } } // end if mcwlSize > 1
} // end synchronized ( freeConnectionLockObject )
/* * We need to add the first non - matching mcWrapper back into the free pool or waiter queue . */
if ( ! ( ( com . ibm . ejs . j2c . MCWrapper ) mcWrapperTemp1 ) . do_not_reuse_mcw ) { // synchronized ( pm . waiterFreePoolLock ) {
// waiter code
if ( ( pm . waiterCount > 0 ) && ( pm . waiterCount > pm . mcWrapperWaiterList . size ( ) ) ) { // there are requests waiting , so notify one of them
pm . mcWrapperWaiterList . add ( mcWrapperTemp1 ) ; pm . waiterFreePoolLock . notify ( ) ; } else { synchronized ( freeConnectionLockObject ) { mcWrapperList . add ( mcWrapperTemp1 ) ; // Add to end of list
mcWrapperTemp1 . setPoolState ( 1 ) ; } } // } / / end synchronized ( waiterFreePoolLock )
} // end synchronized ( waiterFreePoolLock )
if ( ( isTracingEnabled && tc . isDebugEnabled ( ) ) ) { if ( mcWrapper != null ) { ++ fnop_gets ; } } } } else { /* * We found a connection the first try : - ) */
if ( ( isTracingEnabled && tc . isDebugEnabled ( ) ) ) { ++ fop_gets ; } } // end else
} if ( ( isTracingEnabled && tc . isDebugEnabled ( ) ) ) { if ( mcWrapper == null ) { ++ fnop_get_notfound ; } } if ( isTracingEnabled && tc . isEntryEnabled ( ) ) { if ( isTracingEnabled && tc . isDebugEnabled ( ) ) { if ( mcWrapper != null ) { Tr . debug ( this , tc , "Returning mcWrapper " + mcWrapper ) ; } else { Tr . debug ( this , tc , "MCWrapper was not found in Free Pool" ) ; } } Tr . exit ( this , tc , "getFreeConnection" ) ; } return mcWrapper ; |
public class Crossfade { /** * Sets the type of resizing behavior that will be used during the
* transition animation , one of { @ link # RESIZE _ BEHAVIOR _ NONE } and
* { @ link # RESIZE _ BEHAVIOR _ SCALE } .
* @ param resizeBehavior The type of resizing behavior to use when this
* transition is run . */
@ NonNull public Crossfade setResizeBehavior ( int resizeBehavior ) { } } | if ( resizeBehavior >= RESIZE_BEHAVIOR_NONE && resizeBehavior <= RESIZE_BEHAVIOR_SCALE ) { mResizeBehavior = resizeBehavior ; } return this ; |
public class ContextAwareReporter { /** * Stops the { @ link ContextAwareReporter } . If the { @ link ContextAwareReporter } has not been started , or if it has been
* stopped already , and not started since , this is a no - op . */
public final void stop ( ) { } } | if ( ! this . started ) { log . warn ( String . format ( "Reporter %s has already been stopped." , this . name ) ) ; return ; } try { stopImpl ( ) ; this . started = false ; } catch ( Exception exception ) { log . warn ( String . format ( "Reporter %s did not stop correctly." , this . name ) , exception ) ; } |
public class AbstractValueCountingAnalyzerResult { /** * Appends a string representation with a maximum amount of entries
* @ param sb
* the StringBuilder to append to
* @ param maxEntries
* @ return */
protected void appendToString ( StringBuilder sb , ValueCountingAnalyzerResult groupResult , int maxEntries ) { } } | if ( maxEntries != 0 ) { Collection < ValueFrequency > valueCounts = groupResult . getValueCounts ( ) ; for ( ValueFrequency valueCount : valueCounts ) { sb . append ( "\n - " ) ; sb . append ( valueCount . getName ( ) ) ; sb . append ( ": " ) ; sb . append ( valueCount . getCount ( ) ) ; maxEntries -- ; if ( maxEntries == 0 ) { sb . append ( "\n ..." ) ; break ; } } } |
public class SAXDriver { /** * ( temporarily ) package - visible for external entity decls */
String absolutize ( String baseURI , String systemId , boolean nice ) throws MalformedURLException , SAXException { } } | // FIXME normalize system IDs - - when ?
// - Convert to UTF - 8
// - Map reserved and non - ASCII characters to % HH
try { if ( baseURI == null ) { if ( XmlParser . uriWarnings ) { warn ( "No base URI; hope this SYSTEM id is absolute: " + systemId ) ; } return new URL ( systemId ) . toString ( ) ; } else { return new URL ( new URL ( baseURI ) , systemId ) . toString ( ) ; } } catch ( MalformedURLException e ) { // Let unknown URI schemes pass through unless we need
// the JVM to map them to i / o streams for us . . .
if ( ! nice ) { throw e ; } // sometimes sysids for notations or unparsed entities
// aren ' t really URIs . . .
warn ( "Can't absolutize SYSTEM id: " + e . getMessage ( ) ) ; return systemId ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.