idx
int64 0
165k
| question
stringlengths 73
4.15k
| target
stringlengths 5
918
| len_question
int64 21
890
| len_target
int64 3
255
|
|---|---|---|---|---|
159,900
|
@ Override public MPSubscription getSubscription ( String subscriptionName ) throws SIDurableSubscriptionNotFoundException { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getSubscription" , subscriptionName ) ; HashMap durableSubs = _destinationManager . getDurableSubscriptionsTable ( ) ; ConsumerDispatcher cd = null ; synchronized ( durableSubs ) { //Look up the consumer dispatcher for this subId in the system durable subs list cd = ( ConsumerDispatcher ) durableSubs . get ( subscriptionName ) ; // Check that the durable subscription exists if ( cd == null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getSubscription" , "Durable sub not found" ) ; throw new SIDurableSubscriptionNotFoundException ( nls . getFormattedMessage ( "SUBSCRIPTION_DOESNT_EXIST_ERROR_CWSIP0072" , new Object [ ] { subscriptionName , _messageProcessor . getMessagingEngineName ( ) } , null ) ) ; } } MPSubscription mpSubscription = cd . getMPSubscription ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getSubscription" , mpSubscription ) ; return mpSubscription ; }
|
Retrieve the MPSubscription object that represents the named durable subscription
| 325
| 14
|
159,901
|
@ Override public void deregisterConsumerSetMonitor ( ConsumerSetChangeCallback callback ) throws SINotPossibleInCurrentConfigurationException { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "deregisterConsumerSetMonitor" , new Object [ ] { callback } ) ; _messageProcessor . getMessageProcessorMatching ( ) . deregisterConsumerSetMonitor ( this , callback ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "deregisterConsumerSetMonitor" ) ; }
|
Deregisters a previously registered callback .
| 142
| 9
|
159,902
|
public Map getConnectionProperties ( ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getConnectionProperties" ) ; SibTr . exit ( tc , "getConnectionProperties" , _connectionProperties ) ; } return _connectionProperties ; }
|
Retrieve the properties associated with this connection .
| 76
| 9
|
159,903
|
public void setConnectionProperties ( Map connectionProperties ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getConnectionProperties" , connectionProperties ) ; SibTr . exit ( tc , "getConnectionProperties" ) ; } _connectionProperties = connectionProperties ; }
|
Set the properties associated with this connection . Supports Unittest environment .
| 82
| 14
|
159,904
|
private void stopChain ( String name , Event event ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Stop chain event; chain=" + name ) ; } ChannelFramework cf = ChannelFrameworkFactory . getChannelFramework ( ) ; try { if ( cf . isChainRunning ( name ) ) { // stop the chain now.. cf . stopChain ( name , 0L ) ; } } catch ( Exception e ) { FFDCFilter . processException ( e , getClass ( ) . getName ( ) , "stopChain" , new Object [ ] { event , cf } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Error stopping chain; " + e ) ; } } }
|
Stop the explicit chain provided .
| 184
| 6
|
159,905
|
private static WebServiceRefPartialInfo buildPartialInfoFromWebServiceClient ( Class < ? > serviceInterfaceClass ) { WebServiceClient webServiceClient = serviceInterfaceClass . getAnnotation ( WebServiceClient . class ) ; if ( webServiceClient == null ) { return null ; } String className = serviceInterfaceClass . getName ( ) ; String wsdlLocation = webServiceClient . wsdlLocation ( ) ; QName serviceQName = null ; String localPart = webServiceClient . name ( ) ; if ( localPart != null ) { serviceQName = new QName ( webServiceClient . targetNamespace ( ) , localPart ) ; } String handlerChainDeclaringClassName = null ; javax . jws . HandlerChain handlerChainAnnotation = serviceInterfaceClass . getAnnotation ( javax . jws . HandlerChain . class ) ; if ( handlerChainAnnotation != null ) handlerChainDeclaringClassName = serviceInterfaceClass . getName ( ) ; WebServiceRefPartialInfo partialInfo = new WebServiceRefPartialInfo ( className , wsdlLocation , serviceQName , null , handlerChainDeclaringClassName , handlerChainAnnotation ) ; return partialInfo ; }
|
This method will build a ServiceRefPartialInfo object from a class with an
| 261
| 16
|
159,906
|
public Map < ConfigID , List < T > > collectElementsById ( Map < ConfigID , List < T > > map , String defaultId , String pid ) { if ( map == null ) { map = new HashMap < ConfigID , List < T > > ( ) ; } int index = 0 ; for ( T configElement : configElements ) { String id = configElement . getId ( ) ; if ( id == null ) { if ( defaultId != null ) { id = defaultId ; } else { id = generateId ( index ++ ) ; } } // Create a new config ID based on the old one, but using the generated ID if necessary ConfigID configID = configElement . getConfigID ( ) ; configID = new ConfigID ( configID . getParent ( ) , pid , id , configID . getChildAttribute ( ) ) ; List < T > elements = map . get ( configID ) ; if ( elements == null ) { elements = new ArrayList < T > ( ) ; map . put ( configID , elements ) ; } elements . add ( configElement ) ; } return map ; }
|
Collects elements into Lists based on their ID . If an ID is not specified the defaultId will be used . If the defaultId is null an id will be generated .
| 240
| 35
|
159,907
|
public static long readLong ( byte b [ ] , int offset ) { long retValue ; retValue = ( ( long ) b [ offset ++ ] ) << 56 ; retValue |= ( ( long ) b [ offset ++ ] & 0xff ) << 48 ; retValue |= ( ( long ) b [ offset ++ ] & 0xff ) << 40 ; retValue |= ( ( long ) b [ offset ++ ] & 0xff ) << 32 ; retValue |= ( ( long ) b [ offset ++ ] & 0xff ) << 24 ; retValue |= ( ( long ) b [ offset ++ ] & 0xff ) << 16 ; retValue |= ( ( long ) b [ offset ++ ] & 0xff ) << 8 ; retValue |= ( long ) b [ offset ] & 0xff ; return retValue ; }
|
Unserializes a long from a byte array at a specific offset in big - endian order
| 175
| 19
|
159,908
|
public static void writeLong ( byte b [ ] , int offset , long value ) { b [ offset ++ ] = ( byte ) ( value >>> 56 ) ; b [ offset ++ ] = ( byte ) ( value >>> 48 ) ; b [ offset ++ ] = ( byte ) ( value >>> 40 ) ; b [ offset ++ ] = ( byte ) ( value >>> 32 ) ; b [ offset ++ ] = ( byte ) ( value >>> 24 ) ; b [ offset ++ ] = ( byte ) ( value >>> 16 ) ; b [ offset ++ ] = ( byte ) ( value >>> 8 ) ; b [ offset ] = ( byte ) value ; }
|
Serializes a long into a byte array at a specific offset in big - endian order
| 134
| 18
|
159,909
|
public static int readInt ( byte b [ ] , int offset ) { int retValue ; retValue = ( ( int ) b [ offset ++ ] ) << 24 ; retValue |= ( ( int ) b [ offset ++ ] & 0xff ) << 16 ; retValue |= ( ( int ) b [ offset ++ ] & 0xff ) << 8 ; retValue |= ( int ) b [ offset ] & 0xff ; return retValue ; }
|
Unserializes an int from a byte array at a specific offset in big - endian order
| 95
| 19
|
159,910
|
public static void writeInt ( byte [ ] b , int offset , int value ) { b [ offset ++ ] = ( byte ) ( value >>> 24 ) ; b [ offset ++ ] = ( byte ) ( value >>> 16 ) ; b [ offset ++ ] = ( byte ) ( value >>> 8 ) ; b [ offset ] = ( byte ) value ; }
|
Serializes an int into a byte array at a specific offset in big - endian order
| 74
| 18
|
159,911
|
public static short readShort ( byte b [ ] , int offset ) { int retValue ; retValue = b [ offset ++ ] << 8 ; retValue |= b [ offset ] & 0xff ; return ( short ) retValue ; }
|
Unserializes a short from a byte array at a specific offset in big - endian order
| 50
| 19
|
159,912
|
public static void writeShort ( byte b [ ] , int offset , short value ) { b [ offset ++ ] = ( byte ) ( value >>> 8 ) ; b [ offset ] = ( byte ) value ; }
|
Serializes a short into a byte array at a specific offset in big - endian order
| 44
| 18
|
159,913
|
public static void establishSSLContext ( HttpClient client , int port , LibertyServer server ) { establishSSLContext ( client , port , server , null , null , null , null , "TLSv1.2" ) ; }
|
Adds an SSL context to the HttpClient . No trust or client certificate is established and a trust - all policy is assumed .
| 49
| 26
|
159,914
|
public final void fireEvent ( EventObject evt , EventListenerV visitor ) { EventListener [ ] list = getListenerArray ( ) ; for ( int i = 0 ; i < list . length ; i ++ ) { if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) logger . logp ( Level . FINE , CLASS_NAME , "fireEvent" , "Use visitor " + visitor + " to fire event to " + list [ i ] + ", class:" + list [ i ] . getClass ( ) ) ; visitor . fireEvent ( evt , list [ i ] ) ; } }
|
Fire the event to all listeners by allowing the visitor to visit each listener . The visitor is responsible for implementing the actual firing of the event to each listener .
| 152
| 31
|
159,915
|
public final synchronized void addListener ( EventListener l ) { if ( l == null ) { throw new IllegalArgumentException ( "Listener " + l + " is null" ) ; } if ( listeners == EMPTY_LISTENERS ) { listeners = new EventListener [ 1 ] ; listeners [ 0 ] = l ; } else { int i = listeners . length ; EventListener [ ] tmp = new EventListener [ i + 1 ] ; System . arraycopy ( listeners , 0 , tmp , 0 , i ) ; tmp [ i ] = l ; listeners = tmp ; } }
|
Add the listener as a listener to the list .
| 121
| 10
|
159,916
|
public final synchronized void removeListener ( EventListener l ) { if ( l == null ) { throw new IllegalArgumentException ( "Listener " + l + " is null" ) ; } // Is l on the list? int index = - 1 ; for ( int i = listeners . length - 1 ; i >= 0 ; i -- ) { if ( listeners [ i ] . equals ( l ) == true ) { index = i ; break ; } } // If so, remove it if ( index != - 1 ) { EventListener [ ] tmp = new EventListener [ listeners . length - 1 ] ; // Copy the list up to index System . arraycopy ( listeners , 0 , tmp , 0 , index ) ; // Copy from two past the index, up to // the end of tmp (which is two elements // shorter than the old list) if ( index < tmp . length ) System . arraycopy ( listeners , index + 1 , tmp , index , tmp . length - index ) ; // set the listener array to the new array or null listeners = ( tmp . length == 0 ) ? EMPTY_LISTENERS : tmp ; } }
|
Remove the listener .
| 237
| 4
|
159,917
|
private ClassLoader getClassLoaderForInterfaces ( final ClassLoader loader , final Class < ? > [ ] interfaces ) { if ( canSeeAllInterfaces ( loader , interfaces ) ) { LOG . log ( Level . FINE , "current classloader " + loader + " can see all interface" ) ; return loader ; } String sortedNameFromInterfaceArray = getSortedNameFromInterfaceArray ( interfaces ) ; ClassLoader cachedLoader = proxyClassLoaderCache . getProxyClassLoader ( loader , interfaces ) ; if ( canSeeAllInterfaces ( cachedLoader , interfaces ) ) { LOG . log ( Level . FINE , "find required loader from ProxyClassLoader cache with key" + sortedNameFromInterfaceArray ) ; return cachedLoader ; } else { LOG . log ( Level . FINE , "find a loader from ProxyClassLoader cache with interfaces " + sortedNameFromInterfaceArray + " but can't see all interfaces" ) ; for ( Class < ? > currentInterface : interfaces ) { String ifName = currentInterface . getName ( ) ; if ( ! ifName . startsWith ( "org.apache.cxf" ) && ! ifName . startsWith ( "java" ) ) { // remove the stale ProxyClassLoader and recreate one proxyClassLoaderCache . removeStaleProxyClassLoader ( currentInterface ) ; cachedLoader = proxyClassLoaderCache . getProxyClassLoader ( loader , interfaces ) ; } } } return cachedLoader ; }
|
Return a classloader that can see all the given interfaces If the given loader can see all interfaces then it is used . If not then a combined classloader of all interface classloaders is returned .
| 303
| 40
|
159,918
|
private Application getMyfacesApplicationInstance ( ) { FacesContext facesContext = FacesContext . getCurrentInstance ( ) ; if ( facesContext != null ) { ExternalContext externalContext = facesContext . getExternalContext ( ) ; if ( externalContext != null ) { return ( Application ) externalContext . getApplicationMap ( ) . get ( "org.apache.myfaces.application.ApplicationImpl" ) ; } } return null ; }
|
Retrieve the current Myfaces Application Instance lookup on the application map . All methods introduced on jsf 1 . 2 for Application interface should thrown by default UnsupportedOperationException but the ri scan and find the original Application impl and redirect the call to that method instead throwing it allowing application implementations created before jsf 1 . 2 continue working .
| 90
| 69
|
159,919
|
protected ClassLoader buildClassLoader ( final List < URL > urlList , String verifyJarProperty ) { if ( libertyBoot ) { // for liberty boot we just use the class loader that loaded this class return AccessController . doPrivileged ( new PrivilegedAction < ClassLoader > ( ) { @ Override public ClassLoader run ( ) { return getClass ( ) . getClassLoader ( ) ; } } ) ; } final boolean verifyJar ; if ( System . getSecurityManager ( ) == null ) { // do not perform verification if SecurityManager is not installed // unless explicitly enabled. verifyJar = "true" . equalsIgnoreCase ( verifyJarProperty ) ; } else { // always perform verification if SecurityManager is installed. verifyJar = true ; } enableJava2SecurityIfSet ( this . bootProps , urlList ) ; ClassLoader loader = AccessController . doPrivileged ( new PrivilegedAction < ClassLoader > ( ) { @ Override public ClassLoader run ( ) { ClassLoader parent = getClass ( ) . getClassLoader ( ) ; URL [ ] urls = urlList . toArray ( new URL [ urlList . size ( ) ] ) ; if ( verifyJar ) { return new BootstrapChildFirstURLClassloader ( urls , parent ) ; } else { try { return new BootstrapChildFirstJarClassloader ( urls , parent ) ; } catch ( RuntimeException e ) { // fall back to URLClassLoader in case something went wrong return new BootstrapChildFirstURLClassloader ( urls , parent ) ; } } } } ) ; return loader ; }
|
Build the nested classloader containing the OSGi framework and the log provider .
| 334
| 15
|
159,920
|
public static void enableJava2SecurityIfSet ( BootstrapConfig bootProps , List < URL > urlList ) { if ( bootProps . get ( BootstrapConstants . JAVA_2_SECURITY_PROPERTY ) != null ) { NameBasedLocalBundleRepository repo = new NameBasedLocalBundleRepository ( bootProps . getInstallRoot ( ) ) ; urlList . add ( getJarFileFromBundleName ( repo , "com.ibm.ws.org.eclipse.equinox.region" , "[1.0,1.0.100)" ) ) ; // the following three jar files are for serialfilter which are loaded by URLClassloader by bootstrap agent. urlList . add ( getJarFileFromBundleName ( repo , "com.ibm.ws.kernel.instrument.serialfilter" , "[1.0,1.0.100)" ) ) ; addJarFileIfExist ( urlList , bootProps . getInstallRoot ( ) + "/bin/tools/ws-javaagent.jar" ) ; addJarFileIfExist ( urlList , bootProps . getInstallRoot ( ) + "/lib/bootstrap-agent.jar" ) ; Policy wlpPolicy = new WLPDynamicPolicy ( Policy . getPolicy ( ) , urlList ) ; Policy . setPolicy ( wlpPolicy ) ; } }
|
Set Java 2 Security if enabled
| 303
| 6
|
159,921
|
protected static String getProductInfoDisplayName ( ) { String result = null ; try { Map < String , ProductInfo > products = ProductInfo . getAllProductInfo ( ) ; StringBuilder builder = new StringBuilder ( ) ; for ( ProductInfo productInfo : products . values ( ) ) { ProductInfo replaced = productInfo . getReplacedBy ( ) ; if ( productInfo . getReplacedBy ( ) == null || replaced . isReplacedProductLogged ( ) ) { if ( builder . length ( ) != 0 ) { builder . append ( ", " ) ; } builder . append ( productInfo . getDisplayName ( ) ) ; } } result = builder . toString ( ) ; } catch ( ProductInfoParseException e ) { // ignore exceptions-- best effort to get a pretty string } catch ( DuplicateProductInfoException e ) { // ignore exceptions-- best effort to get a pretty string } catch ( ProductInfoReplaceException e ) { // ignore exceptions-- best effort to get a pretty string } return result ; }
|
Return a display name for the currently running server .
| 217
| 10
|
159,922
|
protected Instrumentation getInstrumentation ( ) { ClassLoader cl = ClassLoader . getSystemClassLoader ( ) ; Instrumentation i = findInstrumentation ( cl , "com.ibm.ws.kernel.instrument.BootstrapAgent" ) ; if ( i == null ) i = findInstrumentation ( cl , "wlp.lib.extract.agent.BootstrapAgent" ) ; return i ; }
|
Fetch the BootstrapAgent instrumentation instance from the BootstrapAgent in the system classloader .
| 90
| 20
|
159,923
|
public JspConfiguration createClonedJspConfiguration ( ) { return new JspConfiguration ( configManager , this . getServletVersion ( ) , this . jspVersion , this . isXml , this . isXmlSpecified , this . elIgnored , this . scriptingInvalid ( ) , this . isTrimDirectiveWhitespaces ( ) , this . isDeferredSyntaxAllowedAsLiteral ( ) , this . getTrimDirectiveWhitespaces ( ) , this . getDeferredSyntaxAllowedAsLiteral ( ) , this . elIgnoredSetTrueInPropGrp ( ) , this . elIgnoredSetTrueInPage ( ) , this . getDefaultContentType ( ) , this . getBuffer ( ) , this . isErrorOnUndeclaredNamespace ( ) ) ; }
|
This method is used for creating a configuration for a tag file . The tag file may want to override some properties if it s jsp version in the tld is different than the server version
| 180
| 38
|
159,924
|
public ExpressionFactory getExpressionFactory ( ) { // lazy init here, sync to avoid race condition synchronized ( this ) { if ( expressionFactory == null ) { expressionFactory = ExpressionFactory . newInstance ( ) ; } } //allow JCDI to wrap our expression factory so they can clean up objects after the expressions are through if ( configManager . isJCDIEnabled ( ) ) { if ( jcdiWrappedExpressionFactory == null ) { //wrap expressionFactory ELFactoryWrapperForCDI wrapperExpressionFactory = JSPExtensionFactory . getWrapperExpressionFactory ( ) ; if ( wrapperExpressionFactory != null ) { jcdiWrappedExpressionFactory = ( ExpressionFactory ) wrapperExpressionFactory ; return jcdiWrappedExpressionFactory ; } // this code is left here for historic purposes, not because we will want to do this in this way anymore //jcdiWrappedExpressionFactory = JspShim.createWrappedExpressionFactory(expressionFactory); } else { return jcdiWrappedExpressionFactory ; } } return expressionFactory ; }
|
LIDB4147 - 9 Begin
| 232
| 8
|
159,925
|
protected ClassInfoImpl getDelayableClassInfo ( Type type ) { String typeClassName = type . getClassName ( ) ; if ( tc . isDebugEnabled ( ) ) { // Type 'toString' answers the descriptor; // show the type class name, too, for clarity. Tr . debug ( tc , MessageFormat . format ( "[ {0} ] ENTER [ {1} ] [ {2} ]" , new Object [ ] { getHashText ( ) , type , typeClassName } ) ) ; } // Now that we have the type class name, pass it along instead // of recomputing when needed. ClassInfoImpl classInfo ; String classInfoCase ; int sort = type . getSort ( ) ; if ( sort == Type . ARRAY ) { classInfo = getArrayClassInfo ( typeClassName , type ) ; classInfoCase = "array class" ; } else if ( sort == Type . OBJECT ) { classInfo = getDelayableClassInfo ( typeClassName , DO_NOT_ALLOW_PRIMITIVE ) ; if ( classInfo . isJavaClass ( ) ) { if ( classInfo . isDelayedClass ( ) ) { classInfoCase = "java delayed" ; } else { classInfoCase = "java non-delayed" ; } } else { if ( classInfo . isDelayedClass ( ) ) { classInfoCase = "non-java delayed" ; } else { classInfoCase = "non-java non-delayed" ; } } } else { classInfo = getPrimitiveClassInfo ( typeClassName , type ) ; classInfoCase = "primitive class" ; } if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] RETURN [ {1} ] [ {2} ]" , new Object [ ] { getHashText ( ) , classInfo . getHashText ( ) , classInfoCase } ) ) ; } return classInfo ; }
|
For array types the previous implementation used the element name .
| 427
| 11
|
159,926
|
public ArrayClassInfo getArrayClassInfo ( String typeClassName , Type arrayType ) { ClassInfoImpl elementClassInfo = getDelayableClassInfo ( arrayType . getElementType ( ) ) ; return new ArrayClassInfo ( typeClassName , elementClassInfo ) ; }
|
Note that this will recurse as long as the element type is still an array type .
| 59
| 18
|
159,927
|
protected boolean addClassInfo ( NonDelayedClassInfo classInfo ) { boolean didAdd ; if ( classInfo . isJavaClass ( ) ) { didAdd = basicPutJavaClassInfo ( classInfo ) ; } else if ( classInfo . isAnnotationPresent ( ) || classInfo . isFieldAnnotationPresent ( ) || classInfo . isMethodAnnotationPresent ( ) ) { didAdd = basicPutAnnotatedClassInfo ( classInfo ) ; } else { didAdd = basicPutClassInfo ( classInfo ) ; // Note: 'addAsFirst' must only be performed for non-java, non-annotated // classes. Both java and annotated classes are put in separate // storage which is never swapped. Non-java, non-annotated classes // are swappable, and are maintained in a last-access ordered // linked list. // // The current addition counts as an access, meaning, the class // info is placed into the last-access list as the first element. if ( didAdd ) { addAsFirst ( classInfo ) ; } } if ( didAdd ) { ClassInfoImpl delayedClassInfo = associate ( classInfo ) ; discardRef ( delayedClassInfo ) ; // No current use for the return value; discard it. } return didAdd ; }
|
Do update the LRU state .
| 270
| 7
|
159,928
|
protected void addAsFirst ( NonDelayedClassInfo classInfo ) { String methodName = "addAsFirst" ; boolean doLog = tc . isDebugEnabled ( ) ; String useHashText = ( doLog ? getHashText ( ) : null ) ; String useClassHashText = ( doLog ? classInfo . getHashText ( ) : null ) ; if ( doLog ) { logLinks ( methodName , classInfo ) ; } if ( firstClassInfo == null ) { if ( doLog ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] Adding [ {1} ] to empty" , new Object [ ] { useHashText , useClassHashText } ) ) ; } firstClassInfo = classInfo ; lastClassInfo = classInfo ; // last == first // lastClassInfoName == firstClassInfoName // first.next remains null // first.prev remains null } else if ( firstClassInfo == lastClassInfo ) { if ( doLog ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] Adding [ {1} ] to singleton [ {2} ]" , new Object [ ] { useHashText , useClassHashText , firstClassInfo . getHashText ( ) } ) ) ; } firstClassInfo = classInfo ; firstClassInfo . setNextClassInfo ( lastClassInfo ) ; lastClassInfo . setPriorClassInfo ( classInfo ) ; // last != first // lastClassInfoName != firstClassInfoName // first.prev == null // first.next == last // last.prev == first // last.next == null } else { if ( doLog ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] Adding [ {1} ] to multitude [ {2} ]" , new Object [ ] { useHashText , useClassHashText , firstClassInfo . getHashText ( ) } ) ) ; } classInfo . setNextClassInfo ( firstClassInfo ) ; firstClassInfo . setPriorClassInfo ( classInfo ) ; firstClassInfo = classInfo ; if ( classInfos . size ( ) > ClassInfoCache . classInfoCacheLimit ) { NonDelayedClassInfo oldLastClassInfo = lastClassInfo ; String lastClassName = lastClassInfo . getName ( ) ; classInfos . remove ( lastClassName ) ; discardRef ( lastClassName ) ; // No current use for the old last class name; discard it. lastClassInfo = oldLastClassInfo . getPriorClassInfo ( ) ; lastClassInfo . setNextClassInfo ( null ) ; // oldLastClassInfo.setNextClassInfo(null); oldLastClassInfo . setPriorClassInfo ( null ) ; if ( doLog ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] new last [ {1} ] displaces [ {2} ]" , new Object [ ] { useHashText , lastClassInfo . getHashText ( ) , oldLastClassInfo . getHashText ( ) } ) ) ; } DelayedClassInfo delayedClassInfo = oldLastClassInfo . getDelayedClassInfo ( ) ; if ( delayedClassInfo != null ) { if ( doLog ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] Clearing link on displaced [ {1} ]" , new Object [ ] { useHashText , oldLastClassInfo . getHashText ( ) } ) ) ; } delayedClassInfo . setClassInfo ( null ) ; oldLastClassInfo . setDelayedClassInfo ( null ) ; } } } }
|
put us over the maximum size trim off the last element .
| 769
| 12
|
159,929
|
public void makeFirst ( NonDelayedClassInfo classInfo ) { String methodName = "makeFirst" ; boolean doLog = tc . isDebugEnabled ( ) ; String useHashText = ( doLog ? getHashText ( ) : null ) ; String useClassHashText = ( doLog ? classInfo . getHashText ( ) : null ) ; if ( doLog ) { logLinks ( methodName , classInfo ) ; } if ( classInfo == firstClassInfo ) { if ( doLog ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] Already first [ {1} ]" , new Object [ ] { useHashText , useClassHashText } ) ) ; } return ; } else if ( classInfo == lastClassInfo ) { if ( doLog ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] Moving from last [ {1} ]" , new Object [ ] { useHashText , useClassHashText } ) ) ; Tr . debug ( tc , MessageFormat . format ( "[ {0} ] Old first [ {1} ]" , new Object [ ] { useHashText , firstClassInfo . getHashText ( ) } ) ) ; } lastClassInfo = classInfo . getPriorClassInfo ( ) ; lastClassInfo . setNextClassInfo ( null ) ; if ( doLog ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] New last [ {1} ]" , new Object [ ] { useHashText , lastClassInfo . getHashText ( ) } ) ) ; } firstClassInfo . setPriorClassInfo ( classInfo ) ; classInfo . setPriorClassInfo ( null ) ; classInfo . setNextClassInfo ( firstClassInfo ) ; firstClassInfo = classInfo ; } else { if ( doLog ) { Tr . debug ( tc , MessageFormat . format ( "[ {0} ] Moving from middle [ {1} ]" , new Object [ ] { useHashText , useClassHashText } ) ) ; Tr . debug ( tc , MessageFormat . format ( "[ {0} ] Old first [ {1} ]" , new Object [ ] { useHashText , firstClassInfo . getHashText ( ) } ) ) ; } NonDelayedClassInfo currentPrior = classInfo . getPriorClassInfo ( ) ; NonDelayedClassInfo currentNext = classInfo . getNextClassInfo ( ) ; currentPrior . setNextClassInfo ( currentNext ) ; currentNext . setPriorClassInfo ( currentPrior ) ; firstClassInfo . setPriorClassInfo ( classInfo ) ; classInfo . setNextClassInfo ( firstClassInfo ) ; classInfo . setPriorClassInfo ( null ) ; firstClassInfo = classInfo ; } }
|
The class info is last or the class info is somewhere in the middle .
| 592
| 15
|
159,930
|
@ Override @ FFDCIgnore ( MalformedURLException . class ) public URL getResource ( ) { String useRelPath = getRelativePath ( ) ; if ( ( zipEntryData == null ) || zipEntryData . isDirectory ( ) ) { useRelPath += "/" ; } URI entryUri = rootContainer . createEntryUri ( useRelPath ) ; if ( entryUri == null ) { return null ; } try { return entryUri . toURL ( ) ; // throws MalformedURLException } catch ( MalformedURLException e ) { // In some cases an attempt is made to get a resource using the wsjar protocol // after the protocol has been deregistered. It would be too much of a behavior change // to properly enforce the dependency on the wsjar protocol for all components. // Instead, only log a debug statement if a MalformedURLException is caught during // shutdown. if ( FrameworkState . isStopping ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "MalformedURLException during OSGi framework stop." , e . getMessage ( ) ) ; } else { FFDCFilter . processException ( e , getClass ( ) . getName ( ) , "269" ) ; } } return null ; } }
|
Answer the URL of this entry .
| 298
| 7
|
159,931
|
@ Override public InputStream getInputStream ( ) throws IOException { if ( ( zipEntryData == null ) || zipEntryData . isDirectory ( ) ) { return null ; } final ZipFileHandle zipFileHandle = rootContainer . getZipFileHandle ( ) ; // throws IOException ZipFile zipFile = zipFileHandle . open ( ) ; // The open must have a balancing close. That should be done by the caller. // In the worst case, 'finalize' makes sure it happens. final InputStream baseInputStream ; try { baseInputStream = zipFileHandle . getInputStream ( zipFile , zipEntryData . getPath ( ) ) ; // throws IOException } catch ( Throwable th ) { // Need to close here, since the caller never receives a wrapped // input stream to close. zipFileHandle . close ( ) ; throw th ; } if ( baseInputStream == null ) { throw new FileNotFoundException ( "Zip file [ " + zipFile . getName ( ) + " ]" + " failed to provide input stream for entry [ " + zipEntryData . getPath ( ) + " ]" ) ; } InputStream inputStream = new InputStream ( ) { private final InputStream wrappedInputStream = baseInputStream ; // Object lifecycle ... @ Override public synchronized void finalize ( ) throws Throwable { close ( ) ; // throws IOException super . finalize ( ) ; // throws Throwable } // Close ... private volatile boolean isClosed ; @ Override public void close ( ) throws IOException { if ( ! isClosed ) { synchronized ( this ) { if ( ! isClosed ) { try { wrappedInputStream . close ( ) ; // throws IOException } catch ( IOException e ) { // FFDC } zipFileHandle . close ( ) ; isClosed = true ; } } } } // Delegate methods ... @ Trivial @ Override public int read ( byte [ ] b ) throws IOException { return wrappedInputStream . read ( b ) ; } @ Trivial @ Override public int read ( byte [ ] b , int off , int len ) throws IOException { return wrappedInputStream . read ( b , off , len ) ; // throws IOException } @ Trivial @ Override public long skip ( long n ) throws IOException { return wrappedInputStream . skip ( n ) ; // throws IOException } @ Trivial @ Override public int available ( ) throws IOException { return wrappedInputStream . available ( ) ; // throws IOException } @ SuppressWarnings ( "sync-override" ) @ Trivial @ Override public void mark ( int readlimit ) { wrappedInputStream . mark ( readlimit ) ; } @ SuppressWarnings ( "sync-override" ) @ Trivial @ Override public void reset ( ) throws IOException { wrappedInputStream . reset ( ) ; // throws IOException } @ Trivial @ Override public boolean markSupported ( ) { return wrappedInputStream . markSupported ( ) ; } @ Override public int read ( ) throws IOException { return wrappedInputStream . read ( ) ; // throws IOException } } ; return inputStream ; }
|
Obtain an input stream for the entry .
| 678
| 9
|
159,932
|
@ Override public ArtifactContainer getEnclosingContainer ( ) { // The enclosing container may be set when the entry is // created, in which case the enclosing container lock is null // and is never needed. // // The entry can be created in these ways: // // ZipFileContainer.createEntry(ArtifactContainer, String, String, String, int, ZipEntry) // -- A caching factory method of zip file container entries. // -- Caches intermediate entries. Non-container leaf entries are not cached. // // That is invoked in several ways: // // By: // ZipFileContainer.createEntry(String, String, String) // Which is invoked by: // ZipFileEntry.getEnclosingContainer() // -- Used when the enclosing container is not set when the entry was // created. This happens when the entry was created with a null // enclosing container, which only happens when the entry is created // from 'ZipFileContainer.getEntry'. // -- This is the core non-trivial step of resolving the enclosing container. // -- As a first step, if the parent is the root zip container, that is // obtained as the enclosing container. // -- As a second step, the enclosing entry of this entry is obtained, then // the enclosing container is obtained by interpreting that entry as a // container. // -- The enclosing container must be obtained from the enclosing entry // since those are cached and re-used, and since the reference to those // keep a reference to their interpreted container. // // By zip container iterators: // // -- com.ibm.ws.artifact.zip.internal.ZipFileEntry.getEnclosingContainer() // ZipFileContainer.RootZipFileEntryIterator.next() // -- always provides the root zip container as the enclosing container // ZipFileNestedDirContainer.NestedZipFileEntryIterator.next() // -- always provides the nested zip container as the enclosing container // // ZipFileContainer.getEntry(String, boolean) // -- always provides null as the enclosing container // // As a public API, 'getEnclosingContainer' may be invoked externally. // Locally, 'getEnclosingContainer' is only invoked from: // ZipFileEntry.convertToContainer(boolean) // That is also a public API. // Locally, that is only invoked from: // ZipFileEntry.convertToContainer(boolean) // That is also a public API. if ( enclosingContainer == null ) { synchronized ( this ) { // Having a new object to guard 'enclosingContainer' is too many objects. if ( enclosingContainer == null ) { String a_enclosingPath = PathUtils . getParent ( a_path ) ; int parentLen = a_enclosingPath . length ( ) ; if ( parentLen == 1 ) { // a_enclosingPath == "/" enclosingContainer = rootContainer ; } else { String r_enclosingPath = a_enclosingPath . substring ( 1 ) ; int lastSlash = r_enclosingPath . lastIndexOf ( ' ' ) ; String enclosingName ; if ( lastSlash == - 1 ) { enclosingName = r_enclosingPath ; // r_enclosingPath = "name" } else { enclosingName = r_enclosingPath . substring ( lastSlash + 1 ) ; // r_enclosingPath = "parent/child/name" } ZipFileEntry entryInEnclosingContainer = rootContainer . createEntry ( enclosingName , a_enclosingPath ) ; enclosingContainer = entryInEnclosingContainer . convertToLocalContainer ( ) ; } } } } return enclosingContainer ; }
|
Answer the enclosing container of this entry .
| 800
| 9
|
159,933
|
public char normalize ( char currentChar ) { if ( NORMALIZE_UPPER == getNormalization ( ) ) { return toUpper ( currentChar ) ; } if ( NORMALIZE_LOWER == getNormalization ( ) ) { return toLower ( currentChar ) ; } return currentChar ; }
|
Take the input character and normalize based on this normalizer instance .
| 66
| 14
|
159,934
|
static public char normalize ( char input , int format ) { if ( NORMALIZE_LOWER == format ) { return toLower ( input ) ; } if ( NORMALIZE_UPPER == format ) { return toUpper ( input ) ; } return input ; }
|
Take the input character and normalize based on the input format .
| 58
| 13
|
159,935
|
public void init ( IFilterConfig filterConfig ) throws ServletException { try { // init the filter instance _filterState = FILTER_STATE_INITIALIZING ; // LIDB-3598: begin this . _filterConfig = filterConfig ; if ( _eventSource != null && _eventSource . hasFilterListeners ( ) ) { _eventSource . onFilterStartInit ( getFilterEvent ( ) ) ; // LIDB-3598: end _filterInstance . init ( filterConfig ) ; // LIDB-3598: begin _eventSource . onFilterFinishInit ( getFilterEvent ( ) ) ; // LIDB-3598: end } else { _filterInstance . init ( filterConfig ) ; } _filterState = FILTER_STATE_AVAILABLE ; } catch ( Throwable th ) { if ( _eventSource != null && _eventSource . hasFilterErrorListeners ( ) ) { FilterErrorEvent errorEvent = getFilterErrorEvent ( th ) ; _eventSource . onFilterInitError ( errorEvent ) ; } com . ibm . wsspi . webcontainer . util . FFDCWrapper . processException ( th , "com.ibm.ws.webcontainer.filter.FilterInstanceWrapper.init" , "111" , this ) ; _filterState = FILTER_STATE_UNAVAILABLE ; throw new ServletException ( MessageFormat . format ( "Filter [{0}]: could not be initialized" , new Object [ ] { _filterName } ) , th ) ; } }
|
Initializes the filter wrapper and the underlying filter instance
| 336
| 10
|
159,936
|
public void destroy ( ) throws ServletException { try { // destroy the filter instance _filterState = FILTER_STATE_DESTROYING ; for ( int i = 0 ; ( nServicing . get ( ) > 0 ) && i < 60 ; i ++ ) { try { if ( i == 0 ) { logger . logp ( Level . INFO , CLASS_NAME , "destroy" , "waiting.to.destroy.filter.[{0}]" , _filterName ) ; } Thread . sleep ( 1000 ) ; } catch ( InterruptedException e ) { com . ibm . wsspi . webcontainer . util . FFDCWrapper . processException ( e , "com.ibm.ws.webcontainer.servlet.ServletInstance.destroy" , "377" , this ) ; } } if ( _eventSource != null && _eventSource . hasFilterListeners ( ) ) { // LIDB-3598: begin _eventSource . onFilterStartDestroy ( getFilterEvent ( ) ) ; // LIDB-3598: end _filterInstance . destroy ( ) ; // LIDB-3598: begin _eventSource . onFilterFinishDestroy ( getFilterEvent ( ) ) ; // LIDB-3598: end } else { _filterInstance . destroy ( ) ; } _filterState = FILTER_STATE_DESTROYED ; if ( null != _managedObject ) { _managedObject . release ( ) ; } } catch ( Throwable th ) { if ( _eventSource != null && _eventSource . hasFilterErrorListeners ( ) ) { FilterErrorEvent errorEvent = getFilterErrorEvent ( th ) ; _eventSource . onFilterDestroyError ( errorEvent ) ; } com . ibm . wsspi . webcontainer . util . FFDCWrapper . processException ( th , "com.ibm.ws.webcontainer.filter.FilterInstanceWrapper.destroy" , "173" , this ) ; _filterState = FILTER_STATE_UNAVAILABLE ; throw new ServletException ( MessageFormat . format ( "Filter [{0}]: could not be destroyed" , new Object [ ] { _filterName } ) , th ) ; } }
|
Destroys the filter wrapper and the underlying filter instance
| 479
| 11
|
159,937
|
protected synchronized void activate ( ComponentContext cc ) { pipelineRef . activate ( cc ) ; securityServiceRef . activate ( cc ) ; insertJMXSecurityFilter ( ) ; }
|
Insert the JMX security filter upon activation . This will only happen if we have both the MBeanServerPipeline and the SecurityService .
| 36
| 30
|
159,938
|
protected synchronized void deactivate ( ComponentContext cc ) { removeJMXSecurityFilter ( ) ; pipelineRef . deactivate ( cc ) ; securityServiceRef . deactivate ( cc ) ; }
|
Remove the JMX security filter upon deactivation .
| 39
| 10
|
159,939
|
private void throwAuthzException ( ) throws SecurityException { SubjectManager subjectManager = new SubjectManager ( ) ; String name = "UNAUTHENTICATED" ; if ( subjectManager . getInvocationSubject ( ) != null ) { name = subjectManager . getInvocationSubject ( ) . getPrincipals ( ) . iterator ( ) . next ( ) . getName ( ) ; } Tr . audit ( tc , "MANAGEMENT_SECURITY_AUTHZ_FAILED" , name , "MBeanAccess" , requiredRoles ) ; String message = Tr . formatMessage ( tc , "MANAGEMENT_SECURITY_AUTHZ_FAILED" , name , "MBeanAccess" , requiredRoles ) ; throw new SecurityException ( message ) ; }
|
Throwing a SecurityException as not all of the methods that need protection throw an MBeanException . We can change this if we need to .
| 172
| 30
|
159,940
|
protected void setupNotificationArea ( ) throws Throwable { final String sourceMethod = "setupNotificationArea" ; URL notificationsURL = null ; HttpsURLConnection connection = null ; try { // Get URL for creating a notification area notificationsURL = serverConnection . getNotificationsURL ( ) ; if ( logger . isLoggable ( Level . FINEST ) ) { logger . logp ( Level . FINEST , logger . getName ( ) , sourceMethod , "[" + RESTClientMessagesUtil . getObjID ( this ) + "] About to call notificationURL: " + notificationsURL ) ; } // Get connection to server connection = serverConnection . getConnection ( notificationsURL , HttpMethod . POST , true ) ; // Create NotificationSettings object NotificationSettings ns = new NotificationSettings ( ) ; ns . deliveryInterval = serverConnection . getConnector ( ) . getNotificationDeliveryInterval ( ) ; ns . inboxExpiry = serverConnection . getConnector ( ) . getNotificationInboxExpiry ( ) ; // Write CreateMBean JSON to connection output stream OutputStream output = connection . getOutputStream ( ) ; converter . writeNotificationSettings ( output , ns ) ; output . flush ( ) ; } catch ( ConnectException ce ) { // Server is down; not a client bug throw ce ; } catch ( IOException io ) { throw serverConnection . getRequestErrorException ( sourceMethod , io , notificationsURL ) ; } // Check response code from server final int responseCode = connection . getResponseCode ( ) ; if ( logger . isLoggable ( Level . FINEST ) ) { logger . logp ( Level . FINEST , logger . getName ( ) , sourceMethod , "Received responseCode: " + responseCode ) ; } switch ( responseCode ) { case HttpURLConnection . HTTP_OK : JSONConverter converter = JSONConverter . getConverter ( ) ; try { // Process and return server response, which should be a NotificationArea NotificationArea area = converter . readNotificationArea ( connection . getInputStream ( ) ) ; inboxURL = new DynamicURL ( serverConnection . connector , area . inboxURL ) ; registrationsURL = new DynamicURL ( serverConnection . connector , area . registrationsURL ) ; serverRegistrationsURL = new DynamicURL ( serverConnection . connector , area . serverRegistrationsURL ) ; notificationClientURL = new DynamicURL ( serverConnection . connector , area . clientURL ) ; if ( logger . isLoggable ( Level . FINER ) ) { logger . logp ( Level . FINER , logger . getName ( ) , "setupNotificationArea" , "Successfully setup inboxURL: " + inboxURL . getURL ( ) ) ; } break ; } catch ( Exception e ) { throw serverConnection . getResponseErrorException ( sourceMethod , e , notificationsURL ) ; } finally { JSONConverter . returnConverter ( converter ) ; } case HttpURLConnection . HTTP_NOT_FOUND : throw new IOException ( RESTClientMessagesUtil . getMessage ( RESTClientMessagesUtil . URL_NOT_FOUND ) ) ; case HttpURLConnection . HTTP_UNAVAILABLE : case HttpURLConnection . HTTP_BAD_REQUEST : case HttpURLConnection . HTTP_INTERNAL_ERROR : // Server response should be a serialized Throwable throw serverConnection . getServerThrowable ( sourceMethod , connection ) ; case HttpURLConnection . HTTP_UNAUTHORIZED : case HttpURLConnection . HTTP_FORBIDDEN : throw serverConnection . getBadCredentialsException ( responseCode , connection ) ; default : throw serverConnection . getResponseCodeErrorException ( sourceMethod , responseCode , connection ) ; } }
|
we want to avoid cycles .
| 805
| 6
|
159,941
|
private void sendClosingSignal ( ) { URL clientURL = null ; HttpsURLConnection connection = null ; try { // Get the appropriate URL to delete notification client if ( serverConnection . serverVersion >= 4 ) { //V4+ clients use /{clientID} to delete the notification client clientURL = getNotificationClientURL ( ) ; } else { //Pre-V4 clients use /{clientID}/inbox to delete the notification client clientURL = getInboxURL ( ) ; } if ( logger . isLoggable ( Level . FINEST ) ) { logger . logp ( Level . FINEST , logger . getName ( ) , "sendClosingSignal" , "Making a call to delete inbox [" + clientURL + "] from [" + RESTClientMessagesUtil . getObjID ( this ) + "]" ) ; } // Get connection to server connection = serverConnection . getConnection ( clientURL , HttpMethod . DELETE , true ) ; connection . setReadTimeout ( serverConnection . getConnector ( ) . getReadTimeout ( ) ) ; // Check response code from server int responseCode = 0 ; try { responseCode = connection . getResponseCode ( ) ; } catch ( ConnectException ce ) { logger . logp ( Level . FINE , logger . getName ( ) , "sendClosingSignal" , ce . getMessage ( ) , ce ) ; } if ( logger . isLoggable ( Level . FINEST ) ) { logger . logp ( Level . FINEST , logger . getName ( ) , "sendClosingSignal" , "Response code: " + responseCode ) ; } } catch ( IOException io ) { logger . logp ( Level . FINE , logger . getName ( ) , "sendClosingSignal" , io . getMessage ( ) , io ) ; } }
|
We don t throw any errors because the connector is about to be closed .
| 399
| 15
|
159,942
|
private static Type getAsynchronizedGenericType ( Object targetObject ) { if ( targetObject instanceof java . util . Collection ) { Class < ? extends java . util . Collection > rawType = ( Class < ? extends Collection > ) targetObject . getClass ( ) ; Class < ? > actualType = Object . class ; if ( ( ( java . util . Collection < ? > ) targetObject ) . size ( ) > 0 ) { Object element = ( ( java . util . Collection < ? > ) targetObject ) . iterator ( ) . next ( ) ; actualType = element . getClass ( ) ; } return new ParameterizedType ( ) { private Type actualType , rawType ; public ParameterizedType setTypes ( Type actualType , Type rawType ) { this . actualType = actualType ; this . rawType = rawType ; return this ; } @ Override public Type [ ] getActualTypeArguments ( ) { return new Type [ ] { actualType } ; } @ Override public Type getRawType ( ) { return rawType ; } @ Override public Type getOwnerType ( ) { return null ; } } . setTypes ( actualType , rawType ) ; } else return targetObject . getClass ( ) ; }
|
Hack to generate a type class for collection object .
| 265
| 10
|
159,943
|
@ Override public synchronized FailureScope currentFailureScope ( ) { if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "currentFailureScope" , this ) ; if ( _currentFailureScope == null ) { _currentFailureScope = new FileFailureScope ( ) ; } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "currentFailureScope" , _currentFailureScope ) ; return _currentFailureScope ; }
|
Invoked by a client service to determine the current FailureScope . This is defined as a FailureScope that identifies the current point of execution . In practice this means the current server on distributed or server region on 390 .
| 96
| 43
|
159,944
|
@ Override public void registerRecoveryEventListener ( RecoveryEventListener rel ) /* @MD19638A */ { if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "registerRecoveryEventListener" , rel ) ; RegisteredRecoveryEventListeners . instance ( ) . add ( rel ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "registerRecoveryEventListener" ) ; }
|
Register the recovery event callback listener .
| 93
| 7
|
159,945
|
@ Override public boolean isHAEnabled ( ) { if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "isHAEnabled" ) ; final boolean haEnabled = Configuration . HAEnabled ( ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "isHAEnabled" , haEnabled ) ; return haEnabled ; }
|
This method allows a client service to determine if High Availability support has been enabled for the local cluster .
| 77
| 20
|
159,946
|
public void registerCallback ( UOWScopeCallback callback ) { if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "registerCallback" , new Object [ ] { callback , this } ) ; _callbackManager . addCallback ( callback ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "registerCallback" ) ; }
|
Register users who want notification on UserTransaction Begin and End
| 77
| 11
|
159,947
|
public void unregisterCallback ( UOWScopeCallback callback ) { if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "unregisterCallback" , new Object [ ] { callback , this } ) ; _callbackManager . removeCallback ( callback ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "unregisterCallback" ) ; }
|
unregister users who want notification on UserTransaction Begin and End
| 80
| 12
|
159,948
|
private void connectCommon ( Object _udpRequestContextObject ) throws IOException { String localAddress = "*" ; int localPort = 0 ; Map < Object , Object > vcStateMap = getVirtualConnection ( ) . getStateMap ( ) ; if ( vcStateMap != null ) { // // Size of the buffer the channel should use to read. // String value = ( String ) vcStateMap . get ( UDPConfigConstants . CHANNEL_RCV_BUFF_SIZE ) ; if ( value != null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , UDPConfigConstants . CHANNEL_RCV_BUFF_SIZE + " " + value ) ; } cfg . setChannelReceiveBufferSize ( Integer . parseInt ( value ) ) ; } // // Receive buffer size. // value = ( String ) vcStateMap . get ( UDPConfigConstants . RCV_BUFF_SIZE ) ; if ( value != null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , UDPConfigConstants . RCV_BUFF_SIZE + " " + value ) ; } cfg . setReceiveBufferSize ( Integer . parseInt ( value ) ) ; } // // Send buffer size // value = ( String ) vcStateMap . get ( UDPConfigConstants . SEND_BUFF_SIZE ) ; if ( value != null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , UDPConfigConstants . SEND_BUFF_SIZE + " " + value ) ; } cfg . setSendBufferSize ( Integer . parseInt ( value ) ) ; } } // // Allow for this to be null. If the requestContext is null, then just // allow The NetworkLayer to find the port to listen on. // if ( _udpRequestContextObject != null ) { final UDPRequestContext udpRequestContext = ( UDPRequestContext ) _udpRequestContextObject ; final InetSocketAddress addr = udpRequestContext . getLocalAddress ( ) ; localAddress = addr . getAddress ( ) . getHostAddress ( ) ; localPort = addr . getPort ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "connect with local address: " + localAddress + " local port: " + localPort ) ; } } udpNetworkLayer = new UDPNetworkLayer ( udpChannel , workQueueMgr , localAddress , localPort ) ; udpNetworkLayer . initDatagramSocket ( getVirtualConnection ( ) ) ; udpNetworkLayer . setConnLink ( this ) ; }
|
Common connect logic between sync and async connect requests .
| 618
| 10
|
159,949
|
public String retrieveEndpointName ( J2EEName j2eeName ) { for ( Entry < String , J2EEName > entry : endpointNameJ2EENameMap . entrySet ( ) ) { if ( entry . getValue ( ) . equals ( j2eeName ) ) { return entry . getKey ( ) ; } } return null ; }
|
Get the endpoint name by j2eeName
| 79
| 9
|
159,950
|
private final void _tryUnlink ( ) { if ( 0 >= _cursorCount && _state == LOGICALLY_UNLINKED ) { _previousLink . _nextLink = _nextLink ; _nextLink . _previousLink = _previousLink ; _previousLink = null ; _nextLink = null ; // Defect 240039 //_parent = null; _state = PHYSICALLY_UNLINKED ; } }
|
Attempt to physically unlink the receiver if appropriate . MUST BE CALLED UNDER _parent MONITOR .
| 98
| 21
|
159,951
|
public final Link getNextLink ( ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( this , tc , "getNextLink" , _positionString ( ) ) ; } Link nextLink = null ; LinkedList parent = _parent ; if ( null != parent ) { nextLink = _parent . getNextLink ( this ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . exit ( this , tc , "getNextLink" , nextLink ) ; } return nextLink ; }
|
Navigate to the next logical link . This version is for use with non - cursored navigation .
| 138
| 20
|
159,952
|
public void xmlWriteOn ( FormattedWriter writer ) throws IOException { String name = "link" ; writer . write ( "<" ) ; writer . write ( name ) ; xmlWriteAttributesOn ( writer ) ; writer . write ( " />" ) ; }
|
Default XML output .
| 55
| 4
|
159,953
|
@ SuppressWarnings ( "rawtypes" ) public void doPostConstruct ( Class clazz , List < LifecycleCallback > postConstructs ) throws InjectionException { mainClassName = clazz . getName ( ) ; doPostConstruct ( clazz , postConstructs , null ) ; }
|
Processes the PostConstruct callback method for the application main class
| 64
| 12
|
159,954
|
public void doPostConstruct ( Object instance , List < LifecycleCallback > postConstructs ) throws InjectionException { doPostConstruct ( instance . getClass ( ) , postConstructs , instance ) ; }
|
Processes the PostConstruct callback method for the login callback handler class
| 43
| 13
|
159,955
|
public void doPreDestroy ( Object instance , List < LifecycleCallback > preDestroy ) throws InjectionException { doPreDestroy ( instance . getClass ( ) , preDestroy , instance ) ; }
|
Processes the PreDestroy callback method for the login callback handler class
| 41
| 13
|
159,956
|
@ SuppressWarnings ( "rawtypes" ) private void doPostConstruct ( Class clazz , List < LifecycleCallback > postConstructs , Object instance ) throws InjectionException { if ( ! metadataComplete && clazz . getSuperclass ( ) != null ) { doPostConstruct ( clazz . getSuperclass ( ) , postConstructs , instance ) ; } String classname = clazz . getName ( ) ; String methodName = getMethodNameFromDD ( postConstructs , classname ) ; if ( methodName != null ) { invokeMethod ( clazz , methodName , instance ) ; } else if ( ! metadataComplete ) { Method method = getAnnotatedPostConstructMethod ( clazz ) ; if ( method != null ) { invokeMethod ( clazz , method . getName ( ) , instance ) ; } } }
|
Processes the PostConstruct callback method
| 179
| 7
|
159,957
|
@ SuppressWarnings ( "rawtypes" ) public Method getAnnotatedMethod ( Class clazz , Class < ? extends Annotation > annotationClass ) { Method m = null ; Method [ ] methods = clazz . getDeclaredMethods ( ) ; for ( int i = 0 ; i < methods . length ; i ++ ) { Annotation [ ] a = methods [ i ] . getAnnotations ( ) ; if ( a != null ) { for ( int j = 0 ; j < a . length ; j ++ ) { if ( a [ j ] . annotationType ( ) == annotationClass ) { if ( m == null ) { m = methods [ i ] ; } else { Tr . warning ( tc , "DUPLICATE_CALLBACK_METHOD_CWWKC2454W" , new Object [ ] { methods [ i ] . getName ( ) , clazz . getName ( ) } ) ; } } } } } return m ; }
|
Gets the annotated method from the class object .
| 206
| 11
|
159,958
|
@ SuppressWarnings ( { "rawtypes" , "unchecked" } ) public void invokeMethod ( final Class clazz , final String methodName , final Object instance ) { // instance can be null for the static application main method AccessController . doPrivileged ( new PrivilegedAction ( ) { @ Override public Object run ( ) { try { final Method m = clazz . getDeclaredMethod ( methodName ) ; if ( ! m . isAccessible ( ) ) { m . setAccessible ( true ) ; m . invoke ( instance ) ; m . setAccessible ( false ) ; return m ; } else { m . invoke ( instance ) ; return m ; } } catch ( Exception e ) { if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , e . getMessage ( ) ) ; } return null ; } } } ) ; }
|
Invokes the class method . The object instance can be null for the application main class .
| 187
| 18
|
159,959
|
public String getMethodNameFromDD ( List < LifecycleCallback > callbacks , String classname ) { String methodName = null ; for ( LifecycleCallback callback : callbacks ) { // lifecycle-callback-class default to the enclosing component class Client String callbackClassName ; callbackClassName = callback . getClassName ( ) ; if ( callbackClassName == null ) { callbackClassName = mainClassName ; } if ( callbackClassName . equals ( classname ) ) { if ( methodName == null ) { methodName = callback . getMethodName ( ) ; } else { Tr . warning ( tc , "DUPLICATE_CALLBACK_METHOD_CWWKC2454W" , new Object [ ] { methodName , classname } ) ; } } } return methodName ; }
|
Gets the lifecycle callback method name from the application client module deployment descriptor
| 171
| 15
|
159,960
|
private final void encrypt ( ) throws Exception { String signStr = Base64Coder . toString ( Base64Coder . base64Encode ( signature ) ) ; String ud = userData . toString ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( this , tc , "encrypt: userData" + ud ) ; } byte [ ] accessID = Base64Coder . getBytes ( ud ) ; StringBuilder sb = new StringBuilder ( DELIM ) ; sb . append ( getExpiration ( ) ) . append ( DELIM ) . append ( signStr ) ; byte [ ] timeAndSign = getSimpleBytes ( sb . toString ( ) ) ; byte [ ] toBeEnc = new byte [ accessID . length + timeAndSign . length ] ; for ( int i = 0 ; i < accessID . length ; i ++ ) { toBeEnc [ i ] = accessID [ i ] ; } for ( int i = accessID . length ; i < toBeEnc . length ; i ++ ) { toBeEnc [ i ] = timeAndSign [ i - accessID . length ] ; } try { encryptedBytes = LTPAKeyUtil . encrypt ( toBeEnc , sharedKey , cipher ) ; } catch ( Exception e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( this , tc , "Error encrypting; " + e ) ; } throw e ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( this , tc , "Encrypted bytes are: " + ( encryptedBytes == null ? "" : Base64Coder . toString ( Base64Coder . base64Encode ( encryptedBytes ) ) ) ) ; } }
|
Encrypt the token passed into the token .
| 409
| 9
|
159,961
|
@ FFDCIgnore ( { BadPaddingException . class , Exception . class } ) private final void decrypt ( ) throws InvalidTokenException { byte [ ] tokenData ; try { tokenData = LTPAKeyUtil . decrypt ( encryptedBytes . clone ( ) , sharedKey , cipher ) ; checkTokenBytes ( tokenData ) ; String UTF8TokenString = toUTF8String ( tokenData ) ; String [ ] userFields = LTPATokenizer . parseToken ( UTF8TokenString ) ; Map < String , ArrayList < String > > attribs = LTPATokenizer . parseUserData ( userFields [ 0 ] ) ; userData = new UserData ( attribs ) ; String tokenString = toSimpleString ( tokenData ) ; String [ ] fields = LTPATokenizer . parseToken ( tokenString ) ; String [ ] expirationArray = userData . getAttributes ( AttributeNameConstants . WSTOKEN_EXPIRATION ) ; if ( expirationArray != null && expirationArray [ expirationArray . length - 1 ] != null ) { // the new expiration value inside the signature expirationInMilliseconds = Long . parseLong ( expirationArray [ expirationArray . length - 1 ] ) ; } else { // the old expiration value outside of the signature expirationInMilliseconds = Long . parseLong ( fields [ 1 ] ) ; } byte [ ] signature = Base64Coder . base64Decode ( Base64Coder . getBytes ( fields [ 2 ] ) ) ; setSignature ( signature ) ; } catch ( BadPaddingException e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( this , tc , "Caught BadPaddingException while decrypting token, this is only a critical problem if decryption should have worked." , e ) ; } throw new InvalidTokenException ( e . getMessage ( ) , e ) ; } catch ( Exception e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( this , tc , "Error decrypting; " + e ) ; } throw new InvalidTokenException ( e . getMessage ( ) , e ) ; } }
|
Decrypt the encrypted token bytes passed into the constructor .
| 480
| 11
|
159,962
|
private final void sign ( ) throws Exception { String dataStr = this . getUserData ( ) . toString ( ) ; byte [ ] data = Base64Coder . getBytes ( dataStr ) ; byte [ ] signature = sign ( data , this . privateKey ) ; this . setSignature ( signature ) ; }
|
Sign the token passed into the token .
| 68
| 8
|
159,963
|
private final boolean verify ( ) throws Exception { String dataStr = this . getUserData ( ) . toString ( ) ; byte [ ] data = Base64Coder . getBytes ( dataStr ) ; return verify ( data , signature , publicKey ) ; }
|
Verify the token .
| 55
| 5
|
159,964
|
public final void validateExpiration ( ) throws TokenExpiredException { Date d = new Date ( ) ; Date expD = new Date ( getExpiration ( ) ) ; boolean expired = d . after ( expD ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "Current time = " + d + ", expiration time = " + expD ) ; } if ( expired ) { String msg = "The token has expired: current time = \"" + d + "\", expire time = \"" + expD + "\"" ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , msg ) ; } throw new TokenExpiredException ( expirationInMilliseconds , msg ) ; } }
|
Checks if the token has expired .
| 185
| 8
|
159,965
|
private final void setExpiration ( long expirationInMinutes ) { expirationInMilliseconds = System . currentTimeMillis ( ) + expirationInMinutes * 60 * 1000 ; signature = null ; if ( userData != null ) { encryptedBytes = null ; userData . addAttribute ( "expire" , Long . toString ( expirationInMilliseconds ) ) ; } else { encryptedBytes = null ; } }
|
Set expiration limit of the LTPA2 token
| 89
| 9
|
159,966
|
private static final String toUTF8String ( byte [ ] b ) { String ns = null ; try { ns = new String ( b , "UTF8" ) ; } catch ( UnsupportedEncodingException e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Error converting to string; " + e ) ; } } return ns ; }
|
Convert the byte representation to the UTF - 8 String form .
| 91
| 13
|
159,967
|
private static final String toSimpleString ( byte [ ] b ) { StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 , len = b . length ; i < len ; i ++ ) { sb . append ( ( char ) ( b [ i ] & 0xff ) ) ; } String str = sb . toString ( ) ; return str ; }
|
Convert the byte representation to the String form .
| 81
| 10
|
159,968
|
private static final byte [ ] getSimpleBytes ( String str ) { StringBuilder sb = new StringBuilder ( str ) ; byte [ ] b = new byte [ sb . length ( ) ] ; for ( int i = 0 , len = sb . length ( ) ; i < len ; i ++ ) { b [ i ] = ( byte ) sb . charAt ( i ) ; } return b ; }
|
Convert the String form to the byte representation
| 88
| 9
|
159,969
|
public static ProtectedFunctionMapper getInstance ( ) { ProtectedFunctionMapper funcMapper ; if ( System . getSecurityManager ( ) != null ) { funcMapper = ( ProtectedFunctionMapper ) AccessController . doPrivileged ( new PrivilegedAction ( ) { @ Override public Object run ( ) { return new ProtectedFunctionMapper ( ) ; } } ) ; } else { funcMapper = new ProtectedFunctionMapper ( ) ; } funcMapper . fnmap = new java . util . HashMap ( ) ; return funcMapper ; }
|
Generated Servlet and Tag Handler implementations call this method to retrieve an instance of the ProtectedFunctionMapper . This is necessary since generated code does not have access to create instances of classes in this package .
| 122
| 42
|
159,970
|
@ Override public Method resolveFunction ( String prefix , String localName ) { return ( Method ) this . fnmap . get ( prefix + ":" + localName ) ; }
|
Resolves the specified local name and prefix into a Java . lang . Method . Returns null if the prefix and local name are not found .
| 37
| 28
|
159,971
|
public void setItemType ( JMFType elem ) { if ( elem == null ) throw new NullPointerException ( "Repeated item cannot be null" ) ; itemType = ( JSType ) elem ; itemType . parent = this ; itemType . siblingPosition = 0 ; }
|
Set the item type of the array
| 64
| 7
|
159,972
|
protected void incrementActiveConns ( ) { int count = this . activeConnections . incrementAndGet ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Increment active, current=" + count ) ; } }
|
Increase the number of active connections currently being processed inside the HTTP dispatcher .
| 65
| 14
|
159,973
|
protected void decrementActiveConns ( ) { int count = this . activeConnections . decrementAndGet ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Decrement active, current=" + count ) ; } if ( 0 == count && this . quiescing ) { signalNoConnections ( ) ; } }
|
Decrement the number of active connections being processed by the dispatcher .
| 88
| 13
|
159,974
|
@ Trivial public void enactOpen ( long openAt ) { String methodName = "enactOpen" ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName + " On [ " + path + " ] at [ " + toRelSec ( initialAt , openAt ) + " (s) ]" ) ; } if ( zipFileState == ZipFileState . OPEN ) { // OPEN -> OPEN openDuration += openAt - lastOpenAt ; lastLastOpenAt = lastOpenAt ; lastOpenAt = openAt ; openCount ++ ; } else if ( zipFileState == ZipFileState . PENDING ) { // PENDING -> OPEN long lastPendDuration = openAt - lastPendAt ; pendToOpenDuration += lastPendDuration ; pendToOpenCount ++ ; lastLastOpenAt = lastOpenAt ; lastOpenAt = openAt ; openCount ++ ; zipFileState = ZipFileState . OPEN ; if ( ZIP_REAPER_COLLECT_TIMINGS ) { timing ( " Pend Success [ " + toAbsSec ( lastPendDuration ) + " (s) ]" ) ; } } else if ( zipFileState == ZipFileState . FULLY_CLOSED ) { // FULLY_CLOSED -> OPEN if ( firstOpenAt == - 1L ) { firstOpenAt = openAt ; } else { long lastFullCloseDuration = openAt - lastFullCloseAt ; fullCloseToOpenDuration += lastFullCloseDuration ; if ( ZIP_REAPER_COLLECT_TIMINGS ) { long lastPendDuration = ( ( lastPendAt == - 1L ) ? 0 : ( lastFullCloseAt - lastPendAt ) ) ; timing ( " Reopen; Pend [ " + toAbsSec ( lastPendDuration ) + " (s) ] " + " Close [ " + toAbsSec ( lastFullCloseDuration ) + " (s) ]" ) ; } } fullCloseToOpenCount ++ ; lastLastOpenAt = lastOpenAt ; lastOpenAt = openAt ; openCount ++ ; zipFileState = ZipFileState . OPEN ; } else { throw unknownState ( ) ; } if ( ZIP_REAPER_COLLECT_TIMINGS ) { timing ( " Open " + dualTiming ( openAt , initialAt ) + " " + openState ( ) ) ; } }
|
PENDING - > FULLY_CLOSED
| 526
| 10
|
159,975
|
@ Trivial protected ZipFile reacquireZipFile ( ) throws IOException , ZipException { String methodName = "reacquireZipFile" ; File rawZipFile = new File ( path ) ; long newZipLength = FileUtils . fileLength ( rawZipFile ) ; long newZipLastModified = FileUtils . fileLastModified ( rawZipFile ) ; boolean zipFileChanged = false ; if ( newZipLength != zipLength ) { zipFileChanged = true ; if ( openCount > closeCount ) { // Tr.warning(tc, methodName + // " Zip [ " + path + " ]:" + // " Update length from [ " + Long.valueOf(zipLength) + " ]" + // " to [ " + Long.valueOf(newZipLength) + " ]"); Tr . warning ( tc , "reaper.unexpected.length.change" , path , Long . valueOf ( zipLength ) , Long . valueOf ( newZipLength ) ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName + " Zip [ " + path + " ]:" + " Update length from [ " + Long . valueOf ( zipLength ) + " ]" + " to [ " + Long . valueOf ( newZipLength ) + " ]" ) ; } } } if ( newZipLastModified != zipLastModified ) { zipFileChanged = true ; if ( openCount > closeCount ) { // Tr.warning(tc, methodName + // " Zip [ " + path + " ]:" + // " Update last modified from [ " + Long.valueOf(zipLastModified) + " ]" + // " to [ " + Long.valueOf(newZipLastModified) + " ]"); Tr . warning ( tc , "reaper.unexpected.lastmodified.change" , path , Long . valueOf ( zipLastModified ) , Long . valueOf ( newZipLastModified ) ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName + " Zip [ " + path + " ]:" + " Update last modified from [ " + Long . valueOf ( zipLastModified ) + " ]" + " to [ " + Long . valueOf ( newZipLastModified ) + " ]" ) ; } } } if ( zipFileChanged ) { if ( openCount > closeCount ) { // Tr.warning(tc, methodName + " Reopen [ " + path + " ]"); Tr . warning ( tc , "reaper.reopen.active" , path ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName + " Reopen [ " + path + " ]" ) ; } } @ SuppressWarnings ( "unused" ) ZipFile oldZipFile = closeZipFile ( ) ; @ SuppressWarnings ( "unused" ) ZipFile newZipFile = openZipFile ( newZipLength , newZipLastModified ) ; // throws IOException, ZipException } return zipFile ; }
|
Re - acquire the ZIP file .
| 713
| 7
|
159,976
|
public static String getAttribute ( XMLStreamReader reader , String localName ) { int count = reader . getAttributeCount ( ) ; for ( int i = 0 ; i < count ; i ++ ) { String name = reader . getAttributeLocalName ( i ) ; if ( localName . equals ( name ) ) { return reader . getAttributeValue ( i ) ; } } return null ; }
|
Get the element attribute value
| 82
| 5
|
159,977
|
public static < T > T createInstanceByElement ( XMLStreamReader reader , Class < T > clazz , Set < String > attrNames ) { if ( reader == null || clazz == null || attrNames == null ) return null ; try { T instance = clazz . newInstance ( ) ; int count = reader . getAttributeCount ( ) ; int matchCount = attrNames . size ( ) ; for ( int i = 0 ; i < count && matchCount > 0 ; ++ i ) { String name = reader . getAttributeLocalName ( i ) ; String value = reader . getAttributeValue ( i ) ; if ( attrNames . contains ( name ) ) { Field field = clazz . getDeclaredField ( name ) ; field . setAccessible ( true ) ; field . set ( instance , value ) ; matchCount -- ; } } return instance ; } catch ( InstantiationException e ) { throw new RuntimeException ( e ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( e ) ; } catch ( SecurityException e ) { throw new RuntimeException ( e ) ; } catch ( NoSuchFieldException e ) { throw new RuntimeException ( e ) ; } }
|
Create the instance by parse the element the instance s class must have the empty construct . The clazz must have the fields in attrNames and all the fields type must be String
| 256
| 36
|
159,978
|
public void removeEjbBindings ( ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "removeEjbBindings called" ) ; // Just loop through the values, not the keys, as the simple-binding-name // key may have a # in front of it when the bean was not simple, and that // won't match what is in the ServerContextBindingMap. d457053.1 for ( String bindingName : ivEjbContextBindingMap . values ( ) ) { removeFromServerContextBindingMap ( bindingName , true ) ; } ivEjbContextBindingMap . clear ( ) ; }
|
Removes all of the EJB bindings for this EJB from the bean specific and sever wide maps .
| 155
| 21
|
159,979
|
public boolean isUniqueShortDefaultBinding ( String interfaceName ) { // If there were no explicit bindings, and only one implicit // binding, then it is considered uniquie. d457053.1 BindingData bdata = ivServerContextBindingMap . get ( interfaceName ) ; if ( bdata != null && bdata . ivExplicitBean == null && bdata . ivImplicitBeans != null && bdata . ivImplicitBeans . size ( ) == 1 ) { return true ; } return false ; }
|
Returns true if a short form default binding is present for the specified interface and the current bean is currently the only bean with this short form default binding and there are no explicit bindings .
| 114
| 36
|
159,980
|
public void removeShortDefaultBindings ( ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "removeShortDefaultBindings called" ) ; for ( String bindingName : ivEjbContextShortDefaultJndiNames ) { removeFromServerContextBindingMap ( bindingName , false ) ; } ivEjbContextShortDefaultJndiNames . clear ( ) ; }
|
Removes all of the short form default bindings for this EJB from the bean specific and sever wide maps .
| 98
| 22
|
159,981
|
private void addToServerContextBindingMap ( String interfaceName , String bindingName ) throws NameAlreadyBoundException { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "addToServerContextBindingMap : " + interfaceName + ", binding : " + bindingName ) ; BindingData bdata = ivServerContextBindingMap . get ( bindingName ) ; if ( bdata == null ) { bdata = new BindingData ( ) ; ivServerContextBindingMap . put ( bindingName , bdata ) ; } if ( bdata . ivExplicitBean == null ) { bdata . ivExplicitBean = ivHomeRecord . j2eeName ; bdata . ivExplicitInterface = interfaceName ; if ( bdata . ivImplicitBeans != null && bdata . ivImplicitBeans . size ( ) > 1 ) { ivEjbContextAmbiguousMap . remove ( interfaceName ) ; } } else { J2EEName j2eeName = ivHomeRecord . j2eeName ; if ( bdata . ivExplicitBean . equals ( j2eeName ) ) { Tr . error ( tc , "NAME_ALREADY_BOUND_FOR_SAME_EJB_CNTR0173E" , new Object [ ] { interfaceName , j2eeName . getComponent ( ) , j2eeName . getModule ( ) , j2eeName . getApplication ( ) , bindingName , bdata . ivExplicitInterface } ) ; // d479669 String message = "The " + interfaceName + " interface of the " + j2eeName . getComponent ( ) + " bean in the " + j2eeName . getModule ( ) + " module of the " + j2eeName . getApplication ( ) + " application " + "cannot be bound to the " + bindingName + " name location. " + "The " + bdata . ivExplicitInterface + " interface of the " + "same bean has already been bound to the " + bindingName + " name location." ; throw new NameAlreadyBoundException ( message ) ; } else { Tr . error ( tc , "NAME_ALREADY_BOUND_FOR_EJB_CNTR0172E" , new Object [ ] { interfaceName , j2eeName . getComponent ( ) , j2eeName . getModule ( ) , j2eeName . getApplication ( ) , bindingName , bdata . ivExplicitInterface , bdata . ivExplicitBean . getComponent ( ) , bdata . ivExplicitBean . getModule ( ) , bdata . ivExplicitBean . getApplication ( ) } ) ; // d479669 String message = "The " + interfaceName + " interface of the " + j2eeName . getComponent ( ) + " bean in the " + j2eeName . getModule ( ) + " module of the " + j2eeName . getApplication ( ) + " application " + "cannot be bound to the " + bindingName + " name location. " + "The " + bdata . ivExplicitInterface + " interface of the " + bdata . ivExplicitBean . getComponent ( ) + " bean in the " + bdata . ivExplicitBean . getModule ( ) + " module of the " + bdata . ivExplicitBean . getApplication ( ) + " application " + "has already been bound to the " + bindingName + " name location." ; throw new NameAlreadyBoundException ( message ) ; } } }
|
d457053 . 1
| 783
| 6
|
159,982
|
public static BindingsHelper getLocalHelper ( HomeRecord homeRecord ) { if ( homeRecord . ivLocalBindingsHelper == null ) { homeRecord . ivLocalBindingsHelper = new BindingsHelper ( homeRecord , cvAllLocalBindings , null ) ; } return homeRecord . ivLocalBindingsHelper ; }
|
A method for obtaining a Binding Name Helper for use with the local jndi namespace .
| 68
| 19
|
159,983
|
public static BindingsHelper getRemoteHelper ( HomeRecord homeRecord ) { if ( homeRecord . ivRemoteBindingsHelper == null ) { homeRecord . ivRemoteBindingsHelper = new BindingsHelper ( homeRecord , cvAllRemoteBindings , "ejb/" ) ; } return homeRecord . ivRemoteBindingsHelper ; }
|
A method for obtaining a Binding Name Helper for use with the remote jndi namespace .
| 72
| 19
|
159,984
|
public synchronized void stop ( ) { if ( timer != null ) { timer . keepRunning = false ; timer . interrupt ( ) ; timer = null ; } // Remove this manager from the space alert list LogRepositorySpaceAlert . getInstance ( ) . removeRepositoryInfo ( this ) ; }
|
stop logging and thus stop the timer retention thread
| 61
| 9
|
159,985
|
protected static long calculateFileSplit ( long repositorySize ) { if ( repositorySize <= 0 ) { return MAX_LOG_FILE_SIZE ; } if ( repositorySize < MIN_REPOSITORY_SIZE ) { throw new IllegalArgumentException ( "Specified repository size is too small" ) ; } long result = repositorySize / SPLIT_RATIO ; if ( result < MIN_LOG_FILE_SIZE ) { result = MIN_LOG_FILE_SIZE ; } else if ( result > MAX_LOG_FILE_SIZE ) { result = MAX_LOG_FILE_SIZE ; } return result ; }
|
calculates maximum size of repository files based on the required maximum limit on total size of the repository .
| 130
| 21
|
159,986
|
private void initFileList ( boolean force ) { if ( totalSize < 0 || force ) { fileList . clear ( ) ; parentFilesMap . clear ( ) ; totalSize = 0L ; File [ ] files = listRepositoryFiles ( ) ; if ( files . length > 0 ) { Arrays . sort ( files , fileComparator ) ; for ( File file : files ) { long size = AccessHelper . getFileLength ( file ) ; // Intentional here to NOT add these files to activeFilesMap since they are legacy fileList . add ( new FileDetails ( file , getLogFileTimestamp ( file ) , size , null ) ) ; totalSize += size ; if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "initFileList" , "add: " + file . getPath ( ) + " sz: " + size + " listSz: " + fileList . size ( ) + " new totalSz: " + totalSize ) ; } incrementFileCount ( file ) ; } debugListLL ( "fileListPrePop" ) ; } deleteEmptyRepositoryDirs ( ) ; if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { Iterator < File > parentKeys = parentFilesMap . keySet ( ) . iterator ( ) ; while ( parentKeys . hasNext ( ) ) { File parentNameKey = parentKeys . next ( ) ; Integer fileCount = parentFilesMap . get ( parentNameKey ) ; debugLogger . logp ( Level . FINE , thisClass , "initFileList" , " Directory: " + parentNameKey + " file count: " + fileCount ) ; } } } }
|
Initializes file list from the list of files in the repository . This method should be called while holding a lock on fileList .
| 405
| 26
|
159,987
|
protected void deleteEmptyRepositoryDirs ( ) { File [ ] directories = listRepositoryDirs ( ) ; //determine if the server/controller instance directory is empty for ( int i = 0 ; i < directories . length ; i ++ ) { // This is a directory we should not delete boolean currentDir = ivSubDirectory != null && ivSubDirectory . compareTo ( directories [ i ] ) == 0 ; //if a server instance directory does not have a key in parentFilesMap, then it does not have any files if ( debugLogger . isLoggable ( Level . FINE ) && isDebugEnabled ( ) ) debugLogger . logp ( Level . FINE , thisClass , "deleteEmptyRepositoryDirs" , "Instance directory name (controller): " + directories [ i ] . getAbsolutePath ( ) ) ; //now look for empty servant directories File [ ] childFiles = AccessHelper . listFiles ( directories [ i ] , subprocFilter ) ; for ( File curFile : childFiles ) { if ( debugLogger . isLoggable ( Level . FINE ) && isDebugEnabled ( ) ) debugLogger . logp ( Level . FINE , thisClass , "deleteEmptyRepositoryDirs" , "Servant directory name: " + curFile . getAbsolutePath ( ) ) ; if ( ! currentDir && ! parentFilesMap . containsKey ( curFile ) ) { if ( debugLogger . isLoggable ( Level . FINE ) && isDebugEnabled ( ) ) debugLogger . logp ( Level . FINE , thisClass , "deleteEmptyRepositoryDirs" , "Found an empty servant directory: " + curFile ) ; deleteDirectory ( curFile ) ; } else { incrementFileCount ( curFile ) ; } } //delete directory if empty if ( ! currentDir && ! parentFilesMap . containsKey ( directories [ i ] ) ) { if ( debugLogger . isLoggable ( Level . FINE ) && isDebugEnabled ( ) ) debugLogger . logp ( Level . FINE , thisClass , "listRepositoryFiles" , "Found an empty directory: " + directories [ i ] ) ; deleteDirectory ( directories [ i ] ) ; } } }
|
Deletes all empty server instance directories including empty servant directories
| 481
| 11
|
159,988
|
protected void deleteDirectory ( File directoryName ) { if ( debugLogger . isLoggable ( Level . FINE ) && isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "deleteDirectory" , "empty directory " + ( ( directoryName == null ) ? "None" : directoryName . getPath ( ) ) ) ; } if ( AccessHelper . deleteFile ( directoryName ) ) { // If directory is empty, delete if ( debugLogger . isLoggable ( Level . FINE ) && isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "deleteDirectory" , "delete " + directoryName . getName ( ) ) ; } } else { // Else the directory is not empty, and deletion fails if ( isDebugEnabled ( ) ) { debugLogger . logp ( Level . WARNING , thisClass , "deleteDirectory" , "Failed to delete directory " + directoryName . getPath ( ) ) ; } } }
|
Deletes the specified directory
| 222
| 5
|
159,989
|
private boolean purgeOldFiles ( long total ) { boolean result = false ; // Should delete some files. if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "purgeOldFiles" , "total: " + total + " listSz: " + fileList . size ( ) ) ; } while ( total > 0 && fileList . size ( ) > 1 ) { FileDetails details = purgeOldestFile ( ) ; if ( details != null ) { if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "purgeOldFiles" , "Purged: " + details . file . getPath ( ) + " sz: " + details . size ) ; } total -= details . size ; result = true ; } } return result ; }
|
Removes old files from the repository . This method does not remove the most recent file . This method should be called while holding a lock on fileList .
| 225
| 31
|
159,990
|
private FileDetails purgeOldestFile ( ) { debugListLL ( "prepurgeOldestFile" ) ; debugListHM ( "prepurgeOldestFile" ) ; FileDetails returnFD = getOldestInactive ( ) ; if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "purgeOldestFile" , "oldestInactive: " + ( ( returnFD == null ) ? "None" : returnFD . file . getPath ( ) ) ) ; } if ( returnFD == null ) return null ; if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "purgeOldestFile" , "fileList size before remove: " + fileList . size ( ) ) ; } if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "purgeOldestFile" , "fileList size after remove: " + fileList . size ( ) ) ; } if ( AccessHelper . deleteFile ( returnFD . file ) ) { fileList . remove ( returnFD ) ; totalSize -= returnFD . size ; if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "purgeOldestFile" , "delete: " + returnFD . file . getName ( ) ) ; } decrementFileCount ( returnFD . file ) ; notifyOfFileAction ( LogEventListener . EVENTTYPEDELETE ) ; // F004324 } else { // Assume the list is out of sync. if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "purgeOldestFile" , "Failed to delete file: " + returnFD . file . getPath ( ) ) ; } initFileList ( true ) ; returnFD = null ; } debugListLL ( "postpurgeOldestFile" ) ; return returnFD ; }
|
Removes the oldest file from the repository . This method has logic to avoid removing currently active files This method should be called with a lock on filelist already attained
| 544
| 32
|
159,991
|
public synchronized String addNewFileFromSubProcess ( long spTimeStamp , String spPid , String spLabel ) { // TODO: It is theoretically possible that subProcess already created one of these (although it won't happen in our scenario. // Consider either pulling actual pid from the files on initFileList or looking for the file here before adding it. If found, // adjust the pid to this pid. checkSpaceConstrain ( maxLogFileSize ) ; if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "addNewFileFromSubProcess" , "Tstamp: " + spTimeStamp + " pid: " + spPid + " lbl: " + spLabel + " Max: " + maxLogFileSize ) ; } if ( ivSubDirectory == null ) getControllingProcessDirectory ( spTimeStamp , svPid ) ; // Note: passing, pid of this region, not sending child region if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "addNewFileFromSubProcess" , "Got ivSubDir: " + ivSubDirectory . getPath ( ) ) ; } File servantDirectory = new File ( ivSubDirectory , getLogDirectoryName ( - 1 , spPid , spLabel ) ) ; File servantFile = getLogFile ( servantDirectory , spTimeStamp ) ; FileDetails thisFile = new FileDetails ( servantFile , spTimeStamp , maxLogFileSize , spPid ) ; synchronized ( fileList ) { initFileList ( false ) ; fileList . add ( thisFile ) ; // Not active as new one was created incrementFileCount ( servantFile ) ; synchronized ( activeFilesMap ) { // In this block so that fileList always locked first activeFilesMap . put ( spPid , thisFile ) ; } } totalSize += maxLogFileSize ; if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "addNewFileFromSubProcess" , "Added file: " + servantFile . getPath ( ) + " sz:" + maxLogFileSize + " tstmp: " + spTimeStamp ) ; debugListLL ( "postAddFromSP" ) ; debugListHM ( "postAddFromSP" ) ; } return servantFile . getPath ( ) ; }
|
add information about a new file being created by a subProcess in order to maintain retention information . This is done for all files created by each subProcess . If IPC facility is not ready subProcess may have to create first then notify when IPC is up .
| 583
| 53
|
159,992
|
public void inactivateSubProcess ( String spPid ) { synchronized ( fileList ) { // always lock fileList first to avoid deadlock synchronized ( activeFilesMap ) { // Right into sync block because 99% case is that map contains pid activeFilesMap . remove ( spPid ) ; } } if ( debugLogger . isLoggable ( Level . FINE ) && LogRepositoryBaseImpl . isDebugEnabled ( ) ) { debugLogger . logp ( Level . FINE , thisClass , "inactivateSubProcess" , "Inactivated pid: " + spPid ) ; } }
|
inactivate active file for a given process . Should only be one file active for a process
| 129
| 18
|
159,993
|
protected int getContentLength ( boolean update ) { if ( update ) { contentLength = ( int ) this . getFileSize ( update ) ; return contentLength ; } else { if ( contentLength == - 1 ) { contentLength = ( int ) this . getFileSize ( update ) ; return contentLength ; } else { return contentLength ; } } }
|
PM92967 pulled up method
| 75
| 7
|
159,994
|
public void logError ( String moduleName , String beanName , String methodName ) { Tr . error ( tc , ivError . getMessageId ( ) , new Object [ ] { beanName , moduleName , methodName , ivField } ) ; }
|
Logs an error message corresponding to this exception .
| 53
| 10
|
159,995
|
protected void restore ( ObjectInputStream ois , int dataVersion ) throws SevereMessageStoreException { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "restore" , new Object [ ] { dataVersion } ) ; checkPersistentVersionId ( dataVersion ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "restore" ) ; }
|
Child classes should override this method to restore their persistent data .
| 111
| 12
|
159,996
|
synchronized void captureCheckpointManagedObjects ( ) { if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "captureCheckpointManagedObjectsremove" ) ; // Now that we are synchronized check that we have not captured the checkpoint sets already. if ( checkpointManagedObjectsToWrite == null ) { // Take the tokens to write first, if we miss a delete we will catch it next time. // The managedObjectsToWrite and tokensToDelete sets are volatile so users of the store will move to them // promptly. checkpointManagedObjectsToWrite = managedObjectsToWrite ; managedObjectsToWrite = new ConcurrentHashMap ( concurrency ) ; checkpointTokensToDelete = tokensToDelete ; tokensToDelete = new ConcurrentHashMap ( concurrency ) ; } if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "captureCheckpointManagedObjects" ) ; }
|
Capture the ManagedObjects to write and delete as part of the checkpoint .
| 230
| 16
|
159,997
|
private void write ( ManagedObject managedObject ) throws ObjectManagerException { final String methodName = "write" ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , methodName , new Object [ ] { managedObject } ) ; // Pick up and write the latest serialized bytes. ObjectManagerByteArrayOutputStream serializedBytes = null ; if ( usesSerializedForm ) { // Pick up and write the latest serialized bytes. // It is possible that several threads requested a write one after the // other each getting the request added to a different managedObjetsToWrite table, // however the first of these through here will clear serializedBytes. // It is also possible that the transaction state of the managed object // was restored from a checkpoint, in which case the serialized object // may already be in the ObjectStore, and the serializedBytes will again be null. // Is the Object deleted? It may have got deleted after we release the // synchronize lock when we // captured the tokensToWrite hashtable but before we actually try to write it. if ( managedObject . state != ManagedObject . stateDeleted ) serializedBytes = managedObject . freeLatestSerializedBytes ( ) ; } else { // Not logged so use the current serialized bytes, as long as its not part of a transaction. // If it is part of a transaction then the transaction will hold the ManagedObject in memory. // Not locked because this is only used by SAVE_ONLY_ON_SHUTDOWN stores at shutdown // when no appliaction threads are active. if ( managedObject . state == ManagedObject . stateReady ) serializedBytes = managedObject . getSerializedBytes ( ) ; } // if ( usesSerializedForm ). // It is possible that several threads requested a write one after the other each getting the request added // to a different tokensToWrite table, however the first of these through here will clear serializedBytes. if ( serializedBytes != null ) { // Already done by another thread? try { java . io . FileOutputStream storeFileOutputStream = new java . io . FileOutputStream ( storeDirectoryName + java . io . File . separator + managedObject . owningToken . storedObjectIdentifier ) ; storeFileOutputStream . write ( serializedBytes . getBuffer ( ) , 0 , serializedBytes . getCount ( ) ) ; storeFileOutputStream . flush ( ) ; storeFileOutputStream . close ( ) ; } catch ( java . io . IOException exception ) { // No FFDC Code Needed. ObjectManager . ffdc . processException ( this , cclass , methodName , exception , "1:656:1.17" ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , methodName , new Object [ ] { exception } ) ; throw new PermanentIOException ( this , exception ) ; } // catch java.io.IOException. managedObjectsOnDisk . add ( new Long ( managedObject . owningToken . storedObjectIdentifier ) ) ; } // if (serializedBytes != null ). if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , methodName ) ; }
|
Writes an object to hardened storage but may return before the write completes .
| 715
| 15
|
159,998
|
public void writeHeader ( ) throws ObjectManagerException { final String methodName = "writeHeader" ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , methodName ) ; try { java . io . FileOutputStream headerOutputStream = new java . io . FileOutputStream ( storeDirectoryName + java . io . File . separator + headerIdentifier ) ; java . io . DataOutputStream dataOutputStream = new java . io . DataOutputStream ( headerOutputStream ) ; java . io . FileDescriptor fileDescriptor = headerOutputStream . getFD ( ) ; dataOutputStream . writeInt ( version ) ; dataOutputStream . writeLong ( objectStoreIdentifier ) ; dataOutputStream . writeLong ( sequenceNumber ) ; dataOutputStream . flush ( ) ; headerOutputStream . flush ( ) ; fileDescriptor . sync ( ) ; // Force buffered records to disk. headerOutputStream . close ( ) ; } catch ( java . io . IOException exception ) { // No FFDC Code Needed. ObjectManager . ffdc . processException ( this , cclass , methodName , exception , "1:706:1.17" ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , methodName , new Object [ ] { exception } ) ; throw new PermanentIOException ( this , exception ) ; } // catch java.io.IOException. if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , methodName ) ; }
|
Write header information for disk in the file headerIdentifier and force it to disk .
| 366
| 17
|
159,999
|
public static RestRepositoryConnection createConnection ( RestRepositoryConnectionProxy proxy ) throws RepositoryBackendIOException { readRepoProperties ( proxy ) ; RestRepositoryConnection connection = new RestRepositoryConnection ( repoProperties . getProperty ( REPOSITORY_URL_PROP ) . trim ( ) ) ; connection . setProxy ( proxy ) ; if ( repoProperties . containsKey ( API_KEY_PROP ) ) { connection . setApiKey ( repoProperties . getProperty ( API_KEY_PROP ) . trim ( ) ) ; } if ( repoProperties . containsKey ( USERID_PROP ) ) { connection . setUserId ( repoProperties . getProperty ( USERID_PROP ) . trim ( ) ) ; } if ( repoProperties . containsKey ( PASSWORD_PROP ) ) { connection . setPassword ( repoProperties . getProperty ( PASSWORD_PROP ) . trim ( ) ) ; } if ( repoProperties . containsKey ( SOFTLAYER_USERID_PROP ) ) { connection . setSoftlayerUserId ( repoProperties . getProperty ( SOFTLAYER_USERID_PROP ) . trim ( ) ) ; } if ( repoProperties . containsKey ( SOFTLAYER_PASSWORD_PROP ) ) { connection . setSoftlayerPassword ( repoProperties . getProperty ( SOFTLAYER_PASSWORD_PROP ) . trim ( ) ) ; } if ( repoProperties . containsKey ( ATTACHMENT_BASIC_AUTH_USERID_PROP ) ) { connection . setAttachmentBasicAuthUserId ( repoProperties . getProperty ( ATTACHMENT_BASIC_AUTH_USERID_PROP ) . trim ( ) ) ; } if ( repoProperties . containsKey ( ATTACHMENT_BASIC_AUTH_PASSWORD_PROP ) ) { connection . setAttachmentBasicAuthPassword ( repoProperties . getProperty ( ATTACHMENT_BASIC_AUTH_PASSWORD_PROP ) . trim ( ) ) ; } return connection ; }
|
Creates a LoginInfoEntry with a proxy . This will then load the default repository using a hosted properties file on DHE .
| 469
| 26
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.