signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Vector3f { /** * / * ( non - Javadoc ) * @ see org . joml . Vector3fc # div ( float , float , float , org . joml . Vector3f ) */ public Vector3f div ( float x , float y , float z , Vector3f dest ) { } }
dest . x = this . x / x ; dest . y = this . y / y ; dest . z = this . z / z ; return dest ;
public class RuleCallImpl { /** * < ! - - begin - user - doc - - > * @ since 2.9 * < ! - - end - user - doc - - > * @ generated */ public EList < NamedArgument > getArguments ( ) { } }
if ( arguments == null ) { arguments = new EObjectContainmentEList < NamedArgument > ( NamedArgument . class , this , XtextPackage . RULE_CALL__ARGUMENTS ) ; } return arguments ;
public class DefaultComparisonFormatter { /** * Return a String representation for { @ link # getShortString } that describes the " thing " that has been compared so * users know how to locate it . * < p > Examples are " & lt ; bar . . . & gt ; at / foo [ 1 ] / bar [ 1 ] " for a comparison of elements or " & lt ; ! - - Comment Text - - & gt ; * at / foo [ 2 ] / comment ( ) [ 1 ] " for a comment . < / p > * < p > This implementation dispatches to several { @ code appendX } methods based on the comparison type or the type of * the node . < / p > * @ param node the node to describe * @ param xpath xpath of the node if applicable * @ param type the comparison type * @ return the formatted result * @ since XMLUnit 2.4.0 */ protected String getShortString ( Node node , String xpath , ComparisonType type ) { } }
StringBuilder sb = new StringBuilder ( ) ; if ( type == ComparisonType . HAS_DOCTYPE_DECLARATION ) { Document doc = ( Document ) node ; appendDocumentType ( sb , doc . getDoctype ( ) ) ; appendDocumentElementIndication ( sb , doc ) ; } else if ( node instanceof Document ) { Document doc = ( Document ) node ; appendDocumentXmlDeclaration ( sb , doc ) ; appendDocumentElementIndication ( sb , doc ) ; } else if ( node instanceof DocumentType ) { final DocumentType docType = ( DocumentType ) node ; appendDocumentType ( sb , docType ) ; appendDocumentElementIndication ( sb , docType . getOwnerDocument ( ) ) ; } else if ( node instanceof Attr ) { appendAttribute ( sb , ( Attr ) node ) ; } else if ( node instanceof Element ) { appendElement ( sb , ( Element ) node ) ; } else if ( node instanceof Text ) { appendText ( sb , ( Text ) node ) ; } else if ( node instanceof Comment ) { appendComment ( sb , ( Comment ) node ) ; } else if ( node instanceof ProcessingInstruction ) { appendProcessingInstruction ( sb , ( ProcessingInstruction ) node ) ; } else if ( node == null ) { sb . append ( "<NULL>" ) ; } else { sb . append ( "<!--NodeType " ) . append ( node . getNodeType ( ) ) . append ( ' ' ) . append ( node . getNodeName ( ) ) . append ( '/' ) . append ( node . getNodeValue ( ) ) . append ( "-->" ) ; } appendXPath ( sb , xpath ) ; return sb . toString ( ) ;
public class ConnectionEventListener { /** * This method is called by a resource adapter when a connection error occurs . * This is also called internally by this class when other event handling methods fail * and require cleanup . * @ param ConnectionEvent */ @ Override public void connectionErrorOccurred ( ConnectionEvent event ) { } }
int eventID = event . getId ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { StringBuilder entry = new StringBuilder ( event . getClass ( ) . getSimpleName ( ) ) . append ( '{' ) ; entry . append ( "id=" ) . append ( event . getId ( ) ) . append ( ", " ) ; entry . append ( "source=" ) . append ( event . getSource ( ) ) ; entry . append ( '}' ) ; if ( event . getException ( ) == null ) Tr . entry ( this , tc , "connectionErrorOccurred" , entry . toString ( ) ) ; else Tr . entry ( this , tc , "connectionErrorOccurred" , entry . toString ( ) , event . getException ( ) ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { // ( ASSERT : event is not null ) StringBuffer tsb = new StringBuffer ( ) ; Object connHandle = event . getConnectionHandle ( ) ; tsb . append ( "***Connection Error Request*** Handle Name: " + connHandle ) ; if ( mcWrapper != null ) { Object poolMgr = mcWrapper . getPoolManager ( ) ; tsb . append ( ", Connection Pool: " + poolMgr + ", Details: " + mcWrapper ) ; } else { tsb . append ( ", Details: null" ) ; } Tr . debug ( this , tc , tsb . toString ( ) ) ; } switch ( eventID ) { case ConnectionEvent . CONNECTION_ERROR_OCCURRED : { Exception tempEx = event . getException ( ) ; // Initialize tempString so the msg makes sense for the case where the event has NO associated Exception String tempString = "" ; if ( tempEx != null ) { // If there is an associated Exception , generate tempString from that tempString = J2CUtilityClass . generateExceptionString ( tempEx ) ; Tr . audit ( tc , "RA_CONNECTION_ERROR_J2CA0056" , tempString , mcWrapper . gConfigProps . cfName ) ; } else { Tr . audit ( tc , "NO_RA_EXCEPTION_J2CA0216" , mcWrapper . gConfigProps . cfName ) ; } // NOTE : Moving all functional code for this to the MCWrapper as it is // closer to all the data / objects needed to perform this cleanup . mcWrapper . connectionErrorOccurred ( event ) ; break ; } case com . ibm . websphere . j2c . 
ConnectionEvent . SINGLE_CONNECTION_ERROR_OCCURRED : { /* * 51 is the id selected for this event . * If a resource adapter uses this Id , the connection may be * unconditionally cleaned up and destroyed . We are assuming the resource * adapter knows this connection can not be recovered . * Existing transactions may delay destroying the connection . * The connectionErrorOccurred method will process this request , * Only this connection will be destroyed . */ Exception tempEx = event . getException ( ) ; // Initialize tempString so the msg makes sense for the case where the event has NO associated Exception String tempString = "" ; if ( tempEx != null ) { // If there is an associated Exception , generate tempString from that tempString = J2CUtilityClass . generateExceptionString ( tempEx ) ; Tr . audit ( tc , "RA_CONNECTION_ERROR_J2CA0056" , tempString , mcWrapper . gConfigProps . cfName ) ; } else { Tr . audit ( tc , "NO_RA_EXCEPTION_J2CA0216" , mcWrapper . gConfigProps . cfName ) ; } // NOTE : Moving all functional code for this to the MCWrapper as it is // closer to all the data / objects needed to perform this cleanup . mcWrapper . connectionErrorOccurred ( event ) ; break ; } case com . ibm . websphere . j2c . ConnectionEvent . CONNECTION_ERROR_OCCURRED_NO_EVENT : { // NOTE : Moving all functional code for this to the MCWrapper as it is // closer to all the data / objects needed to perform this cleanup . mcWrapper . connectionErrorOccurred ( event ) ; break ; } default : { // Connection Event passed in doesn ' t match the method called . // This should never happen unless there is an error in the ResourceAdapter . processBadEvent ( "connectionErrorOccurred" , ConnectionEvent . CONNECTION_ERROR_OCCURRED , event ) ; } } // end switch if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( this , tc , "connectionErrorOccurred" ) ; } return ;
public class Rectangles { /** * Computes the point inside the bounds of the rectangle that ' s closest to the given point , * writing the result into { @ code out } . * @ return { @ code out } for call chaining convenience . */ public static Point closestInteriorPoint ( IRectangle r , IPoint p , Point out ) { } }
out . set ( MathUtil . clamp ( p . x ( ) , r . minX ( ) , r . maxX ( ) ) , MathUtil . clamp ( p . y ( ) , r . minY ( ) , r . maxY ( ) ) ) ; return out ;
public class DirContextAdapter { /** * { @ inheritDoc } */ @ Override public Attributes getAttributes ( Name name , String [ ] attrIds ) throws NamingException { } }
return getAttributes ( name . toString ( ) , attrIds ) ;
public class ApiOvhSupplymondialRelay { /** * Find the 10 nearest MondialRelay points from address or city . * REST : POST / supply / mondialRelay * @ param city [ required ] City * @ param address [ required ] Address * @ param country [ required ] ISO country code * @ param zipcode [ required ] Zip Code */ public OvhMondialRelayReturn POST ( String address , String city , OvhCountryEnum country , String zipcode ) throws IOException { } }
String qPath = "/supply/mondialRelay" ; StringBuilder sb = path ( qPath ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "address" , address ) ; addBody ( o , "city" , city ) ; addBody ( o , "country" , country ) ; addBody ( o , "zipcode" , zipcode ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhMondialRelayReturn . class ) ;
public class NeedlessMemberCollectionSynchronization { /** * sets the source line annotation of a store to a collection if that collection is synchronized . */ private void processCollectionStore ( ) { } }
String fieldClassName = getDottedClassConstantOperand ( ) ; if ( fieldClassName . equals ( className ) && ( stack . getStackDepth ( ) > 0 ) ) { OpcodeStack . Item item = stack . getStackItem ( 0 ) ; if ( item . getUserValue ( ) != null ) { String fieldName = getNameConstantOperand ( ) ; if ( fieldName != null ) { FieldInfo fi = collectionFields . get ( fieldName ) ; if ( fi != null ) { fi . getFieldAnnotation ( ) . setSourceLines ( SourceLineAnnotation . fromVisitedInstruction ( this ) ) ; fi . setSynchronized ( ) ; } } } }
public class ExecutorServiceMetrics { /** * Record metrics on the use of an { @ link ExecutorService } . * @ param registry The registry to bind metrics to . * @ param executor The executor to instrument . * @ param executorServiceName Will be used to tag metrics with " name " . * @ param tags Tags to apply to all recorded metrics . * @ return The instrumented executor , proxied . */ public static ExecutorService monitor ( MeterRegistry registry , ExecutorService executor , String executorServiceName , Iterable < Tag > tags ) { } }
new ExecutorServiceMetrics ( executor , executorServiceName , tags ) . bindTo ( registry ) ; return new TimedExecutorService ( registry , executor , executorServiceName , tags ) ;
public class SARLValidator { /** * Report the warnings associated to the casted expressions . * @ param concreteSyntax the type specified into the casted expression . * @ param toType the type specified into the casted expression . * @ param fromType the type of the source expression . */ protected void reportCastWarnings ( JvmTypeReference concreteSyntax , LightweightTypeReference toType , LightweightTypeReference fromType ) { } }
if ( ! isIgnored ( OBSOLETE_CAST ) && toType . isAssignableFrom ( fromType ) ) { addIssue ( MessageFormat . format ( Messages . SARLValidator_96 , fromType . getHumanReadableName ( ) , toType . getHumanReadableName ( ) ) , concreteSyntax , OBSOLETE_CAST ) ; }
public class GregorianCalendar { /** * Return the maximum value that this field could have , given the current date . * For example , with the date " Feb 3 , 1997 " and the DAY _ OF _ MONTH field , the actual * maximum would be 28 ; for " Feb 3 , 1996 " it s 29 . Similarly for a Hebrew calendar , * for some years the actual maximum for MONTH is 12 , and for others 13. */ public int getActualMaximum ( int field ) { } }
/* It is a known limitation that the code here ( and in getActualMinimum ) * won ' t behave properly at the extreme limits of GregorianCalendar ' s * representable range ( except for the code that handles the YEAR * field ) . That ' s because the ends of the representable range are at * odd spots in the year . For calendars with the default Gregorian * cutover , these limits are Sun Dec 02 16:47:04 GMT 292269055 BC to Sun * Aug 17 07:12:55 GMT 292278994 AD , somewhat different for non - GMT * zones . As a result , if the calendar is set to Aug 1 292278994 AD , * the actual maximum of DAY _ OF _ MONTH is 17 , not 30 . If the date is Mar * 31 in that year , the actual maximum month might be Jul , whereas is * the date is Mar 15 , the actual maximum might be Aug - - depending on * the precise semantics that are desired . Similar considerations * affect all fields . Nonetheless , this effect is sufficiently arcane * that we permit it , rather than complicating the code to handle such * intricacies . - liu 8/20/98 * UPDATE : No longer true , since we have pulled in the limit values on * the year . - Liu 11/6/00 */ switch ( field ) { case YEAR : /* The year computation is no different , in principle , from the * others , however , the range of possible maxima is large . In * addition , the way we know we ' ve exceeded the range is different . * For these reasons , we use the special case code below to handle * this field . * The actual maxima for YEAR depend on the type of calendar : * Gregorian = May 17 , 292275056 BC - Aug 17 , 292278994 AD * Julian = Dec 2 , 292269055 BC - Jan 3 , 292272993 AD * Hybrid = Dec 2 , 292269055 BC - Aug 17 , 292278994 AD * We know we ' ve exceeded the maximum when either the month , date , * time , or era changes in response to setting the year . We don ' t * check for month , date , and time here because the year and era are * sufficient to detect an invalid year setting . 
NOTE : If code is * added to check the month and date in the future for some reason , * Feb 29 must be allowed to shift to Mar 1 when setting the year . */ { Calendar cal = ( Calendar ) clone ( ) ; cal . setLenient ( true ) ; int era = cal . get ( ERA ) ; Date d = cal . getTime ( ) ; /* Perform a binary search , with the invariant that lowGood is a * valid year , and highBad is an out of range year . */ int lowGood = LIMITS [ YEAR ] [ 1 ] ; int highBad = LIMITS [ YEAR ] [ 2 ] + 1 ; while ( ( lowGood + 1 ) < highBad ) { int y = ( lowGood + highBad ) / 2 ; cal . set ( YEAR , y ) ; if ( cal . get ( YEAR ) == y && cal . get ( ERA ) == era ) { lowGood = y ; } else { highBad = y ; cal . setTime ( d ) ; // Restore original fields } } return lowGood ; } default : return super . getActualMaximum ( field ) ; }
public class SSLSocketFactoryProvider { /** * Get the SSL socket factory . * @ return the { @ link SSLSocketFactory } object */ public SSLSocketFactory getSSLSockectFactory ( ) { } }
SSLContext context = null ; try { context = SSLContext . getInstance ( "TLS" ) ; } catch ( NoSuchAlgorithmException e ) { throw new VOMSError ( e . getMessage ( ) , e ) ; } KeyManager [ ] keyManagers = new KeyManager [ ] { credential . getKeyManager ( ) } ; X509TrustManager trustManager = SocketFactoryCreator . getSSLTrustManager ( validator ) ; TrustManager [ ] trustManagers = new TrustManager [ ] { trustManager } ; SecureRandom secureRandom = null ; /* http : / / bugs . sun . com / view _ bug . do ? bug _ id = 6202721 */ /* * Use new SecureRandom instead of SecureRandom . getInstance ( " SHA1PRNG " ) to * avoid unnecessary blocking */ secureRandom = new SecureRandom ( ) ; try { context . init ( keyManagers , trustManagers , secureRandom ) ; } catch ( KeyManagementException e ) { throw new VOMSError ( e . getMessage ( ) , e ) ; } return context . getSocketFactory ( ) ;
public class PersistenceTypeConversion { /** * Convert object from persistence to be used in configuration . * @ param value Persisted value * @ param parameterType Parameter type * @ return Configured value */ public static Object fromPersistenceType ( Object value , Class < ? > parameterType ) { } }
if ( ! isTypeConversionRequired ( parameterType ) ) { return value ; } if ( Map . class . isAssignableFrom ( parameterType ) && ( value instanceof String [ ] ) ) { String [ ] rows = ( String [ ] ) value ; Map < String , String > map = new LinkedHashMap < > ( ) ; for ( int i = 0 ; i < rows . length ; i ++ ) { String [ ] keyValue = ConversionStringUtils . splitPreserveAllTokens ( rows [ i ] , KEY_VALUE_DELIMITER . charAt ( 0 ) ) ; if ( keyValue . length == 2 && StringUtils . isNotEmpty ( keyValue [ 0 ] ) ) { String entryKey = keyValue [ 0 ] ; String entryValue = StringUtils . isEmpty ( keyValue [ 1 ] ) ? null : keyValue [ 1 ] ; map . put ( ConversionStringUtils . decodeString ( entryKey ) , ConversionStringUtils . decodeString ( entryValue ) ) ; } } return map ; } throw new IllegalArgumentException ( "Type conversion not supported: " + parameterType . getName ( ) ) ;
public class HtmlExample { /** * { @ inheritDoc } */ public void print ( PrintWriter out ) { } }
out . write ( lead ) ; printStartTag ( out ) ; if ( child != null ) child . print ( out ) ; else out . write ( text ) ; out . write ( endTag ) ; if ( sibling != null ) sibling . print ( out ) ; else out . write ( tail ) ;
public class Mapper { /** * Create a new Map by mapping all keys from the original map and maintaining the original value . * @ param mapper a Mapper to map the keys * @ param map a Map * @ param allowNull allow null values * @ return a new Map with keys mapped */ public static Map mapKeys ( Mapper mapper , Map map , boolean allowNull ) { } }
HashMap h = new HashMap ( ) ; for ( Object e : map . entrySet ( ) ) { Map . Entry entry = ( Map . Entry ) e ; Object o = mapper . map ( entry . getKey ( ) ) ; if ( allowNull || o != null ) { h . put ( o , entry . getValue ( ) ) ; } } return h ;
public class CmsDeleteOUDialog { /** * Initialized the dialog . < p > * @ param cms CmsObject * @ param window window * @ param app */ private void init ( CmsObject cms , final Window window , final CmsAccountsApp app ) { } }
CmsVaadinUtils . readAndLocalizeDesign ( this , CmsVaadinUtils . getWpMessagesForCurrentLocale ( ) , null ) ; try { displayResourceInfoDirectly ( Collections . singletonList ( CmsAccountsApp . getOUInfo ( OpenCms . getOrgUnitManager ( ) . readOrganizationalUnit ( A_CmsUI . getCmsObject ( ) , m_ouName ) ) ) ) ; } catch ( CmsException e ) { LOG . error ( "Unable to read OU" , e ) ; } m_icon . setContentMode ( ContentMode . HTML ) ; m_icon . setValue ( FontOpenCms . WARNING . getHtml ( ) ) ; m_cms = cms ; m_okButton . addClickListener ( new ClickListener ( ) { private static final long serialVersionUID = - 7845894751587879028L ; public void buttonClick ( ClickEvent event ) { deletePrincipal ( ) ; window . close ( ) ; app . reload ( ) ; } } ) ; m_cancelButton . addClickListener ( new ClickListener ( ) { private static final long serialVersionUID = 6649262870116199591L ; public void buttonClick ( ClickEvent event ) { window . close ( ) ; } } ) ;
public class LdapDao { /** * Inserts an entry into the DIT . If the entry exists , it is overwritten . Overwriting leaves is straight forward . * Overwriting inner nodes will first delete the node ' s entire subtree , then overwrite the node . Throws * { @ link MissingParentException } if an ancestor of the entry is missing . */ public void storeOverwriteExisting ( Entry entry ) throws MissingParentException { } }
try { deleteSubtreeIncludingRoot ( entry . getDn ( ) ) ; } catch ( NoSuchNodeException e ) { LOGGER . debug ( "nothing to overwrite" ) ; } finally { try { store ( entry ) ; } catch ( EntryAlreadyExistsException e ) { LOGGER . warn ( "should never reach here - entry should have been deleted" ) ; } }
public class FieldInfo { /** * Convert and move string to this field . * Override this method to convert the String to the actual Physical Data Type . * @ param bState the state to set the data to . * @ param bDisplayOption Display the data on the screen if true . * @ param iMoveMode INIT , SCREEN , or READ move mode . * @ return The error code ( or NORMAL _ RETURN ) . */ public int setString ( String strString , boolean bDisplayOption , int iMoveMode ) // init this field override for other value { } }
try { Object objData = Converter . convertObjectToDatatype ( strString , this . getDataClass ( ) , null , m_ibScale ) ; if ( objData == null ) if ( this . getDataClass ( ) != Boolean . class ) if ( ! ( Number . class . isAssignableFrom ( this . getDataClass ( ) ) ) ) if ( this . getDataClass ( ) != java . util . Date . class ) objData = Constants . BLANK ; // To set a null internally , you must call setData directly return this . setData ( objData , bDisplayOption , iMoveMode ) ; } catch ( Exception ex ) { String strError = ex . getMessage ( ) ; if ( strError == null ) strError = ex . getClass ( ) . getName ( ) ; if ( this . getRecord ( ) != null ) if ( this . getRecord ( ) . getTask ( ) != null ) return this . getRecord ( ) . getTask ( ) . setLastError ( strError ) ; return Constants . ERROR_RETURN ; }
public class InjectorImpl { /** * Returns the current inject manager . */ public static InjectorImpl current ( ClassLoader loader ) { } }
if ( loader instanceof DynamicClassLoader ) { return _localManager . getLevel ( loader ) ; } else { SoftReference < InjectorImpl > injectRef = _loaderManagerMap . get ( loader ) ; if ( injectRef != null ) { return injectRef . get ( ) ; } else { return null ; } }
public class PriorityQueue { /** * Increases the capacity of the array . * @ param minCapacity the desired minimum capacity */ private void growToSize ( int minCapacity ) { } }
if ( minCapacity < 0 ) // overflow throw new GdxRuntimeException ( "Capacity upper limit exceeded." ) ; int oldCapacity = queue . length ; // Double size if small ; else grow by 50% int newCapacity = ( int ) ( ( oldCapacity < 64 ) ? ( ( oldCapacity + 1 ) * CAPACITY_RATIO_HI ) : ( oldCapacity * CAPACITY_RATIO_LOW ) ) ; if ( newCapacity < 0 ) // overflow newCapacity = Integer . MAX_VALUE ; if ( newCapacity < minCapacity ) newCapacity = minCapacity ; Object [ ] newQueue = new Object [ newCapacity ] ; System . arraycopy ( queue , 0 , newQueue , 0 , size ) ; queue = newQueue ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertObjectOffsetObjTpeToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class NodeImpl { /** * { @ inheritDoc } */ public NodeType getPrimaryNodeType ( ) throws RepositoryException { } }
checkValid ( ) ; ExtendedNodeTypeManager nodeTypeManager = ( ExtendedNodeTypeManager ) session . getWorkspace ( ) . getNodeTypeManager ( ) ; return nodeTypeManager . findNodeType ( nodeData ( ) . getPrimaryTypeName ( ) ) ;
public class ListIndexRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListIndexRequest listIndexRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( listIndexRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listIndexRequest . getDirectoryArn ( ) , DIRECTORYARN_BINDING ) ; protocolMarshaller . marshall ( listIndexRequest . getRangesOnIndexedValues ( ) , RANGESONINDEXEDVALUES_BINDING ) ; protocolMarshaller . marshall ( listIndexRequest . getIndexReference ( ) , INDEXREFERENCE_BINDING ) ; protocolMarshaller . marshall ( listIndexRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listIndexRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listIndexRequest . getConsistencyLevel ( ) , CONSISTENCYLEVEL_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class LocalAccountPersonImpl { /** * / * ( non - Javadoc ) * @ see org . apereo . portal . persondir . jpa . ILocalAccountPersonAttribute # setAttributes ( java . util . Map ) */ @ Override public void setAttributes ( Map < String , List < String > > attributes ) { } }
// Tries to modify as many of the existing attributes in place to reduce DB churn in // hibernate // Make a local copy so we don ' t edit the original reference attributes = new LinkedHashMap < String , List < String > > ( attributes ) ; for ( final Iterator < LocalAccountPersonAttributeImpl > attributesItr = this . attributes . iterator ( ) ; attributesItr . hasNext ( ) ; ) { final LocalAccountPersonAttributeImpl attribute = attributesItr . next ( ) ; // Remove the new values for the attribute from the input map final String name = attribute . getName ( ) ; final List < String > newValues = attributes . remove ( name ) ; // If no new values remove the attribute if ( newValues == null ) { attributesItr . remove ( ) ; } // Otherwise update the existing values else { attribute . setValues ( new ArrayList < String > ( newValues ) ) ; } } // Add any remaining new attributes to the list for ( final Map . Entry < String , List < String > > attribute : attributes . entrySet ( ) ) { final String name = attribute . getKey ( ) ; final List < String > values = attribute . getValue ( ) ; this . attributes . add ( new LocalAccountPersonAttributeImpl ( name , values ) ) ; }
public class MethodSignature { /** * Makes a descriptor for a given method . * @ param paramTypes parameter types . * @ return method descriptor */ private String makeSignature ( List < ASTType > paramTypes ) { } }
StringBuilder builder = new StringBuilder ( ) ; builder . append ( '(' ) ; for ( ASTType paramType : paramTypes ) { makeTypeDescriptor ( builder , paramType ) ; } builder . append ( ')' ) ; return builder . toString ( ) ;
public class JDBC4DatabaseMetaData { /** * Retrieves whether this database supports the given transaction isolation level . */ @ Override public boolean supportsTransactionIsolationLevel ( int level ) throws SQLException { } }
checkClosed ( ) ; if ( level == Connection . TRANSACTION_SERIALIZABLE ) return true ; return false ;
public class CssFormatter { /** * Create a new formatter for a single rule with optional output . * @ param output * optional target * @ return a new formatter * @ throws LessException * if any error occur . */ private CssFormatter copy ( @ Nullable StringBuilder output ) { } }
try { CssFormatter formatter = ( CssFormatter ) clone ( ) ; formatter . output = output == null ? state . pool . get ( ) : output ; formatter . insets = state . pool . get ( ) ; formatter . insets . append ( insets ) ; return formatter ; } catch ( CloneNotSupportedException ex ) { throw new LessException ( ex ) ; }
public class RubyErrHandler { /** * rb _ syck _ err _ handler */ public void handle ( Parser p , String msg ) { } }
int endl = p . cursor ; while ( p . buffer . buffer [ endl ] != 0 && p . buffer . buffer [ endl ] != '\n' ) { endl ++ ; } try { int lp = p . lineptr ; if ( lp < 0 ) { lp = 0 ; } int len = endl - lp ; if ( len < 0 ) { len = 0 ; } String line = new String ( p . buffer . buffer , lp , len , "ISO-8859-1" ) ; String m1 = msg + " on line " + p . linect + ", col " + ( p . cursor - lp ) + ": `" + line + "'" ; throw runtime . newArgumentError ( m1 ) ; } catch ( java . io . UnsupportedEncodingException e ) { }
public class Functions { /** * Runs random UUID function with arguments . * @ return */ public static String randomUUID ( TestContext context ) { } }
return new RandomUUIDFunction ( ) . execute ( Collections . < String > emptyList ( ) , context ) ;
public class SpecParser { /** * IDEA : check for misspellings other than wrong capitalization */ private boolean isFixtureMethod ( MethodNode method ) { } }
String name = method . getName ( ) ; for ( String fmName : FIXTURE_METHODS ) { if ( ! fmName . equalsIgnoreCase ( name ) ) continue ; // assertion : is ( meant to be ) a fixture method , so we ' ll return true in the end if ( method . isStatic ( ) ) errorReporter . error ( method , "Fixture methods must not be static" ) ; if ( ! fmName . equals ( name ) ) errorReporter . error ( method , "Misspelled '%s()' method (wrong capitalization)" , fmName ) ; return true ; } return false ;
public class RoboSputnik { /** * NOTE : originally in RobolectricTestRunner getConfig takes Method as parameter * and is a bit more complicated */ public Config getConfig ( Class < ? > clazz ) { } }
Config config = DEFAULT_CONFIG ; Config globalConfig = Config . Implementation . fromProperties ( getConfigProperties ( ) ) ; if ( globalConfig != null ) { config = new Config . Implementation ( config , globalConfig ) ; } Config classConfig = clazz . getAnnotation ( Config . class ) ; if ( classConfig != null ) { config = new Config . Implementation ( config , classConfig ) ; } return config ;
public class ImgUtil { /** * 图像类型转换 : GIF = 》 JPG 、 GIF = 》 PNG 、 PNG = 》 JPG 、 PNG = 》 GIF ( X ) 、 BMP = 》 PNG < br > * 此方法并不关闭流 * @ param srcStream 源图像流 * @ param formatName 包含格式非正式名称的 String : 如JPG 、 JPEG 、 GIF等 * @ param destStream 目标图像输出流 * @ since 3.0.9 */ public static void convert ( InputStream srcStream , String formatName , OutputStream destStream ) { } }
write ( read ( srcStream ) , formatName , getImageOutputStream ( destStream ) ) ;
public class PdfCollectionItem { /** * Sets the value of the collection item . * @ param d */ public void addItem ( String key , PdfDate d ) { } }
PdfName fieldname = new PdfName ( key ) ; PdfCollectionField field = ( PdfCollectionField ) schema . get ( fieldname ) ; if ( field . fieldType == PdfCollectionField . DATE ) { put ( fieldname , d ) ; }
public class SClassDefinitionAssistantInterpreter { public PStm findStatement ( ClassListInterpreter classes , File file , int lineno ) { } }
for ( SClassDefinition c : classes ) { if ( c . getName ( ) . getLocation ( ) . getFile ( ) . equals ( file ) ) { PStm stmt = findStatement ( c , lineno ) ; if ( stmt != null ) { return stmt ; } } } return null ;
public class LambdaDslObject { /** * Attribute that must match the given timestamp format * @ param name attribute name * @ param format timestamp format * @ param example example date and time to use for generated bodies */ public LambdaDslObject timestamp ( String name , String format , Instant example ) { } }
object . timestamp ( name , format , example ) ; return this ;
public class DOM2DTMdefaultNamespaceDeclarationNode { /** * DOM Level 3 - Experimental : * Look up the namespace URI associated to the given prefix , starting from this node . * Use lookupNamespaceURI ( null ) to lookup the default namespace * @ param namespaceURI * @ return th URI for the namespace * @ since DOM Level 3 */ public String lookupNamespaceURI ( String specifiedPrefix ) { } }
short type = this . getNodeType ( ) ; switch ( type ) { case Node . ELEMENT_NODE : { String namespace = this . getNamespaceURI ( ) ; String prefix = this . getPrefix ( ) ; if ( namespace != null ) { // REVISIT : is it possible that prefix is empty string ? if ( specifiedPrefix == null && prefix == specifiedPrefix ) { // looking for default namespace return namespace ; } else if ( prefix != null && prefix . equals ( specifiedPrefix ) ) { // non default namespace return namespace ; } } if ( this . hasAttributes ( ) ) { NamedNodeMap map = this . getAttributes ( ) ; int length = map . getLength ( ) ; for ( int i = 0 ; i < length ; i ++ ) { Node attr = map . item ( i ) ; String attrPrefix = attr . getPrefix ( ) ; String value = attr . getNodeValue ( ) ; namespace = attr . getNamespaceURI ( ) ; if ( namespace != null && namespace . equals ( "http://www.w3.org/2000/xmlns/" ) ) { // at this point we are dealing with DOM Level 2 nodes only if ( specifiedPrefix == null && attr . getNodeName ( ) . equals ( "xmlns" ) ) { // default namespace return value ; } else if ( attrPrefix != null && attrPrefix . equals ( "xmlns" ) && attr . getLocalName ( ) . equals ( specifiedPrefix ) ) { // non default namespace return value ; } } } } /* NodeImpl ancestor = ( NodeImpl ) getElementAncestor ( this ) ; if ( ancestor ! = null ) { return ancestor . lookupNamespaceURI ( specifiedPrefix ) ; */ return null ; } /* case Node . DOCUMENT _ NODE : { return ( ( NodeImpl ) ( ( Document ) this ) . getDocumentElement ( ) ) . lookupNamespaceURI ( specifiedPrefix ) ; */ case Node . ENTITY_NODE : case Node . NOTATION_NODE : case Node . DOCUMENT_FRAGMENT_NODE : case Node . DOCUMENT_TYPE_NODE : // type is unknown return null ; case Node . ATTRIBUTE_NODE : { if ( this . getOwnerElement ( ) . getNodeType ( ) == Node . ELEMENT_NODE ) { return getOwnerElement ( ) . 
lookupNamespaceURI ( specifiedPrefix ) ; } return null ; } default : { /* NodeImpl ancestor = ( NodeImpl ) getElementAncestor ( this ) ; if ( ancestor ! = null ) { return ancestor . lookupNamespaceURI ( specifiedPrefix ) ; */ return null ; } }
public class Context { /** * Close this contexts , meaning this context object is removed as thread * context . < br / > * If not all connection are closed , all connection are closed . */ public void close ( ) { } }
Context . LOG . debug ( "close context for {}" , this . person ) ; QueryCache . cleanByKey ( getRequestId ( ) ) ; if ( getThreadLocal ( ) . get ( ) != null && getThreadLocal ( ) . get ( ) == this ) { getThreadLocal ( ) . set ( null ) ; }
public class DRUMSReader {
    /**
     * Opens all bucket files referenced by the underlying HashFunction and
     * caches per-bucket element counts.
     *
     * For each bucket i, {@code files[i]} holds the opened
     * {@code HeaderIndexFile} (or stays null when the file does not exist),
     * and {@code cumulativeElementsPerFile[i]} accumulates the running total
     * of elements up to and including bucket i.
     *
     * @throws FileLockException if a bucket file cannot be locked
     * @throws IOException on I/O failure while opening a bucket file
     */
    @ SuppressWarnings ( "unchecked" )
    public void openFiles ( ) throws FileLockException , IOException {
        files = new HeaderIndexFile [ numberOfBuckets ] ;
        cumulativeElementsPerFile = new int [ numberOfBuckets ] ;
        // Index of the last bucket whose file actually exists; used below to
        // size the destination buffer from that file's chunk size.
        int lastfile = 0 ;
        String path = drums . getDatabaseDirectory ( ) ;
        for ( int i = 0 ; i < numberOfBuckets ; i ++ ) {
            String filename = path + "/" + drums . getHashFunction ( ) . getFilename ( i ) ;
            if ( ! new File ( filename ) . exists ( ) ) {
                // Missing bucket contributes zero elements.
                cumulativeElementsPerFile [ i ] = 0 ;
            } else {
                lastfile = i ;
                files [ i ] = new HeaderIndexFile < Data > ( filename , 10 , drums . gp ) ;
                // Element count = used content bytes / fixed element size.
                cumulativeElementsPerFile [ i ] = ( int ) ( files [ i ] . getFilledUpFromContentStart ( ) / elementSize ) ;
            }
            if ( i > 0 ) {
                // Turn per-bucket counts into a cumulative (prefix-sum) array.
                cumulativeElementsPerFile [ i ] += cumulativeElementsPerFile [ i - 1 ] ;
            }
        }
        // NOTE(review): if no bucket file exists at all, files[lastfile] is
        // null here and this line throws a NullPointerException — confirm
        // callers guarantee at least one bucket file is present.
        destBuffer = ByteBuffer . allocate ( ( int ) files [ lastfile ] . getChunkSize ( ) ) ;
        filesAreOpened = true ;
    }
}
public class HttpHandler {
    /**
     * Creates the value to inject for an action parameter.
     *
     * Resolution order (first match wins): the HTTP {@code Context} itself;
     * any type handled by a registered {@code ParameterFactory}; well-known
     * HTTP objects ({@code Request}, {@code SessionCookie},
     * {@code FlashCookie}, {@code Cookie} by name, {@code Route},
     * {@code Reader}); then, by parameter name, a request-scope value and
     * finally HTTP headers converted to the target type. Collection-typed
     * parameters with no value resolve to empty collections; anything else
     * resolves to null.
     *
     * @param argument the action parameter being injected
     * @param context  the current HTTP context
     * @param engine   the converter / factory registry
     * @return the created value, possibly null
     */
    @ Override
    public Object create ( ActionParameter argument , Context context , ParameterFactories engine ) {
        if ( argument . getRawType ( ) . equals ( Context . class ) ) {
            return context ;
        }
        // Check whether we have a ParameterFactory that can handle the creation.
        Set < Class > handled = engine . getTypesHandledByFactories ( ) ;
        if ( handled . contains ( argument . getRawType ( ) ) ) {
            return engine . newInstance ( context , argument . getRawType ( ) ) ;
        }
        if ( argument . getRawType ( ) . equals ( Request . class ) ) {
            return context . request ( ) ;
        }
        if ( argument . getRawType ( ) . equals ( SessionCookie . class ) ) {
            return context . session ( ) ;
        }
        if ( argument . getRawType ( ) . equals ( FlashCookie . class ) ) {
            return context . flash ( ) ;
        }
        if ( argument . getRawType ( ) . equals ( Cookie . class ) ) {
            // A cookie injection requires an explicit name on the annotation.
            if ( ! Strings . isNullOrEmpty ( argument . getName ( ) ) ) {
                return context . cookie ( argument . getName ( ) ) ;
            } else {
                throw new IllegalArgumentException ( "Missing cookie's name set in the @HttpParameter annotation" ) ;
            }
        }
        if ( argument . getRawType ( ) . equals ( Route . class ) ) {
            return context . route ( ) ;
        }
        if ( argument . getRawType ( ) . equals ( BufferedReader . class ) || argument . getRawType ( ) . equals ( Reader . class ) ) {
            try {
                return context . reader ( ) ;
            } catch ( IOException e ) {
                throw new IllegalArgumentException ( "Cannot inject the reader object in the @HttpParameter injected " + "parameter" , e ) ;
            }
        }
        // Ran out of possibilities based on the type, check for the request scope and then HTTP headers.
        if ( Strings . isNullOrEmpty ( argument . getName ( ) ) ) {
            throw new IllegalArgumentException ( "Cannot inject the value of a HTTP header and request scope value in " + "the @HttpParameter - header's name not defined" ) ;
        } else {
            final Object requestScopeValue = context . request ( ) . data ( ) . get ( argument . getName ( ) ) ;
            if ( requestScopeValue != null ) {
                return requestScopeValue ;
            } else {
                if ( context . header ( argument . getName ( ) ) != null || argument . getDefaultValue ( ) != null ) {
                    // Convert the header value(s) (or the default) to the target type.
                    return engine . convertValues ( context . headers ( argument . getName ( ) ) , argument . getRawType ( ) , argument . getGenericType ( ) , argument . getDefaultValue ( ) ) ;
                } else {
                    // No value: collection types get empty collections, others null.
                    if ( List . class . isAssignableFrom ( argument . getRawType ( ) ) ) {
                        return Collections . emptyList ( ) ;
                    }
                    if ( Set . class . isAssignableFrom ( argument . getRawType ( ) ) ) {
                        return Collections . emptySet ( ) ;
                    }
                    // NOTE(review): this tests against java.util.Collections (the
                    // utility class), which no parameter type can extend —
                    // presumably java.util.Collection was intended; confirm.
                    if ( Collections . class . isAssignableFrom ( argument . getRawType ( ) ) ) {
                        return Collections . emptyList ( ) ;
                    }
                    return null ;
                }
            }
        }
    }
}
public class AdRule {
    /**
     * Gets the pre-roll value for this AdRule.
     *
     * @return this {@link AdRule} object's pre-roll slot; this attribute is
     *         required.
     */
    public com . google . api . ads . admanager . axis . v201805 . BaseAdRuleSlot getPreroll ( ) {
        return preroll ;
    }
}
public class ObjectFactory {
    /**
     * Creates a {@link JAXBElement}{@code <}{@link String}{@code >} for the
     * {@code evidence} element in the XBEL namespace.
     *
     * @param value the element's string content
     * @return a new JAXBElement wrapping the value under the
     *         {@code evidence} qualified name, with no declared scope
     */
    @ XmlElementDecl ( namespace = "http://belframework.org/schema/1.0/xbel" , name = "evidence" )
    public JAXBElement < String > createEvidence ( String value ) {
        return new JAXBElement < String > ( _Evidence_QNAME , String . class , null , value ) ;
    }
}
public class CachingCodecRegistry {
    /**
     * Creates a codec on a cache miss.
     *
     * Either {@code cqlType} or {@code javaType} may be null (not both):
     * a single-argument overload handles those cases. When both are known,
     * container CQL types (list/set/map) are matched against the Java type's
     * raw class, recursing on the element codecs — using the declared generic
     * type arguments when the Java type is parameterized, otherwise deriving
     * the element codec from the CQL type alone. Tuple, UDT and custom types
     * map to their dedicated codecs.
     *
     * @param cqlType         the CQL type, may be null
     * @param javaType        the Java type, may be null
     * @param isJavaCovariant whether Java subtypes of {@code javaType} are acceptable
     * @return the newly created codec
     * @throws CodecNotFoundException when no combination matches
     */
    protected TypeCodec < ? > createCodec ( DataType cqlType , GenericType < ? > javaType , boolean isJavaCovariant ) {
        LOG . trace ( "[{}] Cache miss, creating codec" , logPrefix ) ;
        // Either type can be null, but not both.
        if ( javaType == null ) {
            assert cqlType != null ;
            return createCodec ( cqlType ) ;
        } else if ( cqlType == null ) {
            return createCodec ( javaType , isJavaCovariant ) ;
        } else {
            // Both non-null.
            TypeToken < ? > token = javaType . __getToken ( ) ;
            if ( cqlType instanceof ListType && List . class . isAssignableFrom ( token . getRawType ( ) ) ) {
                DataType elementCqlType = ( ( ListType ) cqlType ) . getElementType ( ) ;
                TypeCodec < Object > elementCodec ;
                if ( token . getType ( ) instanceof ParameterizedType ) {
                    // Use the declared List<E> element type for the inner codec.
                    Type [ ] typeArguments = ( ( ParameterizedType ) token . getType ( ) ) . getActualTypeArguments ( ) ;
                    GenericType < ? > elementJavaType = GenericType . of ( typeArguments [ 0 ] ) ;
                    elementCodec = uncheckedCast ( codecFor ( elementCqlType , elementJavaType , isJavaCovariant ) ) ;
                } else {
                    elementCodec = codecFor ( elementCqlType ) ;
                }
                return TypeCodecs . listOf ( elementCodec ) ;
            } else if ( cqlType instanceof SetType && Set . class . isAssignableFrom ( token . getRawType ( ) ) ) {
                DataType elementCqlType = ( ( SetType ) cqlType ) . getElementType ( ) ;
                TypeCodec < Object > elementCodec ;
                if ( token . getType ( ) instanceof ParameterizedType ) {
                    // Use the declared Set<E> element type for the inner codec.
                    Type [ ] typeArguments = ( ( ParameterizedType ) token . getType ( ) ) . getActualTypeArguments ( ) ;
                    GenericType < ? > elementJavaType = GenericType . of ( typeArguments [ 0 ] ) ;
                    elementCodec = uncheckedCast ( codecFor ( elementCqlType , elementJavaType , isJavaCovariant ) ) ;
                } else {
                    elementCodec = codecFor ( elementCqlType ) ;
                }
                return TypeCodecs . setOf ( elementCodec ) ;
            } else if ( cqlType instanceof MapType && Map . class . isAssignableFrom ( token . getRawType ( ) ) ) {
                DataType keyCqlType = ( ( MapType ) cqlType ) . getKeyType ( ) ;
                DataType valueCqlType = ( ( MapType ) cqlType ) . getValueType ( ) ;
                TypeCodec < Object > keyCodec ;
                TypeCodec < Object > valueCodec ;
                if ( token . getType ( ) instanceof ParameterizedType ) {
                    // Use the declared Map<K, V> arguments for the inner codecs.
                    Type [ ] typeArguments = ( ( ParameterizedType ) token . getType ( ) ) . getActualTypeArguments ( ) ;
                    GenericType < ? > keyJavaType = GenericType . of ( typeArguments [ 0 ] ) ;
                    GenericType < ? > valueJavaType = GenericType . of ( typeArguments [ 1 ] ) ;
                    keyCodec = uncheckedCast ( codecFor ( keyCqlType , keyJavaType , isJavaCovariant ) ) ;
                    valueCodec = uncheckedCast ( codecFor ( valueCqlType , valueJavaType , isJavaCovariant ) ) ;
                } else {
                    keyCodec = codecFor ( keyCqlType ) ;
                    valueCodec = codecFor ( valueCqlType ) ;
                }
                return TypeCodecs . mapOf ( keyCodec , valueCodec ) ;
            } else if ( cqlType instanceof TupleType && TupleValue . class . isAssignableFrom ( token . getRawType ( ) ) ) {
                return TypeCodecs . tupleOf ( ( TupleType ) cqlType ) ;
            } else if ( cqlType instanceof UserDefinedType && UdtValue . class . isAssignableFrom ( token . getRawType ( ) ) ) {
                return TypeCodecs . udtOf ( ( UserDefinedType ) cqlType ) ;
            } else if ( cqlType instanceof CustomType && ByteBuffer . class . isAssignableFrom ( token . getRawType ( ) ) ) {
                return TypeCodecs . custom ( cqlType ) ;
            }
            throw new CodecNotFoundException ( cqlType , javaType ) ;
        }
    }
}
public class FirstFitSchedulingPolicy { /** * Checking from nextIndex , choose the first worker that fits to schedule the tasklet onto . * @ param tasklet to schedule * @ return the next worker that has enough resources for the tasklet */ @ Override public Optional < String > trySchedule ( final Tasklet tasklet ) { } }
for ( int i = 0 ; i < idList . size ( ) ; i ++ ) { final int index = ( nextIndex + i ) % idList . size ( ) ; final String workerId = idList . get ( index ) ; if ( idLoadMap . get ( workerId ) < workerCapacity ) { nextIndex = ( index + 1 ) % idList . size ( ) ; return Optional . of ( workerId ) ; } } return Optional . empty ( ) ;
public class ContextFactory { /** * Returns a " null " context - one which does nothing . */ public static synchronized MetricsContext getNullContext ( String contextName ) { } }
MetricsContext nullContext = nullContextMap . get ( contextName ) ; if ( nullContext == null ) { nullContext = new NullContext ( ) ; nullContextMap . put ( contextName , nullContext ) ; } return nullContext ;
public class AmazonDLMClient { /** * Gets summary information about all or the specified data lifecycle policies . * To get complete information about a policy , use < a > GetLifecyclePolicy < / a > . * @ param getLifecyclePoliciesRequest * @ return Result of the GetLifecyclePolicies operation returned by the service . * @ throws ResourceNotFoundException * A requested resource was not found . * @ throws InvalidRequestException * Bad request . The request is missing required parameters or has invalid parameters . * @ throws InternalServerException * The service failed in an unexpected way . * @ throws LimitExceededException * The request failed because a limit was exceeded . * @ sample AmazonDLM . GetLifecyclePolicies * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / dlm - 2018-01-12 / GetLifecyclePolicies " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GetLifecyclePoliciesResult getLifecyclePolicies ( GetLifecyclePoliciesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetLifecyclePolicies ( request ) ;
public class CommerceTaxFixedRateLocalServiceBaseImpl {
    /**
     * Deletes the commerce tax fixed rate from the database. Also notifies
     * the appropriate model listeners (via the persistence layer) and
     * triggers an index delete through the {@code @Indexable} annotation.
     *
     * @param commerceTaxFixedRate the commerce tax fixed rate to remove
     * @return the commerce tax fixed rate that was removed
     */
    @ Indexable ( type = IndexableType . DELETE )
    @ Override
    public CommerceTaxFixedRate deleteCommerceTaxFixedRate ( CommerceTaxFixedRate commerceTaxFixedRate ) {
        return commerceTaxFixedRatePersistence . remove ( commerceTaxFixedRate ) ;
    }
}
public class InvalidLayerPartException {
    /**
     * Sets the upload ID associated with the exception.
     *
     * @param uploadId the upload ID associated with the exception
     */
    @ com . fasterxml . jackson . annotation . JsonProperty ( "uploadId" )
    public void setUploadId ( String uploadId ) {
        this . uploadId = uploadId ;
    }
}
public class GenericIHEAuditEventMessage {
    /**
     * Adds an Active Participant block representing the destination
     * participant, with the DICOM "Destination" role id code.
     *
     * @param userId      the Active Participant's User ID
     * @param altUserId   the Active Participant's Alternate UserID
     * @param userName    the Active Participant's UserName
     * @param networkId   the Active Participant's Network Access Point ID
     * @param isRequestor whether the participant represents the requestor
     */
    public void addDestinationActiveParticipant ( String userId , String altUserId , String userName , String networkId , boolean isRequestor ) {
        // Single fixed role: the DICOM Destination role id code.
        addActiveParticipant ( userId , altUserId , userName , isRequestor , Collections . singletonList ( new DICOMActiveParticipantRoleIdCodes . Destination ( ) ) , networkId ) ;
    }
}
public class JSONObject { /** * Put a key / value pair in the JSONObject , where the value will be a * JSONObject which is produced from a Map . * @ param key A key string . * @ param value A Map value . * @ return this . * @ throws JSONException */ public JSONObject element ( String key , Map value , JsonConfig jsonConfig ) { } }
verifyIsNull ( ) ; if ( value instanceof JSONObject ) { return setInternal ( key , value , jsonConfig ) ; } else { return element ( key , JSONObject . fromObject ( value , jsonConfig ) , jsonConfig ) ; }
public class RecordFile { /** * Sets a value of the specified field in the current record . The type of * the value must be equal to that of the specified field . * @ param fldName * the name of the field * @ param val * the new value for the field */ public void setVal ( String fldName , Constant val ) { } }
if ( tx . isReadOnly ( ) && ! isTempTable ( ) ) throw new UnsupportedOperationException ( ) ; Type fldType = ti . schema ( ) . type ( fldName ) ; Constant v = val . castTo ( fldType ) ; if ( Page . size ( v ) > Page . maxSize ( fldType ) ) throw new SchemaIncompatibleException ( ) ; rp . setVal ( fldName , v ) ;
public class ELImgHtmlTag {
    /**
     * Resets attribute values for tag reuse.
     *
     * Calls {@code super.release()} and then nulls out every EL expression
     * attribute of this tag so a pooled tag instance carries no state from
     * its previous use.
     */
    @ Override
    public void release ( ) {
        super . release ( ) ;
        setBase64Expr ( null ) ;
        setActionExpr ( null ) ;
        setModuleExpr ( null ) ;
        setAlignExpr ( null ) ;
        setAltExpr ( null ) ;
        setAltKeyExpr ( null ) ;
        setBorderExpr ( null ) ;
        setBundleExpr ( null ) ;
        setDirExpr ( null ) ;
        setHeightExpr ( null ) ;
        setHspaceExpr ( null ) ;
        setImageNameExpr ( null ) ;
        setIsmapExpr ( null ) ;
        setLangExpr ( null ) ;
        setLocaleExpr ( null ) ;
        setNameExpr ( null ) ;
        setOnclickExpr ( null ) ;
        setOndblclickExpr ( null ) ;
        setOnkeydownExpr ( null ) ;
        setOnkeypressExpr ( null ) ;
        setOnkeyupExpr ( null ) ;
        setOnmousedownExpr ( null ) ;
        setOnmousemoveExpr ( null ) ;
        setOnmouseoutExpr ( null ) ;
        setOnmouseoverExpr ( null ) ;
        setOnmouseupExpr ( null ) ;
        setPageExpr ( null ) ;
        setPageKeyExpr ( null ) ;
        setParamIdExpr ( null ) ;
        setParamNameExpr ( null ) ;
        setParamPropertyExpr ( null ) ;
        setParamScopeExpr ( null ) ;
        setPropertyExpr ( null ) ;
        setScopeExpr ( null ) ;
        setSrcExpr ( null ) ;
        setSrcKeyExpr ( null ) ;
        setStyleExpr ( null ) ;
        setStyleClassExpr ( null ) ;
        setStyleIdExpr ( null ) ;
        setTitleExpr ( null ) ;
        setTitleKeyExpr ( null ) ;
        setUseLocalEncodingExpr ( null ) ;
        setUsemapExpr ( null ) ;
        setVspaceExpr ( null ) ;
        setWidthExpr ( null ) ;
    }
}
public class UssdCallManager {
    /**
     * Tries to locate a hosted voice app corresponding to the callee/To
     * address. If one is found, begins execution; otherwise returns false.
     *
     * Flow: when a matching number exists, extension pre-hooks decide whether
     * the inbound USSD session is allowed. If allowed, an interpreter is
     * configured (URL from the application's RCML URL or the number's USSD
     * URL, method defaulting to POST, fallback/status-callback settings
     * copied from the number), the actors are started and wired into the SIP
     * application session. If disallowed, a 403 is sent; if no number
     * matches, a 404 is sent.
     *
     * @param request      the inbound SIP request
     * @param accounts     DAO to resolve the owning account
     * @param applications DAO to resolve the USSD application
     * @param number       the matched incoming phone number, may be null
     * @return true when a hosted app was found and started
     * @throws Exception on failure while building or dispatching
     */
    private boolean redirectToHostedVoiceApp ( final SipServletRequest request , final AccountsDao accounts , final ApplicationsDao applications , IncomingPhoneNumber number ) throws Exception {
        boolean isFoundHostedApp = false ;
        // This is a USSD Invite.
        if ( number != null ) {
            ExtensionController ec = ExtensionController . getInstance ( ) ;
            IExtensionFeatureAccessRequest far = new FeatureAccessRequest ( FeatureAccessRequest . Feature . INBOUND_USSD , number . getAccountSid ( ) ) ;
            ExtensionResponse er = ec . executePreInboundAction ( far , extensions ) ;
            if ( er . isAllowed ( ) ) {
                final UssdInterpreterParams . Builder builder = new UssdInterpreterParams . Builder ( ) ;
                builder . setConfiguration ( configuration ) ;
                builder . setStorage ( storage ) ;
                builder . setAccount ( number . getAccountSid ( ) ) ;
                builder . setVersion ( number . getApiVersion ( ) ) ;
                final Account account = accounts . getAccount ( number . getAccountSid ( ) ) ;
                builder . setEmailAddress ( account . getEmailAddress ( ) ) ;
                final Sid sid = number . getUssdApplicationSid ( ) ;
                if ( sid != null ) {
                    // Number points at an application: resolve its RCML URL
                    // through the configured RCML server.
                    final Application application = applications . getApplication ( sid ) ;
                    RcmlserverConfigurationSet rcmlserverConfig = RestcommConfiguration . getInstance ( ) . getRcmlserver ( ) ;
                    RcmlserverResolver resolver = RcmlserverResolver . getInstance ( rcmlserverConfig . getBaseUrl ( ) , rcmlserverConfig . getApiPath ( ) ) ;
                    builder . setUrl ( uriUtils . resolve ( resolver . resolveRelative ( application . getRcmlUrl ( ) ) , number . getAccountSid ( ) ) ) ;
                } else {
                    // Fall back to the USSD URL configured on the number itself.
                    builder . setUrl ( uriUtils . resolve ( number . getUssdUrl ( ) , number . getAccountSid ( ) ) ) ;
                }
                final String ussdMethod = number . getUssdMethod ( ) ;
                if ( ussdMethod == null || ussdMethod . isEmpty ( ) ) {
                    // Default HTTP method when none is configured.
                    builder . setMethod ( "POST" ) ;
                } else {
                    builder . setMethod ( ussdMethod ) ;
                }
                if ( number . getUssdFallbackUrl ( ) != null )
                    builder . setFallbackUrl ( number . getUssdFallbackUrl ( ) ) ;
                builder . setFallbackMethod ( number . getUssdFallbackMethod ( ) ) ;
                builder . setStatusCallback ( number . getStatusCallback ( ) ) ;
                builder . setStatusCallbackMethod ( number . getStatusCallbackMethod ( ) ) ;
                // Spawn the interpreter actor and hand it the inbound call.
                final Props props = UssdInterpreter . props ( builder . build ( ) ) ;
                final ActorRef ussdInterpreter = getContext ( ) . actorOf ( props ) ;
                final ActorRef ussdCall = ussdCall ( ) ;
                ussdCall . tell ( request , self ( ) ) ;
                ussdInterpreter . tell ( new StartInterpreter ( ussdCall ) , self ( ) ) ;
                // Stash the actors on the SIP application session for later lookup.
                SipApplicationSession applicationSession = request . getApplicationSession ( ) ;
                applicationSession . setAttribute ( "UssdCall" , "true" ) ;
                applicationSession . setAttribute ( UssdInterpreter . class . getName ( ) , ussdInterpreter ) ;
                applicationSession . setAttribute ( UssdCall . class . getName ( ) , ussdCall ) ;
                isFoundHostedApp = true ;
                ec . executePostInboundAction ( far , extensions ) ;
            } else {
                // Extension denied the session: notify, answer 403, run post-hook.
                if ( logger . isDebugEnabled ( ) ) {
                    final String errMsg = "Inbound USSD session is not Allowed" ;
                    logger . debug ( errMsg ) ;
                }
                String errMsg = "Inbound USSD session to Number: " + number . getPhoneNumber ( ) + " is not allowed" ;
                sendNotification ( errMsg , 11001 , "warning" , true ) ;
                final SipServletResponse resp = request . createResponse ( SC_FORBIDDEN , "Inbound USSD session is not Allowed" ) ;
                resp . send ( ) ;
                ec . executePostInboundAction ( far , extensions ) ;
                return false ;
            }
        } else {
            // No number registered for this callee: answer 404.
            logger . info ( "USSD Number registration NOT FOUND" ) ;
            request . createResponse ( SipServletResponse . SC_NOT_FOUND ) . send ( ) ;
        }
        return isFoundHostedApp ;
    }
}
public class SchemaSet {
    /**
     * Adds the specified element to this set if it is not already present.
     *
     * Elements are schema ids ({@code Long}); the set is a chained hash
     * table of {@code Entry} nodes. New ids are prepended to their bucket's
     * chain; when a chain would exceed {@code MAX_ENTRY_DEPTH}, the table is
     * resized and the id re-hashed before insertion.
     *
     * Note: this method is synchronized, so two elements cannot be added
     * simultaneously.
     *
     * @param o element to be added to this set (expected to be a Long)
     * @return true if this set did not already contain the specified element
     * @throws ClassCastException   if the element is not a Long
     * @throws NullPointerException if the element is null
     */
    public synchronized boolean add ( Object o ) {
        if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
            SibTr . entry ( this , tc , "add" , debugId ( o ) ) ;
        boolean result = false ;
        /* It should always be an Id so just cast it. If someone is using the  */
        /* class for the wrong purpose they will get a ClassCastException, which */
        /* is permissable. */
        Long id = ( Long ) o ;
        /* NPE is also permissable, if someone is using the class incorrectly. */
        int i = hashToTable ( id , table ) ;
        /* If there is no Entry in the table slot, just add it. */
        if ( table [ i ] == null ) {
            table [ i ] = new Entry ( id ) ;
            result = true ;
        }
        /* Otherwise, we have to search down the Entry list for the Schema Id. */
        else {
            Entry current = table [ i ] ;
            int depth = current . contains ( id , 0 ) ;
            /* If depth > 0, we didn't find the Schema so we must add it. */
            if ( depth > 0 ) {
                /* If the depth searched was less than the MAX, just make a new Entry */
                /* at the head of the list, and put it in the table. */
                if ( depth < MAX_ENTRY_DEPTH ) {
                    Entry newEntry = new Entry ( id , current ) ;
                    table [ i ] = newEntry ;
                    result = true ;
                }
                /* Otherwise, the depth is too big so we must resize first. */
                else {
                    resize ( ) ;
                    /* Now we have to get the new hash value & look in the new table. */
                    i = hashToTable ( id , table ) ;
                    current = table [ i ] ;
                    /* Make the new Entry and add it. */
                    Entry newEntry = new Entry ( id , current ) ;
                    table [ i ] = newEntry ;
                    result = true ;
                }
            }
            /* If depth == 0, the SchemaId is already in the SchemaSet so do nothing. */
            else {
            }
        }
        if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
            SibTr . exit ( this , tc , "add" , result ) ;
        return result ;
    }
}
public class ConvertImage { /** * Converts a { @ link Planar } into the equivalent { @ link InterleavedS8} * @ param input ( Input ) Planar image that is being converted . Not modified . * @ param output ( Optional ) The output image . If null a new image is created . Modified . * @ return Converted image . */ public static InterleavedS8 convert ( Planar < GrayS8 > input , InterleavedS8 output ) { } }
if ( output == null ) { output = new InterleavedS8 ( input . width , input . height , input . getNumBands ( ) ) ; } else { output . reshape ( input . width , input . height , input . getNumBands ( ) ) ; } if ( BoofConcurrency . USE_CONCURRENT ) { ImplConvertImage_MT . convert ( input , output ) ; } else { ImplConvertImage . convert ( input , output ) ; } return output ;
public class NodeUtil { /** * Is this node a hoisted function declaration ? A function declaration in the * scope root is hoisted to the top of the scope . * See { @ link # isFunctionDeclaration } ) . */ public static boolean isHoistedFunctionDeclaration ( Node n ) { } }
if ( isFunctionDeclaration ( n ) ) { Node parent = n . getParent ( ) ; return parent . isScript ( ) || parent . isModuleBody ( ) || parent . getParent ( ) . isFunction ( ) || parent . isExport ( ) ; } return false ;
public class InsertAllRequest {
    /**
     * Returns a builder for an {@code InsertAllRequest} object given the
     * destination table and the rows to insert.
     *
     * Convenience overload that delegates to the {@code TableId}-based
     * builder using the table info's id.
     */
    public static Builder newBuilder ( TableInfo tableInfo , RowToInsert ... rows ) {
        return newBuilder ( tableInfo . getTableId ( ) , rows ) ;
    }
}
public class Component {
    /**
     * Sets references to dependent components.
     *
     * Propagates the references to the dependency resolver, the logger and
     * the counters, in that order.
     *
     * @param references references to locate the component dependencies
     * @throws ReferenceException if a required dependency cannot be resolved
     */
    public void setReferences ( IReferences references ) throws ReferenceException {
        _dependencyResolver . setReferences ( references ) ;
        _logger . setReferences ( references ) ;
        _counters . setReferences ( references ) ;
    }
}
public class FastaWriterHelper { /** * Write a collection of NucleotideSequences to a file * @ param outputStream * @ param dnaSequences * @ throws Exception */ public static void writeNucleotideSequence ( OutputStream outputStream , Collection < DNASequence > dnaSequences ) throws Exception { } }
FastaWriter < DNASequence , NucleotideCompound > fastaWriter = new FastaWriter < DNASequence , NucleotideCompound > ( outputStream , dnaSequences , new GenericFastaHeaderFormat < DNASequence , NucleotideCompound > ( ) ) ; fastaWriter . process ( ) ;
public class SpelExpression { /** * { @ inheritDoc } */ public void evaluateAndSet ( Object target , Object value ) { } }
Expression expression = getParsedExpression ( ) ; context . setRootObject ( target ) ; try { expression . setValue ( context , value ) ; } catch ( EvaluationException e ) { throw new RuntimeException ( e ) ; }
public class SignerUtils { /** * Returns a " canned " policy for the given parameters . * For more information , see < a href = * " http : / / docs . aws . amazon . com / AmazonCloudFront / latest / DeveloperGuide / private - content - signed - urls - overview . html " * > Overview of Signed URLs < / a > . */ public static String buildCannedPolicy ( String resourceUrlOrPath , Date dateLessThan ) { } }
return "{\"Statement\":[{\"Resource\":\"" + resourceUrlOrPath + "\",\"Condition\":{\"DateLessThan\":{\"AWS:EpochTime\":" + MILLISECONDS . toSeconds ( dateLessThan . getTime ( ) ) + "}}}]}" ;
public class CommandForWheeledRobotNavigationImplementation {
    /**
     * Called to turn the robot / move the camera.
     *
     * Integrates yaw and pitch from the configured angular-velocity scales
     * over the wall-clock time elapsed since the previous call, clamps pitch
     * to [-90, 90], and pushes the result onto the local player entity.
     */
    public void updateYawAndPitch ( ) {
        // Work out the time that has elapsed since we last updated the values.
        // (We need to do this because we can't guarantee that this method will
        // be called at a constant frequency.)
        long timeNow = System . currentTimeMillis ( ) ;
        long deltaTime = timeNow - this . lastAngularUpdateTime ;
        this . lastAngularUpdateTime = timeNow ;
        // Work out how much the yaw and pitch should have changed in that time.
        // The overclock scale compensates for a server tick length other than
        // the nominal 50 ms.
        double overclockScale = 50.0 / ( double ) TimeHelper . serverTickLength ;
        double deltaYaw = this . yawScale * overclockScale * this . maxAngularVelocityDegreesPerSecond * ( deltaTime / 1000.0 ) ;
        double deltaPitch = this . pitchScale * overclockScale * this . maxAngularVelocityDegreesPerSecond * ( deltaTime / 1000.0 ) ;
        // And update them:
        mYaw += deltaYaw ;
        mCameraPitch += deltaPitch ;
        mCameraPitch = ( mCameraPitch < - 90 ) ? - 90 : ( mCameraPitch > 90 ? 90 : mCameraPitch ) ; // Clamp to [-90, 90]
        // And update the player (if one is present):
        EntityPlayerSP player = Minecraft . getMinecraft ( ) . player ;
        if ( player != null ) {
            player . rotationPitch = this . mCameraPitch ;
            player . rotationYaw = this . mYaw ;
        }
    }
}
public class RsWithCookie { /** * Checks value according RFC 6265 section 4.1.1. * @ param value Cookie value * @ return Cookie value */ private static CharSequence validValue ( final CharSequence value ) { } }
if ( ! RsWithCookie . CVALUE_PTRN . matcher ( value ) . matches ( ) ) { throw new IllegalArgumentException ( String . format ( "Cookie value \"%s\" contains invalid characters" , value ) ) ; } return value ;
public class ConfigFactory {
    /**
     * Converts a Java {@link java.util.Properties} object to a Config using
     * the rules documented in the <a
     * href="https://github.com/lightbend/config/blob/master/HOCON.md">HOCON
     * spec</a>. Keys are split on '.' and treated as paths; all values end
     * up as strings. If you have both "a=foo" and "a.b=bar", the string
     * value "foo" is dropped.
     *
     * If you want {@code System.getProperties()} as a ConfigObject, prefer
     * {@link #systemProperties()}, which returns a cached global singleton.
     *
     * @param properties a Java Properties object
     * @param options    the parse options
     * @return the parsed configuration
     */
    public static Config parseProperties ( Properties properties , ConfigParseOptions options ) {
        return Parseable . newProperties ( properties , options ) . parse ( ) . toConfig ( ) ;
    }
}
public class Commands { /** * Starts or stops saving all output to a file . * @ param line Command line * @ param callback Callback for command status */ public void record ( String line , DispatchCallback callback ) { } }
if ( sqlLine . getRecordOutputFile ( ) == null ) { startRecording ( line , callback ) ; } else { stopRecording ( line , callback ) ; }
public class Controller { /** * Creates controller threads for migrators and consistency checkers based * on the configuration loaded from the db . * For each configuration item , a migrator controller thread is created and * started . * Once created , each thread manages its own lifecycle . If the corresponding * configuration is removed , thread terminates , or it is modified , thread * behaves accordingly . */ public void createControllers ( MigrationConfiguration [ ] configurations ) throws Exception { } }
for ( MigrationConfiguration cfg : configurations ) { MigrationProcess process = migrationMap . get ( cfg . get_id ( ) ) ; if ( process == null ) { LOGGER . debug ( "Creating a controller thread for configuration {}: {}" , cfg . get_id ( ) , cfg . getConfigurationName ( ) ) ; MigratorController c = new MigratorController ( this , cfg ) ; if ( c instanceof MonitoredThread ) { ( ( MonitoredThread ) c ) . registerThreadMonitor ( threadMonitor ) ; } AbstractController ccc = getConsistencyCheckerController ( cfg ) ; ; if ( ccc instanceof MonitoredThread ) { ( ( MonitoredThread ) ccc ) . registerThreadMonitor ( threadMonitor ) ; } migrationMap . put ( cfg . get_id ( ) , new MigrationProcess ( cfg , c , ccc ) ) ; c . start ( ) ; if ( ccc != null ) { ccc . start ( ) ; } } else { healthcheck ( cfg ) ; } }
public class ModuleUploads {
    /**
     * Gets information about the given upload.
     *
     * This method will override the configuration specified through
     * {@link CMAClient.Builder#setSpaceId(String)} and will ignore
     * {@link CMAClient.Builder#setEnvironmentId(String)}. Blocks until the
     * first result is available.
     *
     * @param spaceId  which space is this upload hosted under?
     * @param uploadId what id does the upload have?
     * @return a CMAUpload based on this id and space combination
     * @throws IllegalArgumentException if spaceId is null
     * @throws IllegalArgumentException if uploadId is null
     */
    public CMAUpload fetchOne ( String spaceId , String uploadId ) {
        assertNotNull ( spaceId , "spaceId" ) ;
        assertNotNull ( uploadId , "uploadId" ) ;
        return service . fetchOne ( spaceId , uploadId ) . blockingFirst ( ) ;
    }
}
public class DeleteProjectRequestMarshaller {
    /**
     * Marshalls the given request object's project id into the protocol
     * marshaller.
     *
     * @param deleteProjectRequest the request to marshall; must not be null
     * @param protocolMarshaller   the target marshaller
     * @throws SdkClientException when the request is null or marshalling fails
     */
    public void marshall ( DeleteProjectRequest deleteProjectRequest , ProtocolMarshaller protocolMarshaller ) {
        if ( deleteProjectRequest == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            protocolMarshaller . marshall ( deleteProjectRequest . getProjectId ( ) , PROJECTID_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap any marshalling failure in an SdkClientException.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class InboxQueue {
    /**
     * Closes the inbox.
     *
     * Transitions the lifecycle to stopping/destroy (returning immediately
     * when another caller already began the stop), enqueues a shutdown
     * message, wakes the queue and waits briefly for the message to be
     * processed, delegates to the superclass shutdown, and finally flushes
     * the outbox — also shutting down the worker when this inbox is not in
     * single mode.
     *
     * @param mode shutdown mode forwarded to the message, super and worker
     */
    @ Override
    public void shutdown ( ShutdownModeAmp mode ) {
        // Only the first caller to reach "stopping" performs the shutdown.
        if ( ! _lifecycle . toStopping ( ) ) {
            return ;
        }
        _lifecycle . toDestroy ( ) ;
        OnShutdownMessage shutdownMessage = new OnShutdownMessage ( this , mode , isSingle ( ) ) ;
        _queue . offer ( shutdownMessage ) ;
        // _queue.close();
        /* for (Actor<?> actorProcessor : _queueActors) { actorProcessor.close(); */
        _queue . wakeAllAndWait ( ) ;
        // Bounded wait: don't hang shutdown on a stuck consumer.
        shutdownMessage . waitFor ( 1 , TimeUnit . SECONDS ) ;
        super . shutdown ( mode ) ;
        try ( OutboxAmp outbox = OutboxAmp . currentOrCreate ( manager ( ) ) ) {
            // Temporarily bind this inbox as the outbox context while flushing.
            Object ctx = outbox . getAndSetContext ( this ) ;
            try {
                outbox . flush ( ) ;
                if ( ! isSingle ( ) ) {
                    _worker . shutdown ( mode ) ;
                }
            } finally {
                // Always restore the previous outbox context.
                outbox . getAndSetContext ( ctx ) ;
            }
        }
        // XXX: _worker.shutdown(ShutdownModeAmp.IMMEDIATE);
    }
}
public class X500Name { /** * Find the most specific ( " last " ) attribute of the given * type . */ public DerValue findMostSpecificAttribute ( ObjectIdentifier attribute ) { } }
if ( names != null ) { for ( int i = names . length - 1 ; i >= 0 ; i -- ) { DerValue value = names [ i ] . findAttribute ( attribute ) ; if ( value != null ) { return value ; } } } return null ;
public class Typ { /** * Returns a Object of type type converted from constant * @ param constant * @ param kind * @ return */ public static Object convert ( String constant , TypeKind kind ) { } }
switch ( kind ) { case INT : return Integer . parseInt ( constant ) ; case LONG : return java . lang . Long . parseLong ( constant ) ; case FLOAT : return java . lang . Float . parseFloat ( constant ) ; case DOUBLE : return java . lang . Double . parseDouble ( constant ) ; default : throw new UnsupportedOperationException ( kind + "Not yet implemented" ) ; }
public class HasResultXMLConverter { /** * return flag if the server has a result * @ param hasResult * @ return flag if there is a result */ public String toXML ( boolean hasResult ) throws IOException { } }
StringWriter swriter = new StringWriter ( ) ; PrintWriter writer = new PrintWriter ( swriter ) ; PrettyXMLWriter xml = new PrettyXMLWriter ( writer ) ; xml . openTag ( "alignment" ) ; xml . attribute ( "hasResult" , String . valueOf ( hasResult ) ) ; xml . closeTag ( "alignment" ) ; xml . close ( ) ; return swriter . toString ( ) ;
public class ControlBeanContextSupport { /** * Init this classes data structures . */ protected void initialize ( ) { } }
_bcMembershipListeners = new ArrayList < BeanContextMembershipListener > ( ) ; _children = Collections . synchronizedMap ( new HashMap < Object , BCChild > ( ) ) ; _childPcl = new PropertyChangeListener ( ) { public void propertyChange ( PropertyChangeEvent pce ) { ControlBeanContextSupport . this . propertyChange ( pce ) ; } } ; _childVcl = new VetoableChangeListener ( ) { public void vetoableChange ( PropertyChangeEvent pce ) throws PropertyVetoException { ControlBeanContextSupport . this . vetoableChange ( pce ) ; } } ;
public class SlingApi {
    /**
     * (asynchronously) Posts the Apache Sling Referrer Filter configuration.
     *
     * Generated client code: wires optional progress listeners to the callback,
     * builds the validated call, and executes it asynchronously.
     *
     * @param runmode (required)
     * @param allowEmpty (optional)
     * @param allowEmptyTypeHint (optional)
     * @param allowHosts (optional)
     * @param allowHostsTypeHint (optional)
     * @param allowHostsRegexp (optional)
     * @param allowHostsRegexpTypeHint (optional)
     * @param filterMethods (optional)
     * @param filterMethodsTypeHint (optional)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call postConfigApacheSlingReferrerFilterAsync(String runmode, Boolean allowEmpty, String allowEmptyTypeHint, String allowHosts, String allowHostsTypeHint, String allowHostsRegexp, String allowHostsRegexpTypeHint, String filterMethods, String filterMethodsTypeHint, final ApiCallback<Void> callback) throws ApiException {
        // Progress listeners are only attached when the caller supplied a callback.
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
        if (callback != null) {
            // Forward download progress to the callback.
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };
            // Forward upload progress to the callback.
            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }
        // Validation of required parameters happens inside the ...ValidateBeforeCall helper.
        com.squareup.okhttp.Call call = postConfigApacheSlingReferrerFilterValidateBeforeCall(runmode, allowEmpty, allowEmptyTypeHint, allowHosts, allowHostsTypeHint, allowHostsRegexp, allowHostsRegexpTypeHint, filterMethods, filterMethodsTypeHint, progressListener, progressRequestListener);
        apiClient.executeAsync(call, callback);
        return call;
    }
}
public class AbstractMemberPropertyAccessor { /** * Introspect fields of a class . This excludes static fields and handles * final fields as readOnly . * @ param type the class to inspect . * @ param introspectedClasses a set of already inspected classes . */ private void introspectFields ( Class type , Set introspectedClasses ) { } }
if ( type == null || Object . class . equals ( type ) || type . isInterface ( ) || introspectedClasses . contains ( type ) ) { return ; } introspectedClasses . add ( type ) ; introspectFields ( type . getSuperclass ( ) , introspectedClasses ) ; Field [ ] fields = type . getDeclaredFields ( ) ; for ( int i = 0 ; i < fields . length ; i ++ ) { if ( ! Modifier . isStatic ( fields [ i ] . getModifiers ( ) ) ) { readAccessors . put ( fields [ i ] . getName ( ) , fields [ i ] ) ; if ( ! Modifier . isFinal ( fields [ i ] . getModifiers ( ) ) ) { writeAccessors . put ( fields [ i ] . getName ( ) , fields [ i ] ) ; } } }
public class GroupZipper { /** * Navigates right the tree to the next node at the same level . * @ return a new groupzipper which points to next node at the same level * @ throws NoSuchElementException * if the last node at the current level has already been * reached */ public GroupZipper right ( ) { } }
if ( ! canRight ( ) ) { throw new NoSuchElementException ( "Could not move right because the last node at this level has already been reached" ) ; } index ++ ; node = parent . getNode ( ) . getGroups ( ) . get ( index ) ; return this ;
public class RBBIRuleScanner {
    /**
     * Performs one parse action for the rule-scanner state machine.
     *
     * Called by the scanner's table-driven parser each time a state transition
     * carries an action code. Most actions manipulate the node stack
     * ({@code fNodeStack}/{@code fNodeStackPtr}) to build the parse tree for the
     * rule being scanned; others record scanner flags (look-ahead, chaining,
     * reverse direction), handle !!options, or report syntax errors.
     *
     * @param action the action code from the rule parse table
     * @return true to continue scanning; false to stop (on doExit or after a
     *         non-recoverable rule error)
     */
    boolean doParseActions(int action) {
        RBBINode n = null;
        boolean returnVal = true;
        switch (action) {
        case RBBIRuleParseTable.doExprStart:
            pushNewNode(RBBINode.opStart);
            fRuleNum++;
            break;
        case RBBIRuleParseTable.doNoChain:
            // Scanned a '^' while on the rule start state.
            fNoChainInRule = true;
            break;
        case RBBIRuleParseTable.doExprOrOperator: {
            fixOpStack(RBBINode.precOpCat);
            RBBINode operandNode = fNodeStack[fNodeStackPtr--];
            RBBINode orNode = pushNewNode(RBBINode.opOr);
            orNode.fLeftChild = operandNode;
            operandNode.fParent = orNode;
        }
            break;
        case RBBIRuleParseTable.doExprCatOperator:
            // concatenation operator.
            // For the implicit concatenation of adjacent terms in an expression
            // that are not separated by any other operator. Action is invoked
            // between the actions for the two terms.
        {
            fixOpStack(RBBINode.precOpCat);
            RBBINode operandNode = fNodeStack[fNodeStackPtr--];
            RBBINode catNode = pushNewNode(RBBINode.opCat);
            catNode.fLeftChild = operandNode;
            operandNode.fParent = catNode;
        }
            break;
        case RBBIRuleParseTable.doLParen:
            // Open Paren.
            // The openParen node is a dummy operation type with a low precedence,
            // which has the affect of ensuring that any real binary op that
            // follows within the parens binds more tightly to the operands than
            // stuff outside of the parens.
            pushNewNode(RBBINode.opLParen);
            break;
        case RBBIRuleParseTable.doExprRParen:
            fixOpStack(RBBINode.precLParen);
            break;
        case RBBIRuleParseTable.doNOP:
            break;
        case RBBIRuleParseTable.doStartAssign:
            // We've just scanned "$variable = "
            // The top of the node stack has the $variable ref node.
            // Save the start position of the RHS text in the StartExpression node
            // that precedes the $variableReference node on the stack.
            // This will eventually be used when saving the full $variable
            // replacement text as a string.
            n = fNodeStack[fNodeStackPtr - 1];
            n.fFirstPos = fNextIndex; // move past the '='
            // Push a new start-of-expression node; needed to keep parse of the
            // RHS expression happy.
            pushNewNode(RBBINode.opStart);
            break;
        case RBBIRuleParseTable.doEndAssign: {
            // We have reached the end of an assignement statement.
            // Current scan char is the ';' that terminates the assignment.
            // Terminate expression, leaves expression parse tree rooted in TOS node.
            fixOpStack(RBBINode.precStart);
            RBBINode startExprNode = fNodeStack[fNodeStackPtr - 2];
            RBBINode varRefNode = fNodeStack[fNodeStackPtr - 1];
            RBBINode RHSExprNode = fNodeStack[fNodeStackPtr];
            // Save original text of right side of assignment, excluding the
            // terminating ';' in the root of the node for the right-hand-side
            // expression.
            RHSExprNode.fFirstPos = startExprNode.fFirstPos;
            RHSExprNode.fLastPos = fScanIndex;
            // fRB.fRules.extractBetween(RHSExprNode.fFirstPos, RHSExprNode.fLastPos, RHSExprNode.fText);
            RHSExprNode.fText = fRB.fRules.substring(RHSExprNode.fFirstPos, RHSExprNode.fLastPos);
            // Expression parse tree becomes l. child of the $variable reference node.
            varRefNode.fLeftChild = RHSExprNode;
            RHSExprNode.fParent = varRefNode;
            // Make a symbol table entry for the $variableRef node.
            fSymbolTable.addEntry(varRefNode.fText, varRefNode);
            // Clean up the stack.
            fNodeStackPtr -= 3;
            break;
        }
        case RBBIRuleParseTable.doEndOfRule: {
            fixOpStack(RBBINode.precStart); // Terminate expression, leaves expression
            if (fRB.fDebugEnv != null && fRB.fDebugEnv.indexOf("rtree") >= 0) {
                printNodeStack("end of rule");
            }
            Assert.assrt(fNodeStackPtr == 1);
            RBBINode thisRule = fNodeStack[fNodeStackPtr];
            // If this rule includes a look-ahead '/', add a endMark node to the
            // expression tree.
            if (fLookAheadRule) {
                RBBINode endNode = pushNewNode(RBBINode.endMark);
                RBBINode catNode = pushNewNode(RBBINode.opCat);
                fNodeStackPtr -= 2;
                catNode.fLeftChild = thisRule;
                catNode.fRightChild = endNode;
                fNodeStack[fNodeStackPtr] = catNode;
                endNode.fVal = fRuleNum;
                endNode.fLookAheadEnd = true;
                thisRule = catNode;
                // TODO: Disable chaining out of look-ahead (hard break) rules.
                // The break on rule match is forced, so there is no point in
                // building up the state table to chain into another rule for a
                // longer match.
            }
            // Mark this node as being the root of a rule.
            thisRule.fRuleRoot = true;
            // Flag if chaining into this rule is wanted.
            if (fRB.fChainRules && // If rule chaining is enabled globally via !!chain
                    !fNoChainInRule) { // and no '^' chain-in inhibit was on this rule
                thisRule.fChainIn = true;
            }
            // All rule expressions are ORed together.
            // The ';' that terminates an expression really just functions as a
            // '|' with a low operator prededence.
            // Each of the four sets of rules are collected separately.
            // (forward, reverse, safe_forward, safe_reverse)
            // OR this rule into the appropriate group of them.
            int destRules = (fReverseRule ? RBBIRuleBuilder.fReverseTree : fRB.fDefaultTree);
            if (fRB.fTreeRoots[destRules] != null) {
                // This is not the first rule encountered.
                // OR previous stuff (from *destRules)
                // with the current rule expression (on the Node Stack)
                // with the resulting OR expression going to *destRules
                thisRule = fNodeStack[fNodeStackPtr];
                RBBINode prevRules = fRB.fTreeRoots[destRules];
                RBBINode orNode = pushNewNode(RBBINode.opOr);
                orNode.fLeftChild = prevRules;
                prevRules.fParent = orNode;
                orNode.fRightChild = thisRule;
                thisRule.fParent = orNode;
                fRB.fTreeRoots[destRules] = orNode;
            } else {
                // This is the first rule encountered (for this direction).
                // Just move its parse tree from the stack to *destRules.
                fRB.fTreeRoots[destRules] = fNodeStack[fNodeStackPtr];
            }
            fReverseRule = false; // in preparation for the next rule.
            fLookAheadRule = false;
            fNoChainInRule = false;
            fNodeStackPtr = 0;
        }
            break;
        case RBBIRuleParseTable.doRuleError:
            error(RBBIRuleBuilder.U_BRK_RULE_SYNTAX);
            returnVal = false;
            break;
        case RBBIRuleParseTable.doVariableNameExpectedErr:
            error(RBBIRuleBuilder.U_BRK_RULE_SYNTAX);
            break;
        // Unary operands + ? *
        // These all appear after the operand to which they apply.
        // When we hit one, the operand (may be a whole sub expression)
        // will be on the top of the stack.
        // Unary Operator becomes TOS, with the old TOS as its one child.
        case RBBIRuleParseTable.doUnaryOpPlus: {
            RBBINode operandNode = fNodeStack[fNodeStackPtr--];
            RBBINode plusNode = pushNewNode(RBBINode.opPlus);
            plusNode.fLeftChild = operandNode;
            operandNode.fParent = plusNode;
        }
            break;
        case RBBIRuleParseTable.doUnaryOpQuestion: {
            RBBINode operandNode = fNodeStack[fNodeStackPtr--];
            RBBINode qNode = pushNewNode(RBBINode.opQuestion);
            qNode.fLeftChild = operandNode;
            operandNode.fParent = qNode;
        }
            break;
        case RBBIRuleParseTable.doUnaryOpStar: {
            RBBINode operandNode = fNodeStack[fNodeStackPtr--];
            RBBINode starNode = pushNewNode(RBBINode.opStar);
            starNode.fLeftChild = operandNode;
            operandNode.fParent = starNode;
        }
            break;
        case RBBIRuleParseTable.doRuleChar:
            // A "Rule Character" is any single character that is a literal part
            // of the regular expression. Like a, b and c in the expression "(abc*)
            // These are pretty uncommon in break rules; the terms are more commonly
            // sets. To keep things uniform, treat these characters like as
            // sets that just happen to contain only one character.
        {
            n = pushNewNode(RBBINode.setRef);
            String s = String.valueOf((char) fC.fChar);
            findSetFor(s, n, null);
            n.fFirstPos = fScanIndex;
            n.fLastPos = fNextIndex;
            n.fText = fRB.fRules.substring(n.fFirstPos, n.fLastPos);
            break;
        }
        case RBBIRuleParseTable.doDotAny:
            // scanned a ".", meaning match any single character.
        {
            n = pushNewNode(RBBINode.setRef);
            findSetFor(kAny, n, null);
            n.fFirstPos = fScanIndex;
            n.fLastPos = fNextIndex;
            n.fText = fRB.fRules.substring(n.fFirstPos, n.fLastPos);
            break;
        }
        case RBBIRuleParseTable.doSlash:
            // Scanned a '/', which identifies a look-ahead break position in a rule.
            n = pushNewNode(RBBINode.lookAhead);
            n.fVal = fRuleNum;
            n.fFirstPos = fScanIndex;
            n.fLastPos = fNextIndex;
            n.fText = fRB.fRules.substring(n.fFirstPos, n.fLastPos);
            fLookAheadRule = true;
            break;
        case RBBIRuleParseTable.doStartTagValue:
            // Scanned a '{', the opening delimiter for a tag value within a rule.
            n = pushNewNode(RBBINode.tag);
            n.fVal = 0;
            n.fFirstPos = fScanIndex;
            n.fLastPos = fNextIndex;
            break;
        case RBBIRuleParseTable.doTagDigit:
            // Just scanned a decimal digit that's part of a tag value
        {
            n = fNodeStack[fNodeStackPtr];
            int v = UCharacter.digit((char) fC.fChar, 10);
            n.fVal = n.fVal * 10 + v;
            break;
        }
        case RBBIRuleParseTable.doTagValue:
            n = fNodeStack[fNodeStackPtr];
            n.fLastPos = fNextIndex;
            n.fText = fRB.fRules.substring(n.fFirstPos, n.fLastPos);
            break;
        case RBBIRuleParseTable.doTagExpectedError:
            error(RBBIRuleBuilder.U_BRK_MALFORMED_RULE_TAG);
            returnVal = false;
            break;
        case RBBIRuleParseTable.doOptionStart:
            // Scanning a !!option. At the start of string.
            fOptionStart = fScanIndex;
            break;
        case RBBIRuleParseTable.doOptionEnd: {
            String opt = fRB.fRules.substring(fOptionStart, fScanIndex);
            if (opt.equals("chain")) {
                fRB.fChainRules = true;
            } else if (opt.equals("LBCMNoChain")) {
                fRB.fLBCMNoChain = true;
            } else if (opt.equals("forward")) {
                fRB.fDefaultTree = RBBIRuleBuilder.fForwardTree;
            } else if (opt.equals("reverse")) {
                fRB.fDefaultTree = RBBIRuleBuilder.fReverseTree;
            } else if (opt.equals("safe_forward")) {
                fRB.fDefaultTree = RBBIRuleBuilder.fSafeFwdTree;
            } else if (opt.equals("safe_reverse")) {
                fRB.fDefaultTree = RBBIRuleBuilder.fSafeRevTree;
            } else if (opt.equals("lookAheadHardBreak")) {
                fRB.fLookAheadHardBreak = true;
            } else {
                error(RBBIRuleBuilder.U_BRK_UNRECOGNIZED_OPTION);
            }
            break;
        }
        case RBBIRuleParseTable.doReverseDir:
            fReverseRule = true;
            break;
        case RBBIRuleParseTable.doStartVariableName:
            n = pushNewNode(RBBINode.varRef);
            n.fFirstPos = fScanIndex;
            break;
        case RBBIRuleParseTable.doEndVariableName:
            n = fNodeStack[fNodeStackPtr];
            if (n == null || n.fType != RBBINode.varRef) {
                error(RBBIRuleBuilder.U_BRK_INTERNAL_ERROR);
                break;
            }
            n.fLastPos = fScanIndex;
            n.fText = fRB.fRules.substring(n.fFirstPos + 1, n.fLastPos);
            // Look the newly scanned name up in the symbol table.
            // If there's an entry, set the l. child of the var ref to the
            // replacement expression. (We also pass through here when scanning
            // assignments, but no harm is done, other than a slight wasted effort
            // that seems hard to avoid. Lookup will be null)
            n.fLeftChild = fSymbolTable.lookupNode(n.fText);
            break;
        case RBBIRuleParseTable.doCheckVarDef:
            n = fNodeStack[fNodeStackPtr];
            if (n.fLeftChild == null) {
                error(RBBIRuleBuilder.U_BRK_UNDEFINED_VARIABLE);
                returnVal = false;
            }
            break;
        case RBBIRuleParseTable.doExprFinished:
            break;
        case RBBIRuleParseTable.doRuleErrorAssignExpr:
            error(RBBIRuleBuilder.U_BRK_ASSIGN_ERROR);
            returnVal = false;
            break;
        case RBBIRuleParseTable.doExit:
            returnVal = false;
            break;
        case RBBIRuleParseTable.doScanUnicodeSet:
            scanSet();
            break;
        default:
            error(RBBIRuleBuilder.U_BRK_INTERNAL_ERROR);
            returnVal = false;
            break;
        }
        return returnVal;
    }
}
public class JNDIURLEntry {
    /**
     * Registers the JNDI service for the supplied properties as long as the
     * jndiName and value are set.
     *
     * Registers a {@link Reference} (not the URL itself) so the URL object is
     * materialized lazily on JNDI lookup.
     *
     * @param context the bundle context used to register the service
     * @param props The properties containing values for <code>"jndiName"</code>
     *        and <code>"value"</code>
     */
    protected void activate(BundleContext context, Map<String, Object> props) {
        final String jndiName = (String) props.get("jndiName");
        final String urlValue = (String) props.get("value");
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Registering JNDIURLEntry with value " + urlValue + " and JNDI name " + jndiName);
        }
        // Both attributes are mandatory; bail out quietly (trace only) if either is missing.
        if (jndiName == null || jndiName.isEmpty() || urlValue == null || urlValue.isEmpty()) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Unable to register JNDIURLEntry with jndiName [" + jndiName + "] and value [" + urlValue + "] because both must be set");
            }
            return;
        }
        // Test that the URL is valid
        // creating a url should be a protected action
        createURL(jndiName, urlValue);
        Dictionary<String, Object> propertiesForJndiService = new Hashtable<String, Object>();
        // osgi.jndi.service.name makes the service discoverable under the JNDI name.
        propertiesForJndiService.put("osgi.jndi.service.name", jndiName);
        propertiesForJndiService.put(Constants.OBJECTCLASS, Reference.class);
        Reference ref = new Reference(URL.class.getName(), this.getClass().getName(), null);
        // The RefAddr carries the raw URL string; resolved at lookup time.
        ref.add(new RefAddr("JndiURLEntry") {
            private static final long serialVersionUID = 5168161341101144689L;

            @Override
            public Object getContent() {
                return urlValue;
            }
        });
        // Keep the registration so it can be unregistered on deactivate.
        this.serviceRegistration = context.registerService(Reference.class, ref, propertiesForJndiService);
    }
}
public class Output { /** * { @ inheritDoc } */ @ Override public void writeDate ( Date date ) { } }
buf . put ( AMF . TYPE_DATE ) ; buf . putDouble ( date . getTime ( ) ) ; buf . putShort ( ( short ) ( TimeZone . getDefault ( ) . getRawOffset ( ) / 60 / 1000 ) ) ;
public class CmsDbSettingsPanel { /** * Initializes fields with data from setup bean . * @ param the webapp name ( null for root webapp ) */ public void initFromSetupBean ( String webapp ) { } }
String db = m_setupBean . getDatabase ( ) ; CmsSetupBean bean = m_setupBean ; Map < String , String [ ] > params = new HashMap < > ( ) ; m_setupPanel . setVisible ( true ) ; switch ( db ) { case "mysql" : case "mssql" : setVisible ( m_dbCreateUser , m_dbCreatePwd , m_dbWorkUser , m_dbWorkPwd , m_dbCreateConStr , m_dbName , m_createDb , m_dropDatabase ) ; m_dbCreateUser . setValue ( bean . getDbCreateUser ( ) ) ; m_dbCreatePwd . setValue ( bean . getDbCreatePwd ( ) ) ; m_dbWorkUser . setValue ( bean . getDbWorkUser ( ) ) ; m_dbWorkPwd . setValue ( bean . getDbWorkPwd ( ) ) ; m_dbCreateConStr . setValue ( bean . getDbCreateConStr ( ) ) ; m_dbName . setValue ( webapp != null ? webapp : bean . getDb ( ) ) ; m_createDb . setValue ( true ) ; m_createDb . setCaption ( "Create database and tables" ) ; m_dropDatabase . setValue ( false ) ; break ; case "postgresql" : setVisible ( m_dbCreateUser , m_dbCreatePwd , m_dbWorkUser , m_dbWorkPwd , m_dbCreateConStr , m_dbName , m_createDb , m_createTables , m_dropDatabase , m_templateDb ) ; m_dbCreateUser . setValue ( bean . getDbCreateUser ( ) ) ; m_dbCreatePwd . setValue ( bean . getDbCreatePwd ( ) ) ; m_dbWorkUser . setValue ( bean . getDbWorkUser ( ) ) ; m_dbWorkPwd . setValue ( bean . getDbWorkPwd ( ) ) ; m_dbCreateConStr . setValue ( bean . getDbCreateConStr ( ) ) ; m_dbName . setValue ( webapp != null ? webapp : bean . getDb ( ) ) ; m_createDb . setValue ( true ) ; m_createDb . setCaption ( "Create database and user" ) ; m_templateDb . setValue ( dbProp ( "templateDb" ) ) ; m_createTables . setValue ( true ) ; m_dropDatabase . setValue ( false ) ; break ; case "hsqldb" : setVisible ( m_dbCreateUser , m_dbCreatePwd , m_dbWorkUser , m_dbWorkPwd , m_dbCreateConStr , m_createDb , m_dropDatabase ) ; m_dbCreateUser . setValue ( bean . getDbCreateUser ( ) ) ; m_dbCreatePwd . setValue ( bean . getDbCreatePwd ( ) ) ; m_dbWorkUser . setValue ( bean . getDbWorkUser ( ) ) ; m_dbWorkPwd . setValue ( bean . 
getDbWorkPwd ( ) ) ; m_createDb . setValue ( true ) ; m_createDb . setCaption ( "Create database and tables" ) ; m_dropDatabase . setValue ( false ) ; String origCreateConStr = bean . getDbProperty ( "hsqldb.constr" ) ; String createConStr = bean . getDbCreateConStr ( ) ; if ( ( origCreateConStr != null ) && origCreateConStr . equals ( createConStr ) ) { createConStr = "jdbc:hsqldb:file:" + bean . getWebAppRfsPath ( ) + "WEB-INF" + File . separatorChar + "hsqldb" + File . separatorChar + "opencms;shutdown=false" ; } m_dbCreateConStr . setValue ( createConStr ) ; break ; case "oracle" : setVisible ( m_dbCreateUser , m_dbCreatePwd , m_dbWorkUser , m_dbWorkPwd , m_dbCreateConStr , m_createDb , m_createTables , m_dropDatabase , m_temporaryTablespace , m_indexTablespace , m_defaultTablespace ) ; m_dbCreateUser . setValue ( bean . getDbCreateUser ( ) ) ; m_dbCreatePwd . setValue ( bean . getDbCreatePwd ( ) ) ; m_dbWorkUser . setValue ( bean . getDbWorkUser ( ) ) ; m_dbWorkPwd . setValue ( bean . getDbWorkPwd ( ) ) ; m_dbCreateConStr . setValue ( bean . getDbCreateConStr ( ) ) ; m_dbName . setValue ( webapp != null ? webapp : bean . getDb ( ) ) ; m_createDb . setValue ( true ) ; m_createDb . setCaption ( "Create user" ) ; m_createTables . setValue ( true ) ; m_dropDatabase . setValue ( false ) ; m_temporaryTablespace . setValue ( dbProp ( "temporaryTablespace" ) ) ; m_indexTablespace . setValue ( dbProp ( "indexTablespace" ) ) ; m_defaultTablespace . setValue ( dbProp ( "defaultTablespace" ) ) ; break ; case "db2" : case "as400" : setVisible ( m_dbWorkUser , m_dbWorkPwd , m_dbCreateConStr , m_dbName , m_createTables ) ; m_setupPanel . setVisible ( false ) ; m_dbWorkUser . setValue ( bean . getDbWorkUser ( ) ) ; m_dbWorkPwd . setValue ( bean . getDbWorkPwd ( ) ) ; m_dbCreateConStr . setValue ( bean . getDbCreateConStr ( ) ) ; m_dbName . setValue ( webapp != null ? webapp : bean . getDb ( ) ) ; m_createDb . setValue ( false ) ; m_createTables . 
setValue ( true ) ; m_dropDatabase . setValue ( true ) ; break ; default : break ; }
public class Util { /** * Parses the provided string value into a double value . * < p > If the string is null or empty this returns the default value . < / p > * @ param value value to parse * @ param defaultValue default value * @ return double representation of provided value or default value . */ public static Double parseDouble ( String value , Double defaultValue ) { } }
if ( isNullOrEmpty ( value ) ) { return defaultValue ; } return Double . parseDouble ( value ) ;
public class DelegatedClientAuthenticationAction { /** * Gets credentials from delegated client . * @ param webContext the web context * @ param client the client * @ return the credentials from delegated client */ protected Credentials getCredentialsFromDelegatedClient ( final J2EContext webContext , final BaseClient < Credentials , CommonProfile > client ) { } }
val credentials = client . getCredentials ( webContext ) ; LOGGER . debug ( "Retrieved credentials from client as [{}]" , credentials ) ; if ( credentials == null ) { throw new IllegalArgumentException ( "Unable to determine credentials from the context with client " + client . getName ( ) ) ; } return credentials ;
public class SARLAgentLaunchConfigurationDelegate { /** * Returns the main type name specified by the given launch configuration , * or < code > null < / code > if none . * @ param configuration launch configuration * @ return the main type name specified by the given launch configuration , * or < code > null < / code > if none * @ throws CoreException if unable to retrieve the attribute */ protected String getAgentName ( ILaunchConfiguration configuration ) throws CoreException { } }
final String agentName = getConfigurationAccessor ( ) . getAgent ( configuration ) ; if ( agentName == null ) { return null ; } return VariablesPlugin . getDefault ( ) . getStringVariableManager ( ) . performStringSubstitution ( agentName ) ;
public class SraReader { /** * Read a study from the specified input stream . * @ param inputStream input stream , must not be null * @ return a study read from the specified input stream * @ throws IOException if an I / O error occurs */ public static Study readStudy ( final InputStream inputStream ) throws IOException { } }
checkNotNull ( inputStream ) ; try ( BufferedReader reader = new BufferedReader ( new InputStreamReader ( inputStream ) ) ) { return readStudy ( reader ) ; }
public class MESubscription { /** * Marks this object . * Used for deciding if this object is to be deleted . * The object is marked , the proxies are all reregistered . * If there is a mark still left , then this proxy can be deleted . */ void mark ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "mark" ) ; SibTr . exit ( tc , "mark" ) ; } _marked = true ;
public class LoadClient {
    /**
     * Start the load scenario.
     *
     * Spawns one worker Runnable per configured thread on the fixed thread pool.
     * Only SIMPLE_PARAMS payloads with the async client are supported (unary or
     * streaming); any other combination fails fast. Finally records the process
     * CPU time as the baseline for subsequent load measurements, when available.
     */
    void start() {
        for (int i = 0; i < threadCount; i++) {
            Runnable r = null;
            switch (config.getPayloadConfig().getPayloadCase()) {
                case SIMPLE_PARAMS: {
                    // Workers share the client channels round-robin.
                    if (config.getClientType() == Control.ClientType.ASYNC_CLIENT) {
                        if (config.getRpcType() == Control.RpcType.UNARY) {
                            r = new AsyncUnaryWorker(clients[i % clients.length]);
                        } else if (config.getRpcType() == Control.RpcType.STREAMING) {
                            r = new AsyncPingPongWorker(mat, clients[i % clients.length]);
                        }
                    }
                    break;
                }
                default: {
                    // Unsupported payload type: surface as a gRPC UNIMPLEMENTED status.
                    throw Status.UNIMPLEMENTED.withDescription(
                        "Unknown payload case " + config.getPayloadConfig().getPayloadCase().name())
                        .asRuntimeException();
                }
            }
            // r stays null when the rpc/client combination has no worker implementation.
            if (r == null) {
                throw new IllegalStateException(config.getRpcType().name()
                    + " not supported for client type " + config.getClientType());
            }
            fixedThreadPool.execute(r);
        }
        // Baseline CPU time for load statistics; osBean may be unavailable on some JVMs.
        if (osBean != null) {
            lastMarkCpuTime = osBean.getProcessCpuTime();
        }
    }
}
public class CommerceAddressLocalServiceUtil {
    /**
     * Returns a range of all the commerce addresses.
     *
     * Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are not primary keys, they
     * are indexes in the result set (so <code>0</code> refers to the first result).
     * Setting both to {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS}
     * returns the full result set, ordered by primary key ascending unless the
     * service applies the default ORDER BY logic from
     * {@link com.liferay.commerce.model.impl.CommerceAddressModelImpl}.
     *
     * @param start the lower bound of the range of commerce addresses
     * @param end the upper bound of the range of commerce addresses (not inclusive)
     * @return the range of commerce addresses
     */
    public static java.util.List<com.liferay.commerce.model.CommerceAddress> getCommerceAddresses(int start, int end) {
        // Generated ServiceUtil facade: delegate to the OSGi-resolved local service.
        return getService().getCommerceAddresses(start, end);
    }
}
public class FNCImpl {
    /**
     * Sets the RPatDCnt attribute and fires an EMF SET notification with the old
     * and new values when adapters are attached.
     * <!-- begin-user-doc -->
     * @since 2.9
     * <!-- end-user-doc -->
     * @generated
     */
    public void setRPatDCnt(Integer newRPatDCnt) {
        Integer oldRPatDCnt = rPatDCnt;
        rPatDCnt = newRPatDCnt;
        // Standard EMF generated-setter pattern: notify only if someone is listening.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.FNC__RPAT_DCNT, oldRPatDCnt, rPatDCnt));
    }
}
public class NamespacesInner {
    /**
     * Gets an authorization rule for a namespace by name.
     *
     * @param resourceGroupName The name of the resource group.
     * @param namespaceName The namespace name
     * @param authorizationRuleName Authorization rule name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the SharedAccessAuthorizationRuleResourceInner object if successful.
     */
    public SharedAccessAuthorizationRuleResourceInner getAuthorizationRule(String resourceGroupName, String namespaceName, String authorizationRuleName) {
        // Synchronous facade: block on the async service call and unwrap the body.
        return getAuthorizationRuleWithServiceResponseAsync(resourceGroupName, namespaceName, authorizationRuleName).toBlocking().single().body();
    }
}
public class Cron4jCron { @ Override public void setupNeighborConcurrent ( String groupName , JobConcurrentExec concurrentExec , RegisteredJob ... jobs ) { } }
assertArgumentNotNull ( "groupName" , groupName ) ; if ( groupName . trim ( ) . isEmpty ( ) ) { throw new IllegalArgumentException ( "The argument 'groupName' should not be empty: [" + groupName + "]" ) ; } assertArgumentNotNull ( "concurrentExec" , concurrentExec ) ; assertArgumentNotNull ( "jobs" , jobs ) ; final Set < LaJobKey > jobKeySet = Stream . of ( jobs ) . map ( job -> job . getJobKey ( ) ) . collect ( Collectors . toSet ( ) ) ; cron4jNow . setupNeighborConcurrent ( groupName , concurrentExec , jobKeySet ) ;
public class DoubleList { /** * Operates exactly as { @ link # remove ( int ) } * @ param index the index to remove * @ return the value removed */ public double removeD ( int index ) { } }
boundsCheck ( index ) ; double ret = array [ index ] ; for ( int i = index ; i < end - 1 ; i ++ ) array [ i ] = array [ i + 1 ] ; decreaseSize ( 1 ) ; return ret ;
public class Optionals {
    /**
     * Sequence operation: take a Stream of Optionals and turn it into an Optional
     * containing a sequence of the values. By contrast with
     * {@link Optionals#sequencePresent(IterableX)}, if any Optional is empty the
     * result is an empty Optional.
     *
     * <pre>
     * {@code
     *  Optional<Integer> just  = Optional.of(10);
     *  Optional<Integer> none  = Optional.empty();
     *  Optional<ReactiveSeq<Integer>> maybes = Optionals.sequence(Stream.of(just, none, Optional.of(1)));
     *  // ==> Optional.empty()
     * }
     * </pre>
     *
     * @param opts Optionals to sequence
     * @return Optional with a sequence of the contained values
     */
    public static <T> Optional<ReactiveSeq<T>> sequence(final Stream<? extends Optional<T>> opts) {
        // Delegate to the ReactiveSeq-based overload.
        return sequence(ReactiveSeq.fromStream(opts));
    }
}
public class DuplicateSegmentsLessThan {
    /**
     * Apply the filter to a given URI.
     *
     * Decides whether the raw path of {@code url} contains fewer than
     * {@code threshold} consecutive duplicate path segments. The algorithm
     * builds a suffix array restricted to segment boundaries, computes the
     * longest-common-prefix (LCP) array in linear time (Kasai et al.), and
     * then simulates a depth-first visit of the corresponding suffix tree,
     * looking for arithmetic progressions that witness tandem repeats of
     * segments (Stoye & Gusfield's technique).
     *
     * @param url the URI to be filtered
     * @return <code>true</code> if the path contains a number of duplicate segments less than the threshold
     */
    @Override
    public boolean apply(final URI url) {
        final String s = url.getRawPath();
        final int length = s.length();
        final boolean pathEndsWithSlash = s.charAt(length - 1) == '/';
        // One extra cell for the sentinel, plus one for a terminating slash if missing.
        final char[] path = new char[length + 1 + (!pathEndsWithSlash ? 1 : 0)];
        path[path.length - 1] = EXTRA_SYMBOL; // Usual suffix-array sentinel trick
        if (!pathEndsWithSlash) path[path.length - 2] = '/'; // To guarantee that each segment ends with a slash
        s.getChars(0, length, path, 0);

        // Phase 1: count slashes (== number of segment boundaries).
        int c = 0;
        for (int i = length; i-- != 0;) if (path[i] == '/') c++;
        if (c < threshold) {
            if (ASSERTS) matches(true, s);
            return true; // Too few segments: duplicates cannot reach the threshold.
        }

        // Phase 2: allocate and fill the start array (positions of all slashes).
        final int[] start = new int[c];
        c = 0;
        for (int i = 0; i < length; i++) if (path[i] == '/') start[c++] = i;

        // Phase 3: build the suffix array over segment-aligned suffixes and
        // compute the largest number of common path segments.
        final int[] a = new int[c];
        for (int i = c; i-- != 0;) a[i] = i;
        IntArrays.quickSort(a, 0, c, (x, y) -> {
            if (x == y) return 0;
            int j = start[x], k = start[y];
            // The sentinel guarantees termination of this scan.
            while (path[++j] == path[++k]);
            return path[j] - path[k];
        });

        // Linear-time LCP computation, from the Kasai et al. paper, but
        // counting common *segments* (slashes) rather than common characters.
        final int[] r = new int[c];
        for (int i = c; i-- != 0;) r[a[i]] = i;
        final int[] lcp = new int[c + 1]; // Last element accounts for the $ (sentinel) element
        int h = 0;
        int p = 1;
        boolean maxNonZero = false;
        for (int i = 0; i < c; i++) {
            if (r[i] > 0) {
                int j = a[r[i] - 1];
                final int starti = start[i];
                final int startj = start[j];
                while (path[starti + p] == path[startj + p]) {
                    if (path[starti + p] == '/') h++;
                    p++;
                }
                lcp[r[i]] = h;
                if (h > 0) {
                    maxNonZero = true;
                    // Discard the first common segment (Kasai's h-1 step, segment-wise).
                    int k = 1;
                    while (path[starti + k] != '/') k++;
                    p -= k;
                    h--;
                } else p = 1;
            }
        }
        if (!maxNonZero) {
            if (ASSERTS) matches(true, s);
            return true; // Not a single common prefix: no duplicate segments at all.
        }

        if (ASSERTS) {
            // Recompute the LCP array naively and compare, as a sanity check.
            final int[] lcp2 = new int[c + 1];
            for (int i = c; i-- != 1;) {
                final int starti = start[a[i - 1]];
                final int startipp = start[a[i]];
                int k = 1;
                int n = 0;
                while (path[starti + k] == path[startipp + k]) {
                    if (path[starti + k] == '/') n++;
                    k++;
                }
                lcp2[i] = n;
            }
            assert Arrays.equals(lcp2, lcp);
        }

        if (DEBUG) System.err.println("Path: " + Arrays.toString(path));
        if (DEBUG) System.err.println("Start: " + Arrays.toString(start));
        if (DEBUG) System.err.println("Suffixes: " + Arrays.toString(a));
        if (DEBUG) System.err.println("Common paths: " + Arrays.toString(lcp));

        // Phase 4: simulate a depth-first visit of the suffix tree
        // (simulated visit of the associated suffix tree, again after Kasai et al.).
        // A stack for left extremes and depth, initialised with -1, -1.
        final int[] ls = new int[c + 1], ds = new int[c + 1];
        /* A support array where, while visiting a node, we will store the length of the
         * maximal arithmetic progression of ratio d among the leaves of the current
         * node. */
        final int[] prog = new int[c];
        ls[0] = ds[0] = -1;
        p = 1;
        int llca, dlca;
        int l, d;
        for (int i = 0; i < c; i++) {
            llca = i;
            dlca = lcp[i + 1]; // Note that when i == c - 1 then lcp[i + 1] == 0.
            while (ds[p - 1] > dlca) {
                // Pop (l, d) off the stack.
                l = ls[--p];
                d = ds[p];
                if (DEBUG) System.err.printf("Got triple <" + l + ", " + i + ", " + d + "\n");
                if (DEBUG) System.err.println(IntArrayList.wrap(a).subList(l, i + 1));
                // Now we have a visit interval starting at l, ending at i, of depth d.
                if (i - l + 1 >= threshold) {
                    /* Now we have a list of leaves which share a common prefix of length d.
                     * Stoye and Gusfield note that we can find an arithmetic progression of
                     * ratio d among those leaves (e.g., we can find leaves whose associated positions are
                     * i, i + d, i + 2d, ..., i + (k - 1)d) iff those positions
                     * are the starting points of a tandem array of length k.
                     * To do this in linear time, we exploit the fact (noted by Stoye and Gusfield)
                     * that for l <= j <= i, r[a[j] + t * d] is the position in the string of
                     * a[j] + t * d, which means that a[j] + t * d is in the set of
                     * leaves under examination (i.e., a[l..i]) iff r[a[j] + t * d]
                     * is between l and i (inclusive).
                     * To avoid testing all elements separately (which would require potentially
                     * (i - l + 1) * k tests) we use prog either to remember the length of the longest
                     * increasing progression found starting with the corresponding element of a,
                     * or to remember that an element need not be examined because it cannot lead
                     * to maximal progressions.
                     * Starting from each leaf a[j], we try to
                     * extend greedily an arithmetic progression of ratio d, and record its length
                     * in prog[j]. When examining the following elements, if following the progression
                     * we hit an element with nonzero prog, we can just add to the current length
                     * the number found there and break the loop, as the maximal arithmetic
                     * progression of ratio d from our current position has already been computed. */
                    Arrays.fill(prog, l, i + 1, 0);
                    for (int j = l; j <= i; j++) {
                        if (prog[j] != 0) continue;
                        int t = 1, u = a[j], k = u, pos;
                        for (;;) {
                            k += d; // The next element of the progression
                            if (k >= c) break;
                            pos = r[k]; // Its position (in [l..i])
                            if (pos < l || i < pos) break;
                            else if (prog[pos] != 0) {
                                if (ASSERTS) assert prog[pos] > 0 : "l=" + l + " , i=" + i + ", j=" + j + ", t=" + t + ", a=" + Arrays.toString(a) + ", prog=" + Arrays.toString(prog);
                                t += prog[pos];
                                break;
                            }
                            t++;
                        }
                        if (t >= threshold) {
                            if (ASSERTS) matches(false, s);
                            return false;
                        }
                        prog[j] = t;
                        // We backtrack, putting -1 in all intermediate entries so we won't examine them further.
                        while ((k -= d) != u) prog[r[k]] = -1;
                    }
                }
                llca = l;
            }
            if (ds[p - 1] < dlca) {
                // Push (llca, dlca) on the stack.
                ls[p] = llca;
                ds[p++] = dlca;
            }
        }
        if (ASSERTS) matches(true, s);
        return true;
    }
}
public class CommonOps_DSCC { /** * Converts the permutation matrix into a vector * @ param P ( Input ) Permutation matrix * @ param vector ( Output ) Permutation vector */ public static void permutationVector ( DMatrixSparseCSC P , int [ ] vector ) { } }
if ( P . numCols != P . numRows ) { throw new MatrixDimensionException ( "Expected a square matrix" ) ; } else if ( P . nz_length != P . numCols ) { throw new IllegalArgumentException ( "Expected N non-zero elements in permutation matrix" ) ; } else if ( vector . length < P . numCols ) { throw new IllegalArgumentException ( "vector is too short" ) ; } int M = P . numCols ; for ( int i = 0 ; i < M ; i ++ ) { if ( P . col_idx [ i + 1 ] != i + 1 ) throw new IllegalArgumentException ( "Unexpected number of elements in a column" ) ; vector [ P . nz_rows [ i ] ] = i ; }
public class ProtobufIDLProxy { /** * Creates the single { @ link IDLProxyObject } . * @ param is the is * @ param debug the debug * @ param path the path * @ param isUniName the is uni name * @ return the IDL proxy object * @ throws IOException Signals that an I / O exception has occurred . */ public static IDLProxyObject createSingle ( InputStream is , boolean debug , File path , boolean isUniName ) throws IOException { } }
ProtoFile protoFile = ProtoSchemaParser . parseUtf8 ( DEFAULT_FILE_NAME , is ) ; List < CodeDependent > cds = new ArrayList < CodeDependent > ( ) ; Map < String , IDLProxyObject > map = doCreate ( protoFile , false , debug , path , false , null , cds , new HashMap < String , String > ( ) , isUniName ) ; return map . entrySet ( ) . iterator ( ) . next ( ) . getValue ( ) ;
public class ClassUtils { /** * Attempts to resolve the method with the specified name and signature on the given class type . The named method ' s * resolution is first attempted by using the specified method ' s name along with the array of parameter types . * If unsuccessful , the method proceeds to lookup the named method by searching all " declared " methods * of the class type having a signature compatible with the given argument types . This method operates recursively * until the method is resolved or the class type hierarchy is exhausted , in which case , * a MethodNotFoundException is thrown . * @ param type the Class type on which to resolve the method . * @ param methodName a String indicating the name of the method to resolve . * @ param parameterTypes an array of Class objects used to resolve the exact signature of the method . * @ param arguments an array of Objects used in a method invocation serving as a fallback search / lookup strategy * if the method cannot be resolved using it ' s parameter types . Maybe null . * @ param returnType the declared class type of the method ' s return value ( used only for Exception message purposes ) . * @ return the resolved method from the given class type given the name , parameter types ( signature ) * and calling arguments , if any . * @ throws MethodNotFoundException if the specified method cannot be resolved on the given class type . * @ throws NullPointerException if the class type is null . * @ see # getMethod ( Class , String , Class [ ] ) * @ see # findMethod ( Class , String , Object . . . ) * @ see java . lang . Class * @ see java . lang . reflect . Method */ public static Method resolveMethod ( Class < ? > type , String methodName , Class < ? > [ ] parameterTypes , Object [ ] arguments , Class < ? > returnType ) { } }
try { return getMethod ( type , methodName , parameterTypes ) ; } catch ( MethodNotFoundException cause ) { Method method = findMethod ( type , methodName , arguments ) ; Assert . notNull ( method , new MethodNotFoundException ( String . format ( "Failed to resolve method with signature [%1$s] on class type [%2$s]" , getMethodSignature ( methodName , parameterTypes , returnType ) , getName ( type ) ) , cause . getCause ( ) ) ) ; return method ; }
public class Closeables { /** * Provides { @ link Closeable } interface for { @ link Connection } * @ param connection the connection to decorate * @ return a closeable decorated connection */ public static Closeable closeableFrom ( @ WillNotClose @ Nullable final Connection connection ) { } }
return closeableFrom ( connection , Connection :: close ) ;
public class FileUtility { /** * { @ inheritDoc } */ @ Override public boolean exists ( String path ) { } }
File fPath = new File ( path ) ; return fPath . exists ( ) ;
public class ExecutionPipeline { /** * Sets the given allocated resource for all vertices included in this pipeline . * @ param resource * the allocated resource to set for all vertices included in this pipeline */ public void setAllocatedResource ( final AllocatedResource resource ) { } }
final Iterator < ExecutionVertex > it = this . vertices . iterator ( ) ; while ( it . hasNext ( ) ) { final ExecutionVertex vertex = it . next ( ) ; vertex . setAllocatedResource ( resource ) ; }
public class CFFFontSubset { /** * Function calcs bias according to the CharString type and the count * of the subrs * @ param Offset The offset to the relevant subrs index * @ param Font the font * @ return The calculated Bias */ protected int CalcBias ( int Offset , int Font ) { } }
seek ( Offset ) ; int nSubrs = getCard16 ( ) ; // If type = = 1 - > bias = 0 if ( fonts [ Font ] . CharstringType == 1 ) return 0 ; // else calc according to the count else if ( nSubrs < 1240 ) return 107 ; else if ( nSubrs < 33900 ) return 1131 ; else return 32768 ;
public class Binomial { /** * Returns the Pvalue for a particular score * @ param score * @ param n * @ param p * @ return */ private static double scoreToPvalue ( double score , int n , double p ) { } }
/* if ( n < = 20 ) { / / calculate it from binomial distribution */ double z = ( score + 0.5 - n * p ) / Math . sqrt ( n * p * ( 1.0 - p ) ) ; return ContinuousDistributions . gaussCdf ( z ) ;