signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Types { /** * Return the least upper bound of list of types . if the lub does * not exist return null . */ public Type lub ( List < Type > ts ) { } }
return lub ( ts . toArray ( new Type [ ts . length ( ) ] ) ) ;
public class CPOptionUtil { /** * Returns a range of all the cp options where groupId = & # 63 ; . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CPOptionModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param groupId the group ID * @ param start the lower bound of the range of cp options * @ param end the upper bound of the range of cp options ( not inclusive ) * @ return the range of matching cp options */ public static List < CPOption > findByGroupId ( long groupId , int start , int end ) { } }
return getPersistence ( ) . findByGroupId ( groupId , start , end ) ;
public class GroupByBuilder { /** * Get the results as a map * @ param expression projection * @ return new result transformer */ public < V > ResultTransformer < Map < K , V > > as ( FactoryExpression < V > expression ) { } }
final FactoryExpression < ? > transformation = FactoryExpressionUtils . wrap ( expression ) ; List < Expression < ? > > args = transformation . getArgs ( ) ; return new GroupByMap < K , V > ( key , args . toArray ( new Expression < ? > [ args . size ( ) ] ) ) { @ Override protected Map < K , V > transform ( Map < K , Group > groups ) { Map < K , V > results = new LinkedHashMap < K , V > ( ( int ) Math . ceil ( groups . size ( ) / 0.75 ) , 0.75f ) ; for ( Map . Entry < K , Group > entry : groups . entrySet ( ) ) { results . put ( entry . getKey ( ) , transform ( entry . getValue ( ) ) ) ; } return results ; } @ SuppressWarnings ( "unchecked" ) protected V transform ( Group group ) { // XXX Isn ' t group . toArray ( ) suitable here ? List < Object > args = new ArrayList < Object > ( groupExpressions . size ( ) - 1 ) ; for ( int i = 1 ; i < groupExpressions . size ( ) ; i ++ ) { args . add ( group . getGroup ( groupExpressions . get ( i ) ) ) ; } return ( V ) transformation . newInstance ( args . toArray ( ) ) ; } } ;
public class PGpath { /** * This returns the path in the syntax expected by org . postgresql . */ public String getValue ( ) { } }
StringBuilder b = new StringBuilder ( open ? "[" : "(" ) ; for ( int p = 0 ; p < points . length ; p ++ ) { if ( p > 0 ) { b . append ( "," ) ; } b . append ( points [ p ] . toString ( ) ) ; } b . append ( open ? "]" : ")" ) ; return b . toString ( ) ;
public class WorkflowRunsInner { /** * Gets a workflow run . * @ param resourceGroupName The resource group name . * @ param workflowName The workflow name . * @ param runName The workflow run name . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < WorkflowRunInner > getAsync ( String resourceGroupName , String workflowName , String runName , final ServiceCallback < WorkflowRunInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getWithServiceResponseAsync ( resourceGroupName , workflowName , runName ) , serviceCallback ) ;
public class MicroWriter { /** * Write a Micro Node to a file using the default settings . * @ param aNode * The node to be serialized . May be any kind of node ( incl . * documents ) . May not be < code > null < / code > . * @ param aPath * The file to write to . May not be < code > null < / code > . * @ return { @ link ESuccess } */ @ Nonnull public static ESuccess writeToFile ( @ Nonnull final IMicroNode aNode , @ Nonnull final Path aPath ) { } }
return writeToFile ( aNode , aPath , XMLWriterSettings . DEFAULT_XML_SETTINGS ) ;
public class RMQSink { /** * Sets up the queue . The default implementation just declares the queue . The user may override * this method to have a custom setup for the queue ( i . e . binding the queue to an exchange or * defining custom queue parameters ) */ protected void setupQueue ( ) throws IOException { } }
if ( queueName != null ) { channel . queueDeclare ( queueName , false , false , false , null ) ; }
public class VoiceApi { /** * Create a conference in a single step * Perform a single - step conference to the specified destination . This adds the destination to the existing call , creating a conference if necessary . * @ param id The connection ID of the call to conference . ( required ) * @ param singleStepConferenceData ( required ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > singleStepConferenceWithHttpInfo ( String id , SingleStepConferenceData singleStepConferenceData ) throws ApiException { } }
com . squareup . okhttp . Call call = singleStepConferenceValidateBeforeCall ( id , singleStepConferenceData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class AWSCognitoIdentityProviderClient { /** * Disables the specified user as an administrator . Works on any user . * Requires developer credentials . * @ param adminDisableUserRequest * Represents the request to disable any user as an administrator . * @ return Result of the AdminDisableUser operation returned by the service . * @ throws ResourceNotFoundException * This exception is thrown when the Amazon Cognito service cannot find the requested resource . * @ throws InvalidParameterException * This exception is thrown when the Amazon Cognito service encounters an invalid parameter . * @ throws TooManyRequestsException * This exception is thrown when the user has made too many requests for a given operation . * @ throws NotAuthorizedException * This exception is thrown when a user is not authorized . * @ throws UserNotFoundException * This exception is thrown when a user is not found . * @ throws InternalErrorException * This exception is thrown when Amazon Cognito encounters an internal error . * @ sample AWSCognitoIdentityProvider . AdminDisableUser * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / AdminDisableUser " target = " _ top " > AWS * API Documentation < / a > */ @ Override public AdminDisableUserResult adminDisableUser ( AdminDisableUserRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAdminDisableUser ( request ) ;
public class CassandraStateFactory { /** * / * ( non - Javadoc ) * @ see storm . trident . state . StateFactory # makeState ( java . util . Map , backtype . storm . task . IMetricsContext , int , int ) */ @ SuppressWarnings ( { } }
"unchecked" , "rawtypes" } ) @ Override public State makeState ( Map conf , IMetricsContext metrics , int partitionIndex , int numPartitions ) { LOG . info ( "Making new CassandraState object for cluster " + cassandraClusterId + ": partition [" + partitionIndex + "] of [" + numPartitions + "]" ) ; AstyanaxClient client = AstyanaxClientFactory . getInstance ( cassandraClusterId , ( Map ) conf . get ( cassandraClusterId ) ) ; int batchMaxSize = Utils . getInt ( Utils . get ( conf , StormCassandraConstants . CASSANDRA_BATCH_MAX_SIZE , CassandraState . DEFAULT_MAX_BATCH_SIZE ) ) ; return new CassandraState ( client , batchMaxSize , this . exceptionHandler ) ;
public class XCalElement { /** * Adds a child element . * @ param name the name of the child element * @ param value the value of the child element . * @ return the created element */ public Element append ( String name , String value ) { } }
Element child = document . createElementNS ( XCAL_NS , name ) ; child . setTextContent ( value ) ; element . appendChild ( child ) ; return child ;
public class ContentValues { /** * Gets a value and converts it to a Short . * @ param key the value to get * @ return the Short value , or null if the value is missing or cannot be converted */ public Short getAsShort ( String key ) { } }
Object value = mValues . get ( key ) ; try { return value != null ? ( ( Number ) value ) . shortValue ( ) : null ; } catch ( ClassCastException e ) { if ( value instanceof CharSequence ) { try { return Short . valueOf ( value . toString ( ) ) ; } catch ( NumberFormatException e2 ) { logger . severe ( "Cannot parse Short value for " + value + " at key " + key ) ; return null ; } } else { logger . log ( Level . SEVERE , "Cannot cast value for " + key + " to a Short: " + value , e ) ; return null ; } }
public class TmdbDiscover { /** * Discover movies by different types of data like average rating , number of votes , genres and certifications . * @ param discover A discover object containing the search criteria required * @ return * @ throws MovieDbException */ public ResultList < MovieBasic > getDiscoverMovies ( Discover discover ) throws MovieDbException { } }
URL url = new ApiUrl ( apiKey , MethodBase . DISCOVER ) . subMethod ( MethodSub . MOVIE ) . buildUrl ( discover . getParams ( ) ) ; String webpage = httpTools . getRequest ( url ) ; WrapperGenericList < MovieBasic > wrapper = processWrapper ( getTypeReference ( MovieBasic . class ) , url , webpage ) ; return wrapper . getResultsList ( ) ;
public class CheckpointStatsTracker { /** * Creates a new pending checkpoint tracker . * @ param checkpointId ID of the checkpoint . * @ param triggerTimestamp Trigger timestamp of the checkpoint . * @ param props The checkpoint properties . * @ return Tracker for statistics gathering . */ PendingCheckpointStats reportPendingCheckpoint ( long checkpointId , long triggerTimestamp , CheckpointProperties props ) { } }
ConcurrentHashMap < JobVertexID , TaskStateStats > taskStateStats = createEmptyTaskStateStatsMap ( ) ; PendingCheckpointStats pending = new PendingCheckpointStats ( checkpointId , triggerTimestamp , props , totalSubtaskCount , taskStateStats , new PendingCheckpointStatsCallback ( ) ) ; statsReadWriteLock . lock ( ) ; try { counts . incrementInProgressCheckpoints ( ) ; history . addInProgressCheckpoint ( pending ) ; dirty = true ; } finally { statsReadWriteLock . unlock ( ) ; } return pending ;
public class ModuleWebhooks { /** * Create a new webhook . * This will create a new ID and return the newly created webhook as a return value . * This method will override the configuration specified through * { @ link CMAClient . Builder # setSpaceId ( String ) } and will ignore * { @ link CMAClient . Builder # setEnvironmentId ( String ) } . * @ param spaceId Which space should be used ? * @ param webhook A representation of the Webhook to be used . * @ return The webhook returned from the backend , containing created its ID and more . * @ throws IllegalArgumentException if space id is null . * @ throws IllegalArgumentException if webhook is null . */ public CMAWebhook create ( String spaceId , CMAWebhook webhook ) { } }
assertNotNull ( spaceId , "spaceId" ) ; assertNotNull ( webhook , "webhook" ) ; final String webhookId = webhook . getId ( ) ; final CMASystem system = webhook . getSystem ( ) ; webhook . setSystem ( null ) ; try { if ( webhookId == null ) { return service . create ( spaceId , webhook ) . blockingFirst ( ) ; } else { return service . create ( spaceId , webhookId , webhook ) . blockingFirst ( ) ; } } finally { webhook . setSystem ( system ) ; }
public class ImmutableValueMap { /** * Returns an immutable map containing a single entry . This map behaves and * performs comparably to { @ link Collections # singletonMap } but will not accept * a null key or value . It is preferable mainly for consistency and * maintainability of your code . * @ param k1 Key 1 * @ param v1 Value 1 * @ return ImmutableValueMap */ public static @ NotNull ImmutableValueMap of ( @ NotNull String k1 , @ NotNull Object v1 ) { } }
return new ImmutableValueMap ( ImmutableMap . < String , Object > of ( k1 , v1 ) ) ;
public class CompositeEntityMapper { /** * Return the column name byte buffer for this entity * @ param obj * @ return */ private ByteBuffer toColumnName ( Object obj ) { } }
SimpleCompositeBuilder composite = new SimpleCompositeBuilder ( bufferSize , Equality . EQUAL ) ; // Iterate through each component and add to a CompositeType structure for ( FieldMapper < ? > mapper : components ) { try { composite . addWithoutControl ( mapper . toByteBuffer ( obj ) ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } } return composite . get ( ) ;
public class JsonRequestHandler { /** * Check whether the HTTP method with which the request was sent is allowed according to the policy * installed * @ param pRequest request to check */ private void checkHttpMethod ( R pRequest ) { } }
if ( ! restrictor . isHttpMethodAllowed ( pRequest . getHttpMethod ( ) ) ) { throw new SecurityException ( "HTTP method " + pRequest . getHttpMethod ( ) . getMethod ( ) + " is not allowed according to the installed security policy" ) ; }
public class CookieHelper { /** * Invalidate ( clear ) the cookie in the HttpServletResponse . * Setting age to 0 to invalidate it . * @ param res */ @ Sensitive public static void clearCookie ( HttpServletRequest req , HttpServletResponse res , String cookieName , Cookie [ ] cookies ) { } }
Cookie existing = getCookie ( cookies , cookieName ) ; if ( existing != null ) { Cookie c = new Cookie ( cookieName , "" ) ; String path = existing . getPath ( ) ; if ( path == null ) path = "/" ; c . setPath ( path ) ; c . setMaxAge ( 0 ) ; // c . setHttpOnly ( existing . isHttpOnly ( ) ) ; c . setSecure ( existing . getSecure ( ) ) ; res . addCookie ( c ) ; }
public class JournalWriter { /** * Subclasses should call this method to initialize a new Journal file , if * they already know the repository hash and the current date . */ protected void writeDocumentHeader ( XMLEventWriter writer , String repositoryHash , Date currentDate ) throws JournalException { } }
try { putStartDocument ( writer ) ; putStartTag ( writer , QNAME_TAG_JOURNAL ) ; putAttribute ( writer , QNAME_ATTR_REPOSITORY_HASH , repositoryHash ) ; putAttribute ( writer , QNAME_ATTR_TIMESTAMP , JournalHelper . formatDate ( currentDate ) ) ; } catch ( XMLStreamException e ) { throw new JournalException ( e ) ; }
public class JSONGetter { /** * 从JSON中直接获取Bean对象 < br > * 先获取JSONObject对象 , 然后转为Bean对象 * @ param < T > Bean类型 * @ param key KEY * @ param beanType Bean类型 * @ return Bean对象 , 如果值为null或者非JSONObject类型 , 返回null * @ since 3.1.1 */ public < T > T getBean ( K key , Class < T > beanType ) { } }
final JSONObject obj = getJSONObject ( key ) ; return ( null == obj ) ? null : obj . toBean ( beanType ) ;
public class UCharacterName { /** * Adds all algorithmic names into the name set . * Equivalent to part of calcAlgNameSetsLengths . * @ param maxlength length to compare to * @ return the maximum length of any possible algorithmic name if it is > * maxlength , otherwise maxlength is returned . */ private int addAlgorithmName ( int maxlength ) { } }
int result = 0 ; for ( int i = m_algorithm_ . length - 1 ; i >= 0 ; i -- ) { result = m_algorithm_ [ i ] . add ( m_nameSet_ , maxlength ) ; if ( result > maxlength ) { maxlength = result ; } } return maxlength ;
public class ORecordSerializerStringAbstract { /** * Parses a string returning the value with the closer type . Numbers by default are INTEGER if haven ' t decimal separator , * otherwise FLOAT . To treat all the number types numbers are postponed with a character that tells the type : b = byte , s = short , * l = long , f = float , d = double , t = date . If starts with # it ' s a RecordID . Most of the code is equals to getType ( ) but has been * copied to speed - up it . * @ param iUnusualSymbols * Localized decimal number separators * @ param iValue * Value to parse * @ return The closest type recognized */ public static Object getTypeValue ( final String iValue ) { } }
if ( iValue == null ) return null ; if ( iValue . length ( ) == 0 ) return "" ; if ( iValue . length ( ) > 1 ) if ( iValue . charAt ( 0 ) == '"' && iValue . charAt ( iValue . length ( ) - 1 ) == '"' ) // STRING return OStringSerializerHelper . decode ( iValue . substring ( 1 , iValue . length ( ) - 1 ) ) ; else if ( iValue . charAt ( 0 ) == OStringSerializerHelper . BINARY_BEGINEND && iValue . charAt ( iValue . length ( ) - 1 ) == OStringSerializerHelper . BINARY_BEGINEND ) // STRING return OStringSerializerHelper . getBinaryContent ( iValue ) ; else if ( iValue . charAt ( 0 ) == OStringSerializerHelper . COLLECTION_BEGIN && iValue . charAt ( iValue . length ( ) - 1 ) == OStringSerializerHelper . COLLECTION_END ) { // COLLECTION final ArrayList < String > coll = new ArrayList < String > ( ) ; OStringSerializerHelper . getCollection ( iValue , 0 , coll ) ; return coll ; } else if ( iValue . charAt ( 0 ) == OStringSerializerHelper . MAP_BEGIN && iValue . charAt ( iValue . length ( ) - 1 ) == OStringSerializerHelper . MAP_END ) { // MAP return OStringSerializerHelper . getMap ( iValue ) ; } if ( iValue . charAt ( 0 ) == ORID . PREFIX ) // RID return new ORecordId ( iValue ) ; boolean integer = true ; char c ; for ( int index = 0 ; index < iValue . length ( ) ; ++ index ) { c = iValue . charAt ( index ) ; if ( c < '0' || c > '9' ) if ( ( index == 0 && ( c == '+' || c == '-' ) ) ) continue ; else if ( c == DECIMAL_SEPARATOR ) integer = false ; else { if ( index > 0 ) { if ( ! integer && c == 'E' ) { // CHECK FOR SCIENTIFIC NOTATION if ( index < iValue . length ( ) ) index ++ ; if ( iValue . charAt ( index ) == '-' ) continue ; } final String v = iValue . substring ( 0 , index ) ; if ( c == 'f' ) return new Float ( v ) ; else if ( c == 'c' ) return new BigDecimal ( v ) ; else if ( c == 'l' ) return new Long ( v ) ; else if ( c == 'd' ) return new Double ( v ) ; else if ( c == 'b' ) return new Byte ( v ) ; else if ( c == 'a' || c == 't' ) return new Date ( Long . 
parseLong ( v ) ) ; else if ( c == 's' ) return new Short ( v ) ; } return iValue ; } } if ( integer ) { try { return new Integer ( iValue ) ; } catch ( NumberFormatException e ) { return new Long ( iValue ) ; } } else return new BigDecimal ( iValue ) ;
public class GlobusPathMatchingResourcePatternResolver { /** * Converts an Ant - style pattern to a regex pattern by replacing ( . with \ \ . ) , ( ? with . ) , * ( * * with . * ) , and ( * with [ ^ / ] * ) . * @ param antStyleLocationPattern An Ant - Stlye location pattern . * @ return A regex style location pattern representation of the antStyleLocationPattern */ private String antToRegexConverter ( String antStyleLocationPattern ) { } }
String regexStyleLocationPattern = antStyleLocationPattern . replace ( "\\" , "/" ) ; regexStyleLocationPattern = regexStyleLocationPattern . replaceAll ( "\\." , "\\\\." ) ; // replace . with \ \ . regexStyleLocationPattern = regexStyleLocationPattern . replaceAll ( "//" , "/" ) ; // Solution for known test cases with / / issue at org . globus . gsi . proxy . ProxyPathValidatorTest line 536 , Needs Review regexStyleLocationPattern = regexStyleLocationPattern . replace ( '?' , '.' ) ; // replace ? with . regexStyleLocationPattern = regexStyleLocationPattern . replaceAll ( "\\*" , "[^/]*" ) ; // replace all * with [ ^ / ] * , this will make * * become [ ^ / ] * [ ^ / ] * regexStyleLocationPattern = regexStyleLocationPattern . replaceAll ( "\\[\\^/\\]\\*\\[\\^/\\]\\*" , ".*" ) ; // now replace the . * . * with just . * regexStyleLocationPattern = "^" + this . mainClassPath + regexStyleLocationPattern + "$" ; // add the beginning and end symbols , and mainClassPath , if the object is of the type classpath : / return regexStyleLocationPattern ;
public class CleverTapAPI { /** * Event */ private void queueEvent ( final Context context , final JSONObject event , final int eventType ) { } }
postAsyncSafely ( "queueEvent" , new Runnable ( ) { @ Override public void run ( ) { if ( isCurrentUserOptedOut ( ) ) { String eventString = event == null ? "null" : event . toString ( ) ; getConfigLogger ( ) . debug ( getAccountId ( ) , "Current user is opted out dropping event: " + eventString ) ; return ; } if ( shouldDeferProcessingEvent ( event , eventType ) ) { getConfigLogger ( ) . debug ( getAccountId ( ) , "App Launched not yet processed, re-queuing event " + event + "after 2s" ) ; getHandlerUsingMainLooper ( ) . postDelayed ( new Runnable ( ) { @ Override public void run ( ) { postAsyncSafely ( "queueEventWithDelay" , new Runnable ( ) { @ Override public void run ( ) { lazyCreateSession ( context ) ; addToQueue ( context , event , eventType ) ; } } ) ; } } , 2000 ) ; } else { lazyCreateSession ( context ) ; addToQueue ( context , event , eventType ) ; } } } ) ;
public class GwtWorkarounds { /** * Views a { @ code ByteOutput } as an { @ code OutputStream } . */ @ GwtIncompatible ( "OutputStream" ) static OutputStream asOutputStream ( final ByteOutput output ) { } }
checkNotNull ( output ) ; return new OutputStream ( ) { @ Override public void write ( int b ) throws IOException { output . write ( ( byte ) b ) ; } @ Override public void flush ( ) throws IOException { output . flush ( ) ; } @ Override public void close ( ) throws IOException { output . close ( ) ; } } ;
public class SafeTraceLevelIndexFactory { /** * Add the filters and values to the index . * The values are inserted as Integers for easy comparison in TraceComponent * and to avoid having to recompute each time the spec is checked . */ private static void addFiltersAndValuesToIndex ( BufferedReader br , PackageIndex < Integer > packageIndex ) throws IOException { } }
String line ; while ( ( line = br . readLine ( ) ) != null ) { if ( line . isEmpty ( ) || line . startsWith ( "#" ) ) { continue ; } int pos = line . indexOf ( '=' ) ; if ( pos > 0 ) { String filter = line . substring ( 0 , pos ) . trim ( ) ; String value = line . substring ( pos + 1 ) . trim ( ) ; packageIndex . add ( filter , getMinLevelIndex ( value ) ) ; } }
public class FileUtil { /** * Copies the fromFile to the toFile location . If toFile is a directory , a * new file is created in that directory , with the name of the fromFile . * If the toFile exists , the file will not be copied , unless owerWrite is * true . * @ param pFromFile The file to copy from * @ param pToFile The file to copy to * @ param pOverWrite Specifies if the toFile should be overwritten , if it * exists . * @ return { @ code true } if the file was copied successfully , * { @ code false } if the output file exists , and the * { @ code pOwerWrite } parameter is * { @ code false } . In all other cases , an * { @ code IOExceptio } n is thrown , and the method does not return . * @ throws IOException if an i / o error occurs during copy * @ todo Test copyDir functionality ! */ public static boolean copy ( File pFromFile , File pToFile , boolean pOverWrite ) throws IOException { } }
// Copy all directory structure if ( pFromFile . isDirectory ( ) ) { return copyDir ( pFromFile , pToFile , pOverWrite ) ; } // Check if destination is a directory if ( pToFile . isDirectory ( ) ) { // Create a new file with same name as from pToFile = new File ( pToFile , pFromFile . getName ( ) ) ; } // Check if file exists , and return false if overWrite is false if ( ! pOverWrite && pToFile . exists ( ) ) { return false ; } InputStream in = null ; OutputStream out = null ; try { // Use buffer size two times byte array , to avoid i / o bottleneck in = new FileInputStream ( pFromFile ) ; out = new FileOutputStream ( pToFile ) ; // Copy from inputStream to outputStream copy ( in , out ) ; } // Just pass any IOException on up the stack finally { close ( in ) ; close ( out ) ; } return true ; // If we got here , everything is probably okay . . ; - )
public class CommerceAddressPersistenceImpl { /** * Returns an ordered range of all the commerce addresses where groupId = & # 63 ; and classNameId = & # 63 ; and classPK = & # 63 ; and defaultBilling = & # 63 ; . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceAddressModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param groupId the group ID * @ param classNameId the class name ID * @ param classPK the class pk * @ param defaultBilling the default billing * @ param start the lower bound of the range of commerce addresses * @ param end the upper bound of the range of commerce addresses ( not inclusive ) * @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > ) * @ return the ordered range of matching commerce addresses */ @ Override public List < CommerceAddress > findByG_C_C_DB ( long groupId , long classNameId , long classPK , boolean defaultBilling , int start , int end , OrderByComparator < CommerceAddress > orderByComparator ) { } }
return findByG_C_C_DB ( groupId , classNameId , classPK , defaultBilling , start , end , orderByComparator , true ) ;
public class SmallSortedMap { /** * Creates a new instance for mapping FieldDescriptors to their values . * The { @ link # makeImmutable ( ) } implementation will convert the List values * of any repeated fields to unmodifiable lists . * @ param arraySize The size of the entry array containing the * lexicographically smallest mappings . */ static < FieldDescriptorType extends FieldSet . FieldDescriptorLite < FieldDescriptorType > > SmallSortedMap < FieldDescriptorType , Object > newFieldMap ( int arraySize ) { } }
return new SmallSortedMap < FieldDescriptorType , Object > ( arraySize ) { @ Override @ SuppressWarnings ( "unchecked" ) public void makeImmutable ( ) { if ( ! isImmutable ( ) ) { for ( int i = 0 ; i < getNumArrayEntries ( ) ; i ++ ) { final Map . Entry < FieldDescriptorType , Object > entry = getArrayEntryAt ( i ) ; if ( entry . getKey ( ) . isRepeated ( ) ) { final List value = ( List ) entry . getValue ( ) ; entry . setValue ( Collections . unmodifiableList ( value ) ) ; } } for ( Map . Entry < FieldDescriptorType , Object > entry : getOverflowEntries ( ) ) { if ( entry . getKey ( ) . isRepeated ( ) ) { final List value = ( List ) entry . getValue ( ) ; entry . setValue ( Collections . unmodifiableList ( value ) ) ; } } } super . makeImmutable ( ) ; } } ;
public class ConfigEvaluator { /** * Find the " super " PID from the < code > ibm : extends < / code > attribute of the OCD for < code > pid < / code > . * @ param pid must never be null * @ return the super PID or < code > null < / code > if no < code > ibm : extends < / code > attribute could be found */ private String getExtends ( String pid ) { } }
RegistryEntry re = getRegistryEntry ( pid ) ; return re == null ? null : re . getExtends ( ) ;
public class StringContext {
    /**
     * Tests whether the given string ends with the given suffix. When either
     * argument is {@code null}, the result is {@code true} only if both are
     * {@code null}.
     *
     * @param str the source string
     * @param suffix the suffix to test for
     * @return {@code true} if {@code str} ends with {@code suffix}
     */
    public boolean endsWith(String str, String suffix) {
        if (str == null || suffix == null) {
            // Both-null compares equal; a single null never matches.
            return str == suffix;
        }
        return str.endsWith(suffix);
    }
}
public class JobHistoryFileParserHadoop2 { /** * understand the schema so that we can parse the rest of the file * @ throws JSONException */ private void understandSchema ( String schema ) throws JSONException { } }
JSONObject j1 = new JSONObject ( schema ) ; JSONArray fields = j1 . getJSONArray ( FIELDS ) ; String fieldName ; String fieldTypeValue ; Object recName ; for ( int k = 0 ; k < fields . length ( ) ; k ++ ) { if ( fields . get ( k ) == null ) { continue ; } JSONObject allEvents = new JSONObject ( fields . get ( k ) . toString ( ) ) ; Object name = allEvents . get ( NAME ) ; if ( name != null ) { if ( name . toString ( ) . equalsIgnoreCase ( EVENT ) ) { JSONArray allTypeDetails = allEvents . getJSONArray ( TYPE ) ; for ( int i = 0 ; i < allTypeDetails . length ( ) ; i ++ ) { JSONObject actual = ( JSONObject ) allTypeDetails . get ( i ) ; JSONArray types = actual . getJSONArray ( FIELDS ) ; Map < String , String > typeDetails = new HashMap < String , String > ( ) ; for ( int j = 0 ; j < types . length ( ) ; j ++ ) { if ( types . getJSONObject ( j ) == null ) { continue ; } fieldName = types . getJSONObject ( j ) . getString ( NAME ) ; fieldTypeValue = types . getJSONObject ( j ) . getString ( TYPE ) ; if ( ( fieldName != null ) && ( fieldTypeValue != null ) ) { typeDetails . put ( fieldName , fieldTypeValue ) ; } } recName = actual . get ( NAME ) ; if ( recName != null ) { /* the next statement may throw an IllegalArgumentException if * it finds a new string that ' s not part of the Hadoop2RecordType enum * that way we know what types of events we are parsing */ fieldTypes . put ( Hadoop2RecordType . valueOf ( recName . toString ( ) ) , typeDetails ) ; } } } } }
public class SignatureDef { /** * Use { @ link # getInputsMap ( ) } instead . */ @ java . lang . Deprecated public java . util . Map < java . lang . String , org . tensorflow . framework . TensorInfo > getInputs ( ) { } }
return getInputsMap ( ) ;
public class CmsUserIconHelper { /** * Sets the user image for the given user . < p > * @ param cms the cms context * @ param user the user * @ param rootPath the image root path * @ throws CmsException in case anything goes wrong */ public void setUserImage ( CmsObject cms , CmsUser user , String rootPath ) throws CmsException { } }
CmsFile tempFile = cms . readFile ( cms . getRequestContext ( ) . removeSiteRoot ( rootPath ) ) ; CmsImageScaler scaler = new CmsImageScaler ( tempFile . getContents ( ) , tempFile . getRootPath ( ) ) ; if ( scaler . isValid ( ) ) { scaler . setType ( 2 ) ; scaler . setHeight ( 192 ) ; scaler . setWidth ( 192 ) ; byte [ ] content = scaler . scaleImage ( tempFile ) ; String previousImage = ( String ) user . getAdditionalInfo ( CmsUserIconHelper . USER_IMAGE_INFO ) ; String newFileName = USER_IMAGE_FOLDER + user . getId ( ) . toString ( ) + "_" + System . currentTimeMillis ( ) + getSuffix ( tempFile . getName ( ) ) ; CmsObject adminCms = OpenCms . initCmsObject ( m_adminCms ) ; CmsProject tempProject = adminCms . createTempfileProject ( ) ; adminCms . getRequestContext ( ) . setCurrentProject ( tempProject ) ; if ( adminCms . existsResource ( newFileName ) ) { // a user image of the given name already exists , just write the new content CmsFile imageFile = adminCms . readFile ( newFileName ) ; adminCms . lockResource ( imageFile ) ; imageFile . setContents ( content ) ; adminCms . writeFile ( imageFile ) ; adminCms . writePropertyObject ( newFileName , new CmsProperty ( CmsPropertyDefinition . PROPERTY_IMAGE_SIZE , null , "w:192,h:192" ) ) ; } else { // create a new user image file adminCms . createResource ( newFileName , OpenCms . getResourceManager ( ) . getResourceType ( CmsResourceTypeImage . getStaticTypeName ( ) ) , content , Collections . singletonList ( new CmsProperty ( CmsPropertyDefinition . PROPERTY_IMAGE_SIZE , null , "w:192,h:192" ) ) ) ; } if ( newFileName . equals ( previousImage ) ) { previousImage = null ; } if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( previousImage ) ) { previousImage = ( String ) user . getAdditionalInfo ( CmsUserIconHelper . USER_IMAGE_INFO ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( previousImage ) && cms . existsResource ( newFileName , CmsResourceFilter . ONLY_VISIBLE_NO_DELETED ) ) { try { adminCms . 
lockResource ( previousImage ) ; adminCms . deleteResource ( previousImage , CmsResource . DELETE_REMOVE_SIBLINGS ) ; } catch ( CmsException e ) { LOG . error ( "Error deleting previous user image." , e ) ; } } } user . setAdditionalInfo ( CmsUserIconHelper . USER_IMAGE_INFO , newFileName ) ; adminCms . writeUser ( user ) ; try { OpenCms . getPublishManager ( ) . publishProject ( adminCms ) ; } catch ( Exception e ) { LOG . error ( "Error publishing user image resources." , e ) ; } }
public class Cookie { /** * Need to call this by reflection for backwards compatibility with Servlet 2.5 */ private static void setHttpOnlyReflect ( org . javalite . activeweb . Cookie awCookie , javax . servlet . http . Cookie servletCookie ) { } }
try { servletCookie . getClass ( ) . getMethod ( "setHttpOnly" , boolean . class ) . invoke ( servletCookie , awCookie . isHttpOnly ( ) ) ; } catch ( Exception e ) { LOGGER . warn ( "You are trying to set HttpOnly on a cookie, but it appears you are running on Servlet version before 3.0." ) ; }
public class OBOOntology { /** * Add a single OntologyTerm to the ontology . * @ param term * The OntologyTerm . */ public void addTerm ( OntologyTerm term ) { } }
terms . put ( term . getId ( ) , term ) ; indexTerm ( term ) ; isTypeOfIsBuilt = false ;
public class CreateConnectorDefinitionVersionRequest { /** * A list of references to connectors in this version , with their corresponding configuration settings . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setConnectors ( java . util . Collection ) } or { @ link # withConnectors ( java . util . Collection ) } if you want to * override the existing values . * @ param connectors * A list of references to connectors in this version , with their corresponding configuration settings . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateConnectorDefinitionVersionRequest withConnectors ( Connector ... connectors ) { } }
if ( this . connectors == null ) { setConnectors ( new java . util . ArrayList < Connector > ( connectors . length ) ) ; } for ( Connector ele : connectors ) { this . connectors . add ( ele ) ; } return this ;
public class SubscriptionAdminClient { /** * Creates a subscription to a given topic . See the & lt ; a * href = " https : / / cloud . google . com / pubsub / docs / admin # resource _ names " & gt ; resource name * rules & lt ; / a & gt ; . If the subscription already exists , returns ` ALREADY _ EXISTS ` . If the * corresponding topic doesn ' t exist , returns ` NOT _ FOUND ` . * < p > If the name is not provided in the request , the server will assign a random name for this * subscription on the same project as the topic , conforming to the [ resource name * format ] ( https : / / cloud . google . com / pubsub / docs / admin # resource _ names ) . The generated name is * populated in the returned Subscription object . Note that for REST API requests , you must * specify a name in the request . * < p > Sample code : * < pre > < code > * try ( SubscriptionAdminClient subscriptionAdminClient = SubscriptionAdminClient . create ( ) ) { * ProjectSubscriptionName name = ProjectSubscriptionName . of ( " [ PROJECT ] " , " [ SUBSCRIPTION ] " ) ; * ProjectTopicName topic = ProjectTopicName . of ( " [ PROJECT ] " , " [ TOPIC ] " ) ; * PushConfig pushConfig = PushConfig . newBuilder ( ) . build ( ) ; * int ackDeadlineSeconds = 0; * Subscription response = subscriptionAdminClient . createSubscription ( name , topic , pushConfig , ackDeadlineSeconds ) ; * < / code > < / pre > * @ param name The name of the subscription . It must have the format * ` " projects / { project } / subscriptions / { subscription } " ` . ` { subscription } ` must start with a * letter , and contain only letters ( ` [ A - Za - z ] ` ) , numbers ( ` [ 0-9 ] ` ) , dashes ( ` - ` ) , underscores * ( ` _ ` ) , periods ( ` . ` ) , tildes ( ` ~ ` ) , plus ( ` + ` ) or percent signs ( ` % ` ) . It must be between 3 * and 255 characters in length , and it must not start with ` " goog " ` * @ param topic The name of the topic from which this subscription is receiving messages . 
Format * is ` projects / { project } / topics / { topic } ` . The value of this field will be ` _ deleted - topic _ ` * if the topic has been deleted . * @ param pushConfig If push delivery is used with this subscription , this field is used to * configure it . An empty ` pushConfig ` signifies that the subscriber will pull and ack * messages using API methods . * @ param ackDeadlineSeconds The approximate amount of time ( on a best - effort basis ) Pub / Sub waits * for the subscriber to acknowledge receipt before resending the message . In the interval * after the message is delivered and before it is acknowledged , it is considered to be * & lt ; i & gt ; outstanding & lt ; / i & gt ; . During that time period , the message will not be * redelivered ( on a best - effort basis ) . * < p > For pull subscriptions , this value is used as the initial value for the ack deadline . To * override this value for a given message , call ` ModifyAckDeadline ` with the corresponding * ` ack _ id ` if using non - streaming pull or send the ` ack _ id ` in a * ` StreamingModifyAckDeadlineRequest ` if using streaming pull . The minimum custom deadline * you can specify is 10 seconds . The maximum custom deadline you can specify is 600 seconds * ( 10 minutes ) . If this parameter is 0 , a default value of 10 seconds is used . * < p > For push delivery , this value is also used to set the request timeout for the call to * the push endpoint . * < p > If the subscriber never acknowledges the message , the Pub / Sub system will eventually * redeliver the message . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Subscription createSubscription ( ProjectSubscriptionName name , ProjectTopicName topic , PushConfig pushConfig , int ackDeadlineSeconds ) { } }
Subscription request = Subscription . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . setTopic ( topic == null ? null : topic . toString ( ) ) . setPushConfig ( pushConfig ) . setAckDeadlineSeconds ( ackDeadlineSeconds ) . build ( ) ; return createSubscription ( request ) ;
public class ByteUtil { /** * Converts string hex representation to data bytes * @ param data * String like ' 0xa5e . . ' or just ' a5e . . ' * @ return decoded bytes array */ public static byte [ ] hexStringToBytes ( String data ) { } }
if ( data == null ) return EMPTY_BYTE_ARRAY ; if ( data . startsWith ( "0x" ) ) data = data . substring ( 2 ) ; return Hex . decode ( data ) ;
public class CacheMapUtil { /** * retrieval all the values in the cache map * @ param key the key of the cached map * @ param vClazz the value ' s class * @ param < T > the value ' s generic type * @ return the whole list if exists or null if the key does not exist . return */ public static < T > Maybe < List < T > > values ( String key , Class < T > vClazz ) { } }
return values ( CacheService . CACHE_CONFIG_BEAN , key , vClazz ) ;
public class AbstractSpecExecutor { /** * The definition of attributes are the technology that a { @ link SpecExecutor } is using and * the physical location that it runs on . * These attributes are supposed to be static and read - only . */ @ Override public Config getAttrs ( ) { } }
Preconditions . checkArgument ( this . config . hasPath ( ServiceConfigKeys . ATTRS_PATH_IN_CONFIG ) , "Input configuration doesn't contains SpecExecutor Attributes path." ) ; return this . config . getConfig ( ServiceConfigKeys . ATTRS_PATH_IN_CONFIG ) ;
public class VerticalBarChart { /** * This is the main entry point after the graph has been inflated . Used to initialize the graph * and its corresponding members . */ @ Override protected void initializeGraph ( ) { } }
super . initializeGraph ( ) ; mData = new ArrayList < > ( ) ; mValuePaint = new Paint ( mLegendPaint ) ; if ( this . isInEditMode ( ) ) { addBar ( new BarModel ( 2.3f ) ) ; addBar ( new BarModel ( 2.f ) ) ; addBar ( new BarModel ( 3.3f ) ) ; addBar ( new BarModel ( 1.1f ) ) ; addBar ( new BarModel ( 2.7f ) ) ; addBar ( new BarModel ( 2.3f ) ) ; addBar ( new BarModel ( 2.f ) ) ; addBar ( new BarModel ( 3.3f ) ) ; addBar ( new BarModel ( 1.1f ) ) ; addBar ( new BarModel ( 2.7f ) ) ; }
public class ScriptPluginProviderLoader {

    /**
     * Builds the {@link ScriptPluginProvider} definition for the given provider
     * definition and ident, expanding the plugin archive on first use.
     *
     * <p>The expanded directory is cached in {@code fileExpandedDir}; on the first
     * call the archive is unpacked and, for "script"-type plugins, the execute bit
     * is set on the script file. On every call the script file's existence is
     * re-validated before the provider is constructed.</p>
     *
     * @param pluginMeta plugin metadata
     * @param file       the plugin archive file
     * @param pluginDef  the provider definition within the plugin
     * @param ident      service/provider identity, used for error reporting
     * @return a provider backed by the expanded plugin directory
     * @throws ProviderLoaderException if the archive cannot be expanded
     * @throws PluginException         if the declared script file does not exist
     */
    private ScriptPluginProvider getPlugin(final PluginMeta pluginMeta, final File file,
                                           final ProviderDef pluginDef, final ProviderIdent ident)
            throws ProviderLoaderException, PluginException {
        if (null == fileExpandedDir) {
            // First access: unpack the plugin archive to a working directory.
            final File dir;
            try {
                dir = expandScriptPlugin(file);
            } catch (IOException e) {
                throw new ProviderLoaderException(e, ident.getService(), ident.getProviderName());
            }
            fileExpandedDir = dir;
            if (pluginDef.getPluginType().equals("script")) {
                final File script = new File(fileExpandedDir, pluginDef.getScriptFile());
                // set executable bit for script-file of the provider
                try {
                    ScriptfileUtils.setExecutePermissions(script);
                } catch (IOException e) {
                    // Best-effort: the script may still be runnable via its interpreter.
                    log.warn("Unable to set executable bit for script file: " + script + ": " + e.getMessage());
                }
            }
            debug("expanded plugin dir! " + fileExpandedDir);
        } else {
            debug("expanded plugin dir: " + fileExpandedDir);
        }
        if (pluginDef.getPluginType().equals("script")) {
            // Validate on every call, not just after expansion: the expanded dir
            // may have been created by an earlier call for a different provider.
            final File script = new File(fileExpandedDir, pluginDef.getScriptFile());
            if (!script.exists() || !script.isFile()) {
                throw new PluginException("Script file was not found: " + script.getAbsolutePath());
            }
        }
        return new ScriptPluginProviderImpl(pluginMeta, pluginDef, file, fileExpandedDir);
    }
}
public class NumberConverter {

    /**
     * Converts the string to a number, using the given format for parsing.
     *
     * @param pString the string to convert; empty/null input yields {@code null}
     * @param pType   the target number type (primitive or wrapper); also accepts
     *                {@link BigInteger} and {@link BigDecimal}, which bypass the format
     * @param pFormat the {@link java.text.NumberFormat} pattern used for parsing, or
     *                {@code null} to use the shared default format
     * @return the object created from the given string; may safely be typecast to
     *         {@code java.lang.Number} or the class of the {@code pType} parameter
     * @throws ConversionException if parsing fails or a runtime error occurs
     * @see Number
     * @see java.text.NumberFormat
     */
    public Object toObject(final String pString, final Class pType, final String pFormat)
            throws ConversionException {
        if (StringUtil.isEmpty(pString)) {
            return null;
        }
        try {
            // Big-number types parse directly from the string representation.
            if (pType.equals(BigInteger.class)) {
                return new BigInteger(pString); // No format?
            }
            if (pType.equals(BigDecimal.class)) {
                return new BigDecimal(pString); // No format?
            }
            NumberFormat format;
            if (pFormat == null) {
                // Use system default format, using default locale
                format = sDefaultFormat;
            } else {
                // Get format from cache
                format = getNumberFormat(pFormat);
            }
            Number num;
            // NumberFormat instances are not thread-safe; formats are shared/cached,
            // so parsing must be serialized per-format.
            synchronized (format) {
                num = format.parse(pString);
            }
            // Narrow the parsed Number to the requested primitive/wrapper type.
            if (pType == Integer.TYPE || pType == Integer.class) {
                return num.intValue();
            } else if (pType == Long.TYPE || pType == Long.class) {
                return num.longValue();
            } else if (pType == Double.TYPE || pType == Double.class) {
                return num.doubleValue();
            } else if (pType == Float.TYPE || pType == Float.class) {
                return num.floatValue();
            } else if (pType == Byte.TYPE || pType == Byte.class) {
                return num.byteValue();
            } else if (pType == Short.TYPE || pType == Short.class) {
                return num.shortValue();
            }
            // Unknown target type: hand back the Number as parsed.
            return num;
        } catch (ParseException pe) {
            throw new ConversionException(pe);
        } catch (RuntimeException rte) {
            throw new ConversionException(rte);
        }
    }
}
public class DatabaseConnection { /** * Initializes a syntax rule for a given database connection . * < p > The rule is good for different highlighter and completer instances , * but not necessarily for other database connections ( because it * depends on the set of keywords and identifier quote string ) . */ private void initSyntaxRule ( ) throws SQLException { } }
// Deduce the string used to quote identifiers . For example , Oracle // uses double - quotes : // SELECT * FROM " My Schema " . " My Table " String identifierQuoteString = meta . getIdentifierQuoteString ( ) ; if ( identifierQuoteString . length ( ) > 1 ) { sqlLine . error ( "Identifier quote string is '" + identifierQuoteString + "'; quote strings longer than 1 char are not supported" ) ; identifierQuoteString = null ; } final String productName = meta . getDatabaseProductName ( ) ; final Set < String > keywords = Stream . of ( meta . getSQLKeywords ( ) . split ( "," ) ) . collect ( Collectors . toSet ( ) ) ; dialect = DialectImpl . create ( keywords , identifierQuoteString , productName , meta . storesUpperCaseIdentifiers ( ) ) ;
public class HttpServletResourceAdaptor { /** * ( non - Javadoc ) * @ see javax . slee . resource . ResourceAdaptor # raVerifyConfiguration ( javax . slee . resource . ConfigProperties ) */ public void raVerifyConfiguration ( ConfigProperties configProperties ) throws InvalidConfigurationException { } }
ConfigProperties . Property property = configProperties . getProperty ( NAME_CONFIG_PROPERTY ) ; if ( property == null ) { throw new InvalidConfigurationException ( "name property not found" ) ; } if ( ! property . getType ( ) . equals ( String . class . getName ( ) ) ) { throw new InvalidConfigurationException ( "name property must be of type java.lang.String" ) ; } if ( property . getValue ( ) == null ) { // don ' t think this can happen , but just to be sure throw new InvalidConfigurationException ( "name property must not have a null value" ) ; }
public class DBFWriter { /** * Sets fields definition * @ param fields fields definition */ public void setFields ( DBFField [ ] fields ) { } }
if ( this . closed ) { throw new IllegalStateException ( "You can not set fields to a closed DBFWriter" ) ; } if ( this . header . fieldArray != null ) { throw new DBFException ( "Fields has already been set" ) ; } if ( fields == null || fields . length == 0 ) { throw new DBFException ( "Should have at least one field" ) ; } for ( int i = 0 ; i < fields . length ; i ++ ) { if ( fields [ i ] == null ) { throw new DBFException ( "Field " + i + " is null" ) ; } } this . header . fieldArray = new DBFField [ fields . length ] ; for ( int i = 0 ; i < fields . length ; i ++ ) { this . header . fieldArray [ i ] = new DBFField ( fields [ i ] ) ; } try { if ( this . raf != null && this . raf . length ( ) == 0 ) { // this is a new / non - existent file . So write header before proceeding this . header . write ( this . raf ) ; } } catch ( IOException e ) { throw new DBFException ( "Error accesing file:" + e . getMessage ( ) , e ) ; }
public class MethodCompiler { /** * Labels a current position * @ param name * @ throws IOException */ @ Override public void fixAddress ( String name ) throws IOException { } }
super . fixAddress ( name ) ; if ( debugMethod != null ) { int position = position ( ) ; tload ( "this" ) ; ldc ( position ) ; ldc ( name ) ; invokevirtual ( debugMethod ) ; }
public class Context {

    /**
     * Computes the context after an attribute delimiter is seen.
     *
     * @param elType            the type of element whose tag the attribute appears in
     * @param attrType          the type of attribute whose value the delimiter starts
     * @param delim             the type of delimiter that will mark the end of the attribute value
     * @param uriType           the URI flavor; must be non-NONE exactly when {@code attrType} is URI
     * @param templateNestDepth the number of {@code <template>} elements on the open element stack
     * @return a context suitable for the start of the attribute value
     */
    static Context computeContextAfterAttributeDelimiter(ElementType elType, AttributeType attrType,
            AttributeEndDelimiter delim, UriType uriType, int templateNestDepth) {
        HtmlContext state;
        JsFollowingSlash slash = JsFollowingSlash.NONE;
        UriPart uriPart = UriPart.NONE;
        // Map the attribute type to the lexical state the value content starts in.
        switch (attrType) {
            case PLAIN_TEXT:
                state = HtmlContext.HTML_NORMAL_ATTR_VALUE;
                break;
            case SCRIPT:
                state = HtmlContext.JS;
                // Start a JS block in a regex state since
                //   /foo/.test(str) && doSideEffect();
                // which starts with a regular expression literal is a valid and possibly useful program,
                // but there is no valid program which starts with a division operator.
                slash = JsFollowingSlash.REGEX;
                break;
            case STYLE:
                state = HtmlContext.CSS;
                break;
            case HTML:
                state = HtmlContext.HTML_HTML_ATTR_VALUE;
                break;
            case META_REFRESH_CONTENT:
                state = HtmlContext.HTML_META_REFRESH_CONTENT;
                break;
            case URI:
                state = HtmlContext.URI;
                uriPart = UriPart.START;
                break;
            // NONE is not a valid AttributeType inside an attribute value.
            default:
                throw new AssertionError("Unexpected attribute type " + attrType);
        }
        // The two signals must agree: a URI uriType iff a URI attribute.
        Preconditions.checkArgument((uriType != UriType.NONE) == (attrType == AttributeType.URI),
                "uriType=%s but attrType=%s", uriType, attrType);
        return new Context(state, elType, attrType, delim, slash, uriPart, uriType,
                HtmlHtmlAttributePosition.NONE, templateNestDepth, 0);
    }
}
public class AbstractMapper { /** * By default , if only a single { @ link Binder } is present , returns { @ link Binder # hasStableIds ( ) } , otherwise returns * { @ code false } */ @ Override public boolean hasStableIds ( ) { } }
Collection < ? extends Binder < ? , ? > > allBinders = getAllBinders ( ) ; if ( allBinders . size ( ) == 1 ) { // Save an allocation by checking for List first . if ( allBinders instanceof List ) { // noinspection unchecked return ( ( List < Binder < ? , ? > > ) allBinders ) . get ( 0 ) . hasStableIds ( ) ; } return allBinders . iterator ( ) . next ( ) . hasStableIds ( ) ; } // Can ' t possibly have stable IDs if we have multiple binders . return false ;
public class Snapshot { /** * A list of the AWS customer accounts authorized to restore the snapshot . Returns < code > null < / code > if no accounts * are authorized . Visible only to the snapshot owner . * @ param accountsWithRestoreAccess * A list of the AWS customer accounts authorized to restore the snapshot . Returns < code > null < / code > if no * accounts are authorized . Visible only to the snapshot owner . */ public void setAccountsWithRestoreAccess ( java . util . Collection < AccountWithRestoreAccess > accountsWithRestoreAccess ) { } }
if ( accountsWithRestoreAccess == null ) { this . accountsWithRestoreAccess = null ; return ; } this . accountsWithRestoreAccess = new com . amazonaws . internal . SdkInternalList < AccountWithRestoreAccess > ( accountsWithRestoreAccess ) ;
public class ImageMiscOps { /** * Copies a rectangular region from one image into another . < br > * output [ dstX : ( dstX + width ) , dstY : ( dstY + height - 1 ) ] = input [ srcX : ( srcX + width ) , srcY : ( srcY + height - 1 ) ] * @ param srcX x - coordinate of corner in input image * @ param srcY y - coordinate of corner in input image * @ param dstX x - coordinate of corner in output image * @ param dstY y - coordinate of corner in output image * @ param width Width of region to be copied * @ param height Height of region to be copied * @ param input Input image * @ param output output image */ public static void copy ( int srcX , int srcY , int dstX , int dstY , int width , int height , GrayF32 input , GrayF32 output ) { } }
if ( input . width < srcX + width || input . height < srcY + height ) throw new IllegalArgumentException ( "Copy region must be contained input image" ) ; if ( output . width < dstX + width || output . height < dstY + height ) throw new IllegalArgumentException ( "Copy region must be contained output image" ) ; for ( int y = 0 ; y < height ; y ++ ) { int indexSrc = input . startIndex + ( srcY + y ) * input . stride + srcX ; int indexDst = output . startIndex + ( dstY + y ) * output . stride + dstX ; for ( int x = 0 ; x < width ; x ++ ) { output . data [ indexDst ++ ] = input . data [ indexSrc ++ ] ; } }
public class CacheSweeper { /** * / * ( non - Javadoc ) * @ see java . lang . Thread # run ( ) */ @ Override public void run ( ) { } }
try { while ( true ) { final long start = System . currentTimeMillis ( ) ; synchronized ( lock ) { long remaining = sweepInterval * 1000L ; while ( remaining > 0 ) { lock . wait ( remaining ) ; remaining = start + sweepInterval * 1000 - System . currentTimeMillis ( ) ; } } cache . removeExpired ( ) ; } } catch ( final InterruptedException e ) { // just let thread exit }
public class ImportTraverser { /** * Add a sub - traversal for a neighboring node . */ private void addSubtraversal ( T node , LinkedList < T > imports , Set < T > alreadyIncludedImports , NodePath < T > nodePath ) throws ExecutionException { } }
if ( addNodeIfNotAlreadyIncluded ( node , imports , alreadyIncludedImports ) ) { for ( T inheritedFromParent : doTraverseGraphRecursively ( node , nodePath ) ) { addNodeIfNotAlreadyIncluded ( inheritedFromParent , imports , alreadyIncludedImports ) ; } }
public class Calc { /** * Returns the unit vector of vector a . * @ param a * an Atom object * @ return an Atom object */ public static final Atom unitVector ( Atom a ) { } }
double amount = amount ( a ) ; double [ ] coords = new double [ 3 ] ; coords [ 0 ] = a . getX ( ) / amount ; coords [ 1 ] = a . getY ( ) / amount ; coords [ 2 ] = a . getZ ( ) / amount ; a . setCoords ( coords ) ; return a ;
public class ServerSICoreConnectionListener {

    /**
     * Sends an ME (messaging engine) notification to the client using the specified
     * event id.
     *
     * @param eventId the event ID
     * @param conn    the core connection that is dieing; may be null
     * @param sess    the consumer session that generated the exception; may be null
     * @param t       the exception to be sent back
     * @throws SIConnectionLostException
     * @throws SIConnectionDroppedException
     * @throws SIConnectionUnavailableException
     */
    private void sendMeNotificationEvent(short eventId, SICoreConnection conn,
                                         ConsumerSession sess, Throwable t)
            throws SIConnectionLostException, SIConnectionDroppedException,
                   SIConnectionUnavailableException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "sendMeNotificationEvent", new Object[] { "" + eventId, conn, sess, t });

        Conversation conversation = null;
        int consumerId = -1;

        // Get the conversation. If we have an SICoreConnection handed to us, we can
        // dive into the hashtable and retrieve the correct one.
        if (conn != null) {
            conversation = conversationTable.get(conn);
        } else {
            // If no SICoreConnection was passed in (for example when sending an async
            // exception) we could retrieve that from the ConsumerSession. However, it is
            // most likely that the reason we are sending the async exception is because
            // the session has gone down and so the most reliable way is to iterate over
            // all the Conversations we know about. We expect this operation to be fairly
            // 'one off' - and so we don't need the performance.
            for (Iterator<Conversation> i = conversationTable.values().iterator(); i.hasNext();) {
                conversation = i.next();
                ConversationState convState = (ConversationState) conversation.getAttachment();
                // For each conversation look for the right Consumer Session
                List allObjs = convState.getAllObjects();
                for (int x = 0; x < allObjs.size(); x++) {
                    if (allObjs.get(x) instanceof CATMainConsumer) {
                        CATMainConsumer mainConsumer = (CATMainConsumer) allObjs.get(x);
                        ConsumerSession ourSession = mainConsumer.getConsumerSession();
                        // If we have found it, save the client session Id
                        if (ourSession == sess) {
                            consumerId = mainConsumer.getClientSessionId();
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(this, tc, "Located consumer. Session Id is", "" + consumerId);
                            break;
                        }
                    }
                }
                if (consumerId != -1) break;
            }
        }

        // Check here to see if the consumer Id is 0. If this is the case then this
        // consumer is currently synchronous and probably in the middle of an operation
        // such as a receiveWithWait. In this case, do not send the notification back to
        // the peer (as a consumerId of 0 will break the client and cause the connection
        // to be terminated) but let the consumer session's connection listener send the
        // notification back.
        if (consumerId == 0) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "Consumer Id was 0 - ignoring this notification");
        } else {
            if (conversation == null) {
                // Not a lot we can do here. So FFDC, but do not register
                // that we have notified this client about the termination
                // of this ME.
                NullPointerException npe = new NullPointerException("No conversation was located");
                FFDCFilter.processException(npe, CLASS_NAME + ".sendMeNotificationEvent",
                        CommsConstants.SERVERSICORECONNECTIONLISTENER_MEN_01, this);
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(tc, npe.getMessage(), npe);
            } else if (sess != null && consumerId == -1) {
                // This exception is generated purely for FFDC
                SIResourceException e = new SIResourceException("Unable to locate consumer session");
                FFDCFilter.processException(e, CLASS_NAME + ".sendMeNotificationEvent",
                        CommsConstants.SERVERSICORECONNECTIONLISTENER_MEN_01, this); // d192146
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(tc, e.getMessage(), e);
            } else {
                // Otherwise inform the client
                ConversationState convState = (ConversationState) conversation.getAttachment();
                CommsByteBuffer buffer = CommsByteBufferPool.getInstance().allocate();
                buffer.putShort(convState.getConnectionObjectId()); // BIT16 ConnectionObjectId
                buffer.putShort(eventId); // BIT16 Event Id
                if (sess != null) {
                    buffer.putShort(consumerId);
                    buffer.putException(t, null, conversation);
                }
                // If the event id is EVENTID_ME_QUIESCING or EVENTID_ME_TERMINATED then
                // don't block the thread, because this function may get executed in the
                // context of an SCR thread and should not get blocked. It is very rare for
                // this send to block, and those rare cases can be avoided by setting the
                // ThrottlingPolicy to DISCARD_TRANSMISSION. In the event of ME
                // termination/quiescing, this should be harmless.
                if (eventId == CommsConstants.EVENTID_ME_TERMINATED
                        || eventId == CommsConstants.EVENTID_ME_QUIESCING) {
                    conversation.send(buffer, JFapChannelConstants.SEG_EVENT_OCCURRED, 0,
                            JFapChannelConstants.PRIORITY_HIGH, true,
                            ThrottlingPolicy.DISCARD_TRANSMISSION, null);
                } else {
                    conversation.send(buffer, JFapChannelConstants.SEG_EVENT_OCCURRED, 0,
                            JFapChannelConstants.PRIORITY_HIGH, true,
                            ThrottlingPolicy.BLOCK_THREAD, null);
                }
            }
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "sendMeNotificationEvent");
    }
}
public class InstanceValidator { /** * public API */ private void checkCoding ( List < ValidationMessage > errors , String path , WrapperElement focus , Coding fixed ) { } }
checkFixedValue ( errors , path + ".system" , focus . getNamedChild ( "system" ) , fixed . getSystemElement ( ) , "system" ) ; checkFixedValue ( errors , path + ".code" , focus . getNamedChild ( "code" ) , fixed . getCodeElement ( ) , "code" ) ; checkFixedValue ( errors , path + ".display" , focus . getNamedChild ( "display" ) , fixed . getDisplayElement ( ) , "display" ) ; checkFixedValue ( errors , path + ".userSelected" , focus . getNamedChild ( "userSelected" ) , fixed . getUserSelectedElement ( ) , "userSelected" ) ;
public class CodecInfo {

    /**
     * Gets the codec info from terms.
     *
     * <p>Uses reflection so this works against any Terms implementation exposing the
     * {@code getIndexInputList}, {@code getIndexInputOffsetList} and {@code getVersion}
     * accessors (i.e. one backed by MtasFieldsProducer) without a compile-time
     * dependency on that class.</p>
     *
     * @param t the terms instance to inspect
     * @return the codec info extracted from the terms
     * @throws IOException if the reader does not expose the expected accessors, or
     *         reflective invocation fails
     */
    @SuppressWarnings("unchecked")
    public static CodecInfo getCodecInfoFromTerms(Terms t) throws IOException {
        try {
            HashMap<String, IndexInput> indexInputList = null;
            HashMap<String, Long> indexInputOffsetList = null;
            Object version = null;
            Method[] methods = t.getClass().getMethods();
            // All three accessors are zero-arg; a null args array means "no arguments".
            Object[] emptyArgs = null;
            for (Method m : methods) {
                if (m.getName().equals("getIndexInputList")) {
                    indexInputList = (HashMap<String, IndexInput>) m.invoke(t, emptyArgs);
                } else if (m.getName().equals("getIndexInputOffsetList")) {
                    indexInputOffsetList = (HashMap<String, Long>) m.invoke(t, emptyArgs);
                } else if (m.getName().equals("getVersion")) {
                    version = m.invoke(t, emptyArgs);
                }
            }
            if (indexInputList == null || indexInputOffsetList == null || version == null) {
                // Any missing accessor means this Terms is not backed by MtasFieldsProducer.
                throw new IOException("Reader doesn't provide MtasFieldsProducer");
            } else {
                return new CodecInfo(indexInputList, indexInputOffsetList, (int) version);
            }
        } catch (IllegalAccessException | InvocationTargetException e) {
            throw new IOException("Can't get codecInfo", e);
        }
    }
}
public class CCTotalizer {

    /**
     * Builds an at-least-k (ALK) cardinality constraint using the totalizer encoding.
     *
     * @param result the encoding result the clauses are added to
     * @param vars   the variables constrained to have at least {@code rhs} true
     * @param rhs    the right-hand side (the lower bound k)
     * @throws IllegalArgumentException if the right-hand side of the constraint was negative
     */
    void buildALK(final EncodingResult result, final Variable[] vars, int rhs) {
        final LNGVector<Variable> cardinalityOutvars = this.initializeConstraint(result, vars);
        // Record incremental-encoding state so the bound can be tightened later.
        this.incData = new CCIncrementalData(result, CCConfig.ALK_ENCODER.TOTALIZER, rhs,
                vars.length, cardinalityOutvars);
        this.toCNF(cardinalityOutvars, rhs, Bound.LOWER);
        assert this.cardinalityInvars.size() == 0;
        // Assert the first rhs output variables: forces at least rhs inputs to be true.
        for (int i = 0; i < rhs; i++)
            this.result.addClause(cardinalityOutvars.get(i));
    }
}
public class DefaultExpander {

    /**
     * Expands DSL constructions (rules and queries) found in the given DRL text.
     *
     * <p>Each matched construction is rewritten by expanding its LHS and RHS fragments
     * through the DSL mapping; text outside matched constructions is copied through
     * unchanged. Line counts are tracked so expansion errors can report source lines.</p>
     *
     * @param drl the DRL source to expand
     * @return the expanded source
     */
    private StringBuffer expandConstructions(final String drl) {
        // display keys if requested
        if (showKeyword) {
            for (DSLMappingEntry entry : this.keywords) {
                logger.info("keyword: " + entry.getMappingKey());
                logger.info(" " + entry.getKeyPattern());
            }
        }
        if (showWhen) {
            for (DSLMappingEntry entry : this.condition) {
                logger.info("when: " + entry.getMappingKey());
                logger.info(" " + entry.getKeyPattern());
                // logger.info(" " + entry.getValuePattern());
            }
        }
        if (showThen) {
            for (DSLMappingEntry entry : this.consequence) {
                logger.info("then: " + entry.getMappingKey());
                logger.info(" " + entry.getKeyPattern());
            }
        }

        // parse and expand specific areas
        final Matcher m = finder.matcher(drl);
        final StringBuffer buf = new StringBuffer();
        int drlPos = 0;
        int linecount = 0;
        while (m.find()) {
            final StringBuilder expanded = new StringBuilder();
            int newPos = m.start();
            // Track how many lines precede this construction for error reporting.
            linecount += countNewlines(drl, drlPos, newPos);
            drlPos = newPos;
            String constr = m.group().trim();
            if (constr.startsWith("rule")) {
                String headerFragment = m.group(1);
                expanded.append(headerFragment); // adding rule header and attributes
                String lhsFragment = m.group(2);
                expanded.append(this.expandLHS(lhsFragment,
                        linecount + countNewlines(drl, drlPos, m.start(2)) + 1));
                String thenFragment = m.group(3);
                expanded.append(thenFragment); // adding "then" header
                String rhsFragment = this.expandRHS(m.group(4),
                        linecount + countNewlines(drl, drlPos, m.start(4)) + 1);
                expanded.append(rhsFragment);
                expanded.append(m.group(5)); // adding rule trailer
            } else if (constr.startsWith("query")) {
                String fragment = m.group(6);
                expanded.append(fragment); // adding query header and attributes
                String lhsFragment = this.expandLHS(m.group(7),
                        linecount + countNewlines(drl, drlPos, m.start(7)) + 1);
                expanded.append(lhsFragment);
                expanded.append(m.group(8)); // adding query trailer
            } else {
                // strange behavior
                this.addError(new ExpanderException("Unable to expand statement: " + constr, 0));
            }
            // quoteReplacement guards against $ and \ in the expanded text being
            // interpreted as regex group references.
            m.appendReplacement(buf, Matcher.quoteReplacement(expanded.toString()));
        }
        m.appendTail(buf);
        return buf;
    }
}
public class GrailsLocaleUtils { /** * Add the locale suffix if the message resource bundle file exists . * @ param messageBundlePath * the message resource bundle path * @ param availableLocaleSuffixes * the list of available locale suffix to update * @ param locale * the locale to check . * @ param fileSuffix * the file suffix * @ param rsReader * the grails servlet context resource reader */ private static void addSuffixIfAvailable ( String messageBundlePath , List < String > availableLocaleSuffixes , Locale locale , String fileSuffix , GrailsServletContextResourceReader rsReader ) { } }
// Resolve the locale-specific bundle path, e.g. base name + "_en_US" + file suffix.
String candidatePath = toBundleName ( messageBundlePath , locale ) + fileSuffix ;
// File-system paths are probed as-is; anything else is looked up under WEB-INF (war mode).
String lookupPath = rsReader . isFileSystemPath ( candidatePath )
        ? candidatePath
        : WEB_INF_DIR + candidatePath ;
// The stream is opened only to test for existence and closed immediately.
boolean exists ;
InputStream probe = null ;
try {
    probe = rsReader . getResourceAsStream ( lookupPath ) ;
    exists = probe != null ;
} finally {
    IOUtils . close ( probe ) ;
}
if ( ! exists ) {
    return ;
}
// Derive the locale suffix from the tail of the path that follows the base bundle name.
String suffix = candidatePath . substring ( messageBundlePath . length ( ) ) ;
if ( suffix . length ( ) > 0 ) {
    if ( suffix . length ( ) == fileSuffix . length ( ) ) {
        // Only the file extension remained: this is the default (no-locale) bundle.
        suffix = "" ;
    } else {
        // Strip the leading "_" and the trailing extension: "_en_US" -> "en_US".
        suffix = suffix . substring ( 1 , suffix . length ( ) - fileSuffix . length ( ) ) ;
    }
}
availableLocaleSuffixes . add ( suffix ) ;
public class ScriptResourceUtil { /** * Execute a process directly with some arguments * @ param logger logger * @ param workingdir working dir * @ param scriptfile file * @ param scriptargs arguments to the shell * @ param scriptargsarr * @ param envContext Environment variable context * @ param newDataContext context data to replace in the scriptargs * @ param logName name of plugin to use in logging * @ return process */ static Process execScript ( final Logger logger , final File workingdir , final File scriptfile , final String scriptargs , final String [ ] scriptargsarr , final Map < String , Map < String , String > > envContext , final Map < String , Map < String , String > > newDataContext , final String logName ) throws IOException { } }
// Build the command line and environment from the script, arguments and contexts.
final ExecParams params = buildExecParams ( scriptfile , scriptargs , scriptargsarr , envContext , newDataContext ) ;
final String [ ] command = params . getArgs ( ) ;
final String [ ] environment = params . getEnvarr ( ) ;
logger . info ( "[" + logName + "] executing: " + StringArrayUtil . asString ( command , " " ) ) ;
// Launch the process in the requested working directory.
return Runtime . getRuntime ( ) . exec ( command , environment , workingdir ) ;
public class IdleConnectionPool { /** * Attempts to remove a connection from the pool . If a connection is available * for the specified endpoint descriptor , it is removed from the pool and returned . * If no suitable connection is present , a value of null is returned . * @ param descriptor An endpoint descriptor which specifies the remote host to which * the connection returned must be connected . * @ return A connection , if present in the pool , otherwise null . */ public synchronized OutboundConnection remove ( EndPointDescriptor descriptor ) { } }
// Trace method entry when diagnostic tracing is enabled.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
    SibTr . entry ( this , tc , "remove" , descriptor ) ;
// Idle connections are kept in per-endpoint lists keyed by descriptor.
final LinkedList connectionList = descriptorToConnectionListMap . get ( descriptor ) ;
OutboundConnection connection = null ;
if ( ( connectionList != null ) && ( connectionList . size ( ) > 0 ) ) {
    // Remove the first entry from the list . Each pooled entry is a pair:
    // [0] = the connection itself, [1] = the idle-timeout alarm guarding it.
    Object [ ] connEntry = ( Object [ ] ) connectionList . removeFirst ( ) ;
    connection = ( OutboundConnection ) connEntry [ 0 ] ;
    // Invalidate its alarm , so that we don ' t try and close
    // the connection because it has been idle for too long .
    ( ( AlarmValid ) connEntry [ 1 ] ) . invalidate ( ) ;
    // Reset the link level state so that the state does not
    // survive the pooling process . This may be null if on z / OS and the
    // WMQRA or MEP function are the only users of the connection .
    final LinkLevelState lls = ( LinkLevelState ) connection . getAttachment ( ) ;
    if ( lls != null ) lls . reset ( ) ;
}
// Trace exit; connection stays null when the pool had no entry for this endpoint.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
    SibTr . exit ( this , tc , "remove" , connection ) ;
return connection ;
public class SerializedCheckpointData { /** * De - serializes an array of SerializedCheckpointData back into an ArrayDeque of element checkpoints . * @ param data The data to be deserialized . * @ param serializer The serializer used to deserialize the data . * @ param < T > The type of the elements . * @ return An ArrayDeque of element checkpoints . * @ throws IOException Thrown , if the serialization fails . */ public static < T > ArrayDeque < Tuple2 < Long , Set < T > > > toDeque ( SerializedCheckpointData [ ] data , TypeSerializer < T > serializer ) throws IOException { } }
// One reusable deserializer is re-pointed at each checkpoint's byte buffer.
ArrayDeque < Tuple2 < Long , Set < T > > > result = new ArrayDeque < > ( data . length ) ;
DataInputDeserializer in = null ;
for ( SerializedCheckpointData checkpoint : data ) {
    byte [ ] bytes = checkpoint . getSerializedData ( ) ;
    if ( in == null ) {
        in = new DataInputDeserializer ( bytes , 0 , bytes . length ) ;
    } else {
        in . setBuffer ( bytes ) ;
    }
    // Deserialize exactly the number of ids recorded for this checkpoint.
    final int count = checkpoint . getNumIds ( ) ;
    final Set < T > ids = new HashSet < > ( count ) ;
    for ( int k = 0 ; k < count ; k ++ ) {
        ids . add ( serializer . deserialize ( in ) ) ;
    }
    result . addLast ( new Tuple2 < > ( checkpoint . checkpointId , ids ) ) ;
}
return result ;
public class XMLEncoder { /** * Writes the specified character . If the character is non - printable in * this encoding , then it will be escaped . * It is safe for this method to assume that the specified character does not need to be escaped * unless the encoding does not support the character . * @ param out the character stream to write to , not < code > null < / code > . * @ param c the character to be written . * @ throws InvalidXMLException if the specified text contains an invalid character . * @ throws IOException if an I / O error occurs . * @ deprecated Deprecated since XMLenc 0.51 . Use the text method * { @ link # text ( Writer , char , boolean ) } instead . */ @ Deprecated public void text ( Writer out , char c ) throws InvalidXMLException , IOException { } }
// Characters considered safe here are written through unchanged:
//   63-127  '?' .. DEL          39-59  '\'' .. ';'
//   32-37   ' ' .. '%'          9, 10, 13  TAB, LF, CR        61  '='
//   38      '&'  -- NOTE(review): '&' is written unescaped by this deprecated
//                   method; confirm against text(Writer, char, boolean).
//   > 127   only when the underlying encoding is not 7-bit.
if ( c >= 63 && c <= 127 || c >= 39 && c <= 59 || c >= 32 && c <= 37 || c == 38 || c > 127 && ! _sevenBitEncoding || c == 10 || c == 13 || c == 61 || c == 9 ) {
    out . write ( c ) ;
} else {
    if ( c == 60 ) {
        // '<' -> its 4-character escape sequence.
        out . write ( ESC_LESS_THAN , 0 , 4 ) ;
    } else if ( c == 62 ) {
        // '>' -> its 4-character escape sequence.
        out . write ( ESC_GREATER_THAN , 0 , 4 ) ;
    } else if ( c > 127 ) {
        // Non-ASCII on a 7-bit encoding: emit a numeric character reference "&#NNN;".
        out . write ( AMPERSAND_HASH , 0 , 2 ) ;
        out . write ( Integer . toString ( c ) ) ;
        out . write ( ';' ) ;
    } else {
        // Everything else (remaining control characters) is invalid in XML.
        throw new InvalidXMLException ( "The character 0x" + Integer . toHexString ( c ) + " is not valid." ) ;
    }
}
public class CUGSubscriptionImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . map . primitives . MAPAsnPrimitive # encodeData ( org . mobicents . protocols . asn . AsnOutputStream ) */ public void encodeData ( AsnOutputStream asnOs ) throws MAPException { } }
// Mandatory components must be present before any bytes are written.
if ( this . cugInterlock == null )
    throw new MAPException ( "Error while encoding " + _PrimitiveName + ": cugInterlock required." ) ;
if ( this . intraCugOptions == null )
    throw new MAPException ( "Error while encoding " + _PrimitiveName + ": intraCugOptions required." ) ;
try {
    // cug-Index as a plain INTEGER.
    asnOs . writeInteger ( this . cugIndex ) ;
    // cug-Interlock is encoded by its implementation class with its own tag.
    ( ( CUGInterlockImpl ) this . cugInterlock ) . encodeAll ( asnOs ) ;
    // intraCUG-Options as an ENUMERATED value.
    asnOs . writeInteger ( Tag . CLASS_UNIVERSAL , Tag . ENUMERATED , this . intraCugOptions . getCode ( ) ) ;
    if ( this . basicService != null ) {
        // basicServiceGroupList is constrained to SIZE (1..32).
        if ( this . basicService . size ( ) < 1 || this . basicService . size ( ) > 32 ) {
            throw new MAPException ( "Error while encoding " + _PrimitiveName + ".basicService: basicServiceGroupList must be from 1 to 32 size, found: " + this . basicService . size ( ) ) ;
        }
        asnOs . writeTag ( Tag . CLASS_UNIVERSAL , false , Tag . SEQUENCE ) ;
        // Definite-length content: mark the position, write the elements, finalize length.
        int pos = asnOs . StartContentDefiniteLength ( ) ;
        for ( ExtBasicServiceCode be : this . basicService ) {
            ExtBasicServiceCodeImpl bee = ( ExtBasicServiceCodeImpl ) be ;
            bee . encodeAll ( asnOs ) ;
        }
        asnOs . FinalizeContent ( pos ) ;
    }
    // Optional extension container, written with a context-specific tag.
    if ( this . extensionContainer != null )
        ( ( MAPExtensionContainerImpl ) this . extensionContainer ) . encodeAll ( asnOs , Tag . CLASS_CONTEXT_SPECIFIC , _TAG_extensionContainer ) ;
} catch ( IOException e ) {
    throw new MAPException ( "IOException when encoding " + _PrimitiveName + ": " + e . getMessage ( ) , e ) ;
} catch ( AsnException e ) {
    throw new MAPException ( "AsnException when encoding " + _PrimitiveName + ": " + e . getMessage ( ) , e ) ;
}
public class Timex3 { /** * getter for timexMod - gets * @ generated * @ return value of the feature */ public String getTimexMod ( ) { } }
// UIMA-generated accessor: verify the feature exists in the type system first.
if ( Timex3_Type . featOkTst && ( ( Timex3_Type ) jcasType ) . casFeat_timexMod == null )
    jcasType . jcas . throwFeatMissing ( "timexMod" , "de.unihd.dbs.uima.types.heideltime.Timex3" ) ;
// Read the string value directly from the low-level CAS.
return jcasType . ll_cas . ll_getStringValue ( addr , ( ( Timex3_Type ) jcasType ) . casFeatCode_timexMod ) ;
public class BlogConnectionFactory { /** * Create a connection to a blog server . * @ param type Connection type , must be " atom " or " metaweblog " * @ param url End - point URL to connect to * @ param username Username for login to blog server * @ param password Password for login to blog server */ public static BlogConnection getBlogConnection ( final String type , final String url , final String username , final String password ) throws BlogClientException { } }
// A null type defaults to the MetaWeblog implementation.
if ( type == null || "metaweblog" . equals ( type ) ) {
    return createBlogConnection ( METAWEBLOG_IMPL_CLASS , url , username , password ) ;
}
if ( "atom" . equals ( type ) ) {
    return createBlogConnection ( ATOMPROTOCOL_IMPL_CLASS , url , username , password ) ;
}
// Anything else is an unsupported connection type.
throw new BlogClientException ( "Type must be 'atom' or 'metaweblog'" ) ;
public class OAuthProviderProcessingFilter { /** * The allowed set of HTTP methods . * @ param allowedMethods The allowed set of methods . */ public void setAllowedMethods ( List < String > allowedMethods ) { } }
// Replace the current set with the given methods, normalized to upper case.
this . allowedMethods . clear ( ) ;
if ( allowedMethods != null ) {
    for ( String allowedMethod : allowedMethods ) {
        // Use locale-independent upper-casing: with a locale-sensitive
        // toUpperCase() and e.g. the Turkish default locale, "options" would
        // become "OPTİONS" (dotted capital I) and never match "OPTIONS".
        this . allowedMethods . add ( allowedMethod . toUpperCase ( java . util . Locale . ROOT ) ) ;
    }
}
public class Las { /** * Checks if native libs are available . * @ return < code > true < / code > , if native liblas connection is available . */ public static boolean supportsNative ( ) { } }
// Lazily probe for the native liblas JNA wrapper once and cache the result.
// NOTE(review): the lazy check is not synchronized; two concurrent first calls
// could both probe. Harmless only if LiblasWrapper.getWrapper() is idempotent — confirm.
if ( ! testedLibLoading ) {
    LiblasJNALibrary wrapper = LiblasWrapper . getWrapper ( ) ;
    // A non-null wrapper means the native library loaded successfully.
    if ( wrapper != null ) { isNativeLibAvailable = true ; }
    testedLibLoading = true ;
}
return isNativeLibAvailable ;
public class GetExecutionHistoryRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetExecutionHistoryRequest getExecutionHistoryRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getExecutionHistoryRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getExecutionHistoryRequest . getExecutionArn ( ) , EXECUTIONARN_BINDING ) ; protocolMarshaller . marshall ( getExecutionHistoryRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( getExecutionHistoryRequest . getReverseOrder ( ) , REVERSEORDER_BINDING ) ; protocolMarshaller . marshall ( getExecutionHistoryRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class MonteCarloIntegrator { /** * / * ( non - Javadoc ) * @ see net . finmath . integration . AbstractRealIntegral # integrate ( java . util . function . DoubleUnaryOperator ) */ @ Override public double integrate ( DoubleUnaryOperator integrand ) { } }
double lowerBound = getLowerBound ( ) ; double upperBound = getUpperBound ( ) ; double range = upperBound - lowerBound ; // Create random number sequence generator ( we use MersenneTwister ) MersenneTwister mersenneTwister = new MersenneTwister ( seed ) ; double integral = 0.0 ; for ( int i = 0 ; i < numberOfEvaluationPoints ; i ++ ) { integral += integrand . applyAsDouble ( lowerBound + mersenneTwister . nextDouble ( ) * range ) ; } return integral * range / numberOfEvaluationPoints ;
public class OrientResourceAuthorizationStrategy { /** * Check that current user has access to all mentioned resources * @ param resources { @ link RequiredOrientResource } s to check * @ param action { @ link Action } to check for * @ return true if access is allowed */ public boolean checkResources ( RequiredOrientResource [ ] resources , Action action ) { } }
// Every listed resource must pass the per-resource check.
for ( RequiredOrientResource resource : resources ) {
    if ( ! checkResource ( resource , action ) ) {
        return false ;
    }
}
return true ;
public class NetUtil { /** * 创建 { @ link InetSocketAddress } * @ param host 域名或IP地址 , 空表示任意地址 * @ param port 端口 , 0表示系统分配临时端口 * @ return { @ link InetSocketAddress } * @ since 3.3.0 */ public static InetSocketAddress createAddress ( String host , int port ) { } }
if ( StrUtil . isBlank ( host ) ) { return new InetSocketAddress ( port ) ; } return new InetSocketAddress ( host , port ) ;
public class URIUtils { /** * Encode a URI path . * @ param path The path the encode * @ param buf StringBuilder to encode path into ( or null ) * @ return The StringBuilder or null if no substitutions required . */ private static StringBuilder encodePath ( StringBuilder buf , String path , int offset ) { } }
// Escape table for the reserved/unsafe ASCII characters; null means "no escaping".
final String [ ] escapeTable = new String [ 128 ] ;
escapeTable [ '%' ] = "%25" ;
escapeTable [ '?' ] = "%3F" ;
escapeTable [ ';' ] = "%3B" ;
escapeTable [ '#' ] = "%23" ;
escapeTable [ '"' ] = "%22" ;
escapeTable [ '\'' ] = "%27" ;
escapeTable [ '<' ] = "%3C" ;
escapeTable [ '>' ] = "%3E" ;
escapeTable [ ' ' ] = "%20" ;
escapeTable [ '[' ] = "%5B" ;
escapeTable [ '\\' ] = "%5C" ;
escapeTable [ ']' ] = "%5D" ;
escapeTable [ '^' ] = "%5E" ;
escapeTable [ '`' ] = "%60" ;
escapeTable [ '{' ] = "%7B" ;
escapeTable [ '|' ] = "%7C" ;
escapeTable [ '}' ] = "%7D" ;
byte [ ] bytes = null ;
if ( buf == null ) {
    // Pass 1: determine whether any character needs encoding at all.
    scan :
    for ( int k = offset ; k < path . length ( ) ; k ++ ) {
        char c = path . charAt ( k ) ;
        if ( c > 127 ) {
            bytes = path . getBytes ( URIUtils . __CHARSET ) ;
            buf = new StringBuilder ( path . length ( ) * 2 ) ;
            break scan ;
        }
        if ( escapeTable [ c ] != null ) {
            buf = new StringBuilder ( path . length ( ) * 2 ) ;
            break scan ;
        }
    }
    if ( buf == null ) {
        // Nothing to substitute.
        return null ;
    }
}
// Pass 2: encode characters up to the first non-ASCII character (if any).
int i ;
encode :
for ( i = offset ; i < path . length ( ) ; i ++ ) {
    char c = path . charAt ( i ) ;
    if ( c > 127 ) {
        bytes = path . getBytes ( URIUtils . __CHARSET ) ;
        break encode ;
    }
    String escaped = escapeTable [ c ] ;
    if ( escaped != null ) {
        buf . append ( escaped ) ;
    } else {
        buf . append ( c ) ;
    }
}
// Pass 3: continue over the raw bytes. All characters before index i are
// single-byte ASCII, so the char index equals the byte index here.
if ( bytes != null ) {
    for ( ; i < bytes . length ; i ++ ) {
        byte b = bytes [ i ] ;
        if ( b < 0 ) {
            // High (non-ASCII) byte: percent-encode it in hex.
            buf . append ( '%' ) ;
            TypeUtils . toHex ( b , buf ) ;
        } else {
            String escaped = escapeTable [ b ] ;
            if ( escaped != null ) {
                buf . append ( escaped ) ;
            } else {
                buf . append ( ( char ) b ) ;
            }
        }
    }
}
return buf ;
public class AbstractSamlObjectBuilder { /** * New attribute value . * @ param value the value * @ param valueType the value type * @ param elementName the element name * @ return the xS string */ protected XMLObject newAttributeValue ( final Object value , final String valueType , final QName elementName ) { } }
if ( XSString . class . getSimpleName ( ) . equalsIgnoreCase ( valueType ) ) { val builder = new XSStringBuilder ( ) ; val attrValueObj = builder . buildObject ( elementName , XSString . TYPE_NAME ) ; attrValueObj . setValue ( value . toString ( ) ) ; return attrValueObj ; } if ( XSURI . class . getSimpleName ( ) . equalsIgnoreCase ( valueType ) ) { val builder = new XSURIBuilder ( ) ; val attrValueObj = builder . buildObject ( elementName , XSURI . TYPE_NAME ) ; attrValueObj . setValue ( value . toString ( ) ) ; return attrValueObj ; } if ( XSBoolean . class . getSimpleName ( ) . equalsIgnoreCase ( valueType ) ) { val builder = new XSBooleanBuilder ( ) ; val attrValueObj = builder . buildObject ( elementName , XSBoolean . TYPE_NAME ) ; attrValueObj . setValue ( XSBooleanValue . valueOf ( value . toString ( ) . toLowerCase ( ) ) ) ; return attrValueObj ; } if ( XSInteger . class . getSimpleName ( ) . equalsIgnoreCase ( valueType ) ) { val builder = new XSIntegerBuilder ( ) ; val attrValueObj = builder . buildObject ( elementName , XSInteger . TYPE_NAME ) ; attrValueObj . setValue ( Integer . valueOf ( value . toString ( ) ) ) ; return attrValueObj ; } if ( XSDateTime . class . getSimpleName ( ) . equalsIgnoreCase ( valueType ) ) { val builder = new XSDateTimeBuilder ( ) ; val attrValueObj = builder . buildObject ( elementName , XSDateTime . TYPE_NAME ) ; attrValueObj . setValue ( DateTime . parse ( value . toString ( ) ) ) ; return attrValueObj ; } if ( XSBase64Binary . class . getSimpleName ( ) . equalsIgnoreCase ( valueType ) ) { val builder = new XSBase64BinaryBuilder ( ) ; val attrValueObj = builder . buildObject ( elementName , XSBase64Binary . TYPE_NAME ) ; attrValueObj . setValue ( value . toString ( ) ) ; return attrValueObj ; } if ( XSObject . class . getSimpleName ( ) . equalsIgnoreCase ( valueType ) ) { val mapper = new JacksonXmlSerializer ( ) ; val builder = new XSAnyBuilder ( ) ; val attrValueObj = builder . 
buildObject ( elementName ) ; attrValueObj . setTextContent ( mapper . writeValueAsString ( value ) ) ; return attrValueObj ; } val builder = new XSAnyBuilder ( ) ; val attrValueObj = builder . buildObject ( elementName ) ; attrValueObj . setTextContent ( value . toString ( ) ) ; return attrValueObj ;
public class ServerImpl { /** * Remove transport service from accounting collection and notify of complete shutdown if * necessary . * @ param transport service to remove */ private void transportClosed ( ServerTransport transport ) { } }
synchronized ( lock ) { if ( ! transports . remove ( transport ) ) { throw new AssertionError ( "Transport already removed" ) ; } channelz . removeServerSocket ( ServerImpl . this , transport ) ; checkForTermination ( ) ; }
public class ObjIterator { /** * Returns an infinite { @ code ObjIterator } . * @ param supplier * @ return */ public static < T > ObjIterator < T > generate ( final Supplier < T > supplier ) { } }
N . checkArgNotNull ( supplier ) ;
// Infinite iterator: hasNext() is always true and each next() pulls a fresh value.
return new ObjIterator < T > ( ) {
    @ Override
    public T next ( ) {
        return supplier . get ( ) ;
    }

    @ Override
    public boolean hasNext ( ) {
        return true ;
    }
} ;
public class StreamUtils { /** * Check if the content of a ByteBuffer is binary * @ param head ByteBuffer to check * @ return true if the content is binary ( Non - white Control Characters ) */ public static boolean isNonWhitespaceControlCharacter ( @ NonNull ByteBuffer head ) { } }
// Consume the entire buffer, remembering whether any byte was a non-whitespace
// control character. The buffer's position is advanced to its limit either way,
// matching the original contract.
boolean found = false ;
while ( head . hasRemaining ( ) ) {
    if ( isNonWhitespaceControlCharacter ( head . get ( ) ) ) {
        found = true ;
    }
}
return found ;
public class ClassUseWriter { /** * Generate the class use list . */ protected void generateClassUseFile ( ) throws IOException { } }
// Assemble the "Use" page: header, usage body, navigation links and footer.
HtmlTree body = getClassUseHeader ( ) ;
HtmlTree div = new HtmlTree ( HtmlTag . DIV ) ;
div . addStyle ( HtmlStyle . classUseContainer ) ;
// With at least one using package, emit the usage tables; otherwise a "no usage" note.
if ( pkgSet . size ( ) > 0 ) {
    addClassUse ( div ) ;
} else {
    div . addContent ( getResource ( "doclet.ClassUse_No.usage.of.0" , classdoc . qualifiedName ( ) ) ) ;
}
// HTML5 output wraps the content in <main>; legacy output appends directly to <body>.
if ( configuration . allowTag ( HtmlTag . MAIN ) ) {
    mainTree . addContent ( div ) ;
    body . addContent ( mainTree ) ;
} else {
    body . addContent ( div ) ;
}
// Same split for the footer section.
HtmlTree htmlTree = ( configuration . allowTag ( HtmlTag . FOOTER ) ) ? HtmlTree . FOOTER ( ) : body ;
addNavLinks ( false , htmlTree ) ;
addBottom ( htmlTree ) ;
if ( configuration . allowTag ( HtmlTag . FOOTER ) ) {
    body . addContent ( htmlTree ) ;
}
printHtmlDocument ( null , true , body ) ;
public class SqlSessionTxAdvice { /** * Flushes batch statements and commits database connection . * @ param force forces connection commit */ public void commit ( boolean force ) { } }
// Nothing to do when checkSession() reports the session should be skipped.
if ( checkSession ( ) ) {
    return ;
}
if ( log . isDebugEnabled ( ) ) {
    // Describe the session being committed, including the force flag.
    ToStringBuilder message = new ToStringBuilder ( String . format ( "Committing transactional %s@%x" ,
            sqlSession . getClass ( ) . getSimpleName ( ) , sqlSession . hashCode ( ) ) ) ;
    message . append ( "force" , force ) ;
    log . debug ( message . toString ( ) ) ;
}
sqlSession . commit ( force ) ;
public class CharsetDetector { /** * Get the names of all charsets supported by < code > CharsetDetector < / code > class . * < b > Note : < / b > Multiple different charset encodings in a same family may use * a single shared name in this implementation . For example , this method returns * an array including " ISO - 8859-1 " ( ISO Latin 1 ) , but not including " windows - 1252" * ( Windows Latin 1 ) . However , actual detection result could be " windows - 1252" * when the input data matches Latin 1 code points with any points only available * in " windows - 1252 " . * @ return an array of the names of all charsets supported by * < code > CharsetDetector < / code > class . */ public static String [ ] getAllDetectableCharsets ( ) { } }
// One shared name per recognizer; multiple encodings in a family may map to it.
final String [ ] names = new String [ ALL_CS_RECOGNIZERS . size ( ) ] ;
for ( int idx = 0 ; idx < names . length ; idx ++ ) {
    names [ idx ] = ALL_CS_RECOGNIZERS . get ( idx ) . recognizer . getName ( ) ;
}
return names ;
public class CredentialStoreEntryUrl { /** * Get Resource Url for StoreCredentials * @ return String Resource Url */ public static MozuUrl storeCredentialsUrl ( ) { } }
// Fixed resource path; no placeholders need substituting.
final UrlFormatter urlFormatter = new UrlFormatter ( "/api/platform/extensions/credentialStore/" ) ;
return new MozuUrl ( urlFormatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class ChainedProperty { /** * Appends the chained property formatted as " name . subname . subsubname " . * This format is parseable only if the chain is composed of valid * many - to - one joins . */ public void appendTo ( Appendable app ) throws IOException { } }
// Head of the chain first...
appendPropTo ( app , mPrime . getName ( ) , isOuterJoin ( 0 ) ) ;
// ...then each linked property, dot-separated, with its own outer-join flag.
final StorableProperty < ? > [ ] linked = mChain ;
if ( linked != null ) {
    for ( int idx = 0 ; idx < linked . length ; idx ++ ) {
        app . append ( '.' ) ;
        appendPropTo ( app , linked [ idx ] . getName ( ) , isOuterJoin ( idx + 1 ) ) ;
    }
}
public class FilterJoinVisitor { /** * Checks for an action failure */ private void checkForFailure ( FilterJoinNode node ) { } }
// Fast path: nothing to report.
if ( ! node . hasFailure ( ) ) {
    return ;
}
// Log the underlying cause, then surface it as an ElasticsearchException.
logger . error ( "Node processing failed: {}" , node . getFailure ( ) ) ;
throw new ElasticsearchException ( "Unexpected failure while processing a node" , node . getFailure ( ) ) ;
public class TcpConnector { /** * Opens a connection to the end point specified in the event . * @ param event the event */ @ Handler public void onOpenConnection ( OpenTcpConnection event ) { } }
try {
    // Open the outbound channel and register it with this connector.
    channels . add ( new TcpChannelImpl ( SocketChannel . open ( event . address ( ) ) ) ) ;
} catch ( ConnectException e ) {
    // The remote end actively refused the connection.
    fire ( new ConnectError ( event , "Connection refused." , e ) ) ;
} catch ( IOException e ) {
    fire ( new ConnectError ( event , "Failed to open TCP connection." , e ) ) ;
}
public class UnboundTypeReference { /** * Returns true if the existing hints would allow to resolve to the given reference . */ public boolean canResolveTo ( LightweightTypeReference reference ) { } }
// Once resolved, delegate to an assignability check against the resolved type.
if ( internalIsResolved ( ) )
    return reference . isAssignableFrom ( resolvedTo , new TypeConformanceComputationArgument ( false , true , true , true , false , false /* TODO do we need to support synonmys here ? */ ) ) ;
// Unresolved: only significant hints can constrain what this reference may become.
List < LightweightBoundTypeArgument > hints = getAllHints ( ) ;
if ( ! hints . isEmpty ( ) && hasSignificantHints ( hints ) ) {
    return canResolveTo ( reference , hints ) ;
}
// No significant hints available: report that resolution is not possible.
return false ;
public class UpdateEndpointGroupRequest { /** * The list of endpoint objects . * @ param endpointConfigurations * The list of endpoint objects . */ public void setEndpointConfigurations ( java . util . Collection < EndpointConfiguration > endpointConfigurations ) { } }
if ( endpointConfigurations == null ) { this . endpointConfigurations = null ; return ; } this . endpointConfigurations = new java . util . ArrayList < EndpointConfiguration > ( endpointConfigurations ) ;
public class BitmapUtil { /** * Clips the long edge of a bitmap , if its width and height are not equal , in order to transform * it into a square . Additionally , the bitmap is resized to a specific size and a border will be * added . * @ param bitmap * The bitmap , which should be clipped , as an instance of the class { @ link Bitmap } . The * bitmap may not be null * @ param size * The size , the bitmap should be resized to , as an { @ link Integer } value in pixels . The * size must be at least 1 * @ param borderWidth * The width of the border as an { @ link Integer } value in pixels . The width must be at * least 0 * @ param borderColor * The color of the border as an { @ link Integer } value * @ return The clipped bitmap as an instance of the class { @ link Bitmap } */ public static Bitmap clipSquare ( @ NonNull final Bitmap bitmap , final int size , final int borderWidth , @ ColorInt final int borderColor ) { } }
Condition . INSTANCE . ensureAtLeast ( borderWidth , 0 , "The border width must be at least 0" ) ;
// Clip to a square and scale to the requested size.
Bitmap clippedBitmap = clipSquare ( bitmap , size ) ;
Bitmap result = Bitmap . createBitmap ( clippedBitmap . getWidth ( ) , clippedBitmap . getHeight ( ) ,
        Bitmap . Config . ARGB_8888 ) ;
Canvas canvas = new Canvas ( result ) ;
// Inset the image by half the border width so the stroked border (which is
// drawn centered on the rect edges) does not overdraw it.
float offset = borderWidth / 2.0f ;
Rect src = new Rect ( 0 , 0 , clippedBitmap . getWidth ( ) , clippedBitmap . getHeight ( ) ) ;
RectF dst = new RectF ( offset , offset , result . getWidth ( ) - offset , result . getHeight ( ) - offset ) ;
canvas . drawBitmap ( clippedBitmap , src , dst , null ) ;
// Only draw the border when it is both visible and non-zero wide.
if ( borderWidth > 0 && Color . alpha ( borderColor ) != 0 ) {
    Paint paint = new Paint ( ) ;
    paint . setFilterBitmap ( false ) ;
    paint . setStyle ( Paint . Style . STROKE ) ;
    paint . setStrokeWidth ( borderWidth ) ;
    paint . setColor ( borderColor ) ;
    // Fix: the bottom edge previously used result.getWidth(); use getHeight()
    // so the bounds stay correct even if the bitmap were ever non-square.
    RectF bounds = new RectF ( offset , offset , result . getWidth ( ) - offset ,
            result . getHeight ( ) - offset ) ;
    canvas . drawRect ( bounds , paint ) ;
}
return result ;
public class Actor { /** * Ask TaskActor for result * @ param ref ActorRef of task * @ param callback callback for ask * @ return Future */ public AskFuture ask ( ActorRef ref , AskCallback callback ) { } }
// Delegate to the ask pattern with a timeout argument of 0.
// NOTE(review): whether 0 means "no timeout" or "immediate" is not visible
// here — confirm against askPattern.ask.
return askPattern . ask ( ref , 0 , callback ) ;
public class SystemProperties { /** * Gets the system property indicated by the specified key . * This behaves just like { @ link System # getProperty ( java . lang . String ) } , except that it * also consults the { @ link ServletContext } ' s " init " parameters . * @ param key the name of the system property . * @ return the string value of the system property , * or { @ code null } if there is no property with that key . * @ exception NullPointerException if { @ code key } is { @ code null } . * @ exception IllegalArgumentException if { @ code key } is empty . */ @ CheckForNull public static String getString ( String key ) { } }
String value = System . getProperty ( key ) ; // keep passing on any exceptions if ( value != null ) { if ( LOGGER . isLoggable ( Level . CONFIG ) ) { LOGGER . log ( Level . CONFIG , "Property (system): {0} => {1}" , new Object [ ] { key , value } ) ; } return value ; } value = handler . getString ( key ) ; if ( value != null ) { if ( LOGGER . isLoggable ( Level . CONFIG ) ) { LOGGER . log ( Level . CONFIG , "Property (context): {0} => {1}" , new Object [ ] { key , value } ) ; } return value ; } if ( LOGGER . isLoggable ( Level . CONFIG ) ) { LOGGER . log ( Level . CONFIG , "Property (not found): {0} => {1}" , new Object [ ] { key , value } ) ; } return null ;
public class BandedAffineAligner { /** * Classical Banded Alignment with affine gap scoring . * Both sequences must be highly similar . * Align 2 sequence completely ( i . e . while first sequence will be aligned against whole second sequence ) . * @ param scoring scoring system * @ param seq1 first sequence * @ param seq2 second sequence * @ param offset1 offset in first sequence * @ param length1 length of first sequence ' s part to be aligned * @ param offset2 offset in second sequence * @ param length2 length of second sequence ' s part to be aligned * @ param width width of banded alignment matrix . In other terms max allowed number of indels * @ param mutations mutations array where all mutations will be kept * @ param cache matrix cache */ public static int align0 ( final AffineGapAlignmentScoring < NucleotideSequence > scoring , final NucleotideSequence seq1 , final NucleotideSequence seq2 , final int offset1 , final int length1 , final int offset2 , final int length2 , final int width , final MutationsBuilder < NucleotideSequence > mutations , final MatrixCache cache ) { } }
// Global banded alignment with affine gap penalties; nothing to do for two empty parts.
if ( length1 == 0 && length2 == 0 ) return 0 ;
int size1 = length1 + 1 , size2 = length2 + 1 ;
cache . prepareMatrices ( size1 , size2 , width , scoring ) ;
// main: best score ending in a match/mismatch; gapIn1/gapIn2: best score ending in a gap.
BandedMatrix main = cache . main ;
BandedMatrix gapIn1 = cache . gapIn1 ;
BandedMatrix gapIn2 = cache . gapIn2 ;
int i , j ;
int match , gap1 , gap2 , to ;
final int gapExtensionPenalty = scoring . getGapExtensionPenalty ( ) ;
// Forward pass: fill the DP matrices row by row, restricted to the band.
for ( i = 0 ; i < length1 ; ++ i ) {
    to = Math . min ( i + main . getRowFactor ( ) - main . getColumnDelta ( ) + 1 , length2 ) ;
    for ( j = Math . max ( 0 , i - main . getColumnDelta ( ) ) ; j < to ; ++ j ) {
        match = main . get ( i , j ) + scoring . getScore ( seq1 . codeAt ( offset1 + i ) , seq2 . codeAt ( offset2 + j ) ) ;
        // Open a new gap or extend an existing one, whichever scores better.
        gap1 = Math . max ( main . get ( i + 1 , j ) + scoring . getGapOpenPenalty ( ) , gapIn1 . get ( i + 1 , j ) + gapExtensionPenalty ) ;
        gap2 = Math . max ( main . get ( i , j + 1 ) + scoring . getGapOpenPenalty ( ) , gapIn2 . get ( i , j + 1 ) + gapExtensionPenalty ) ;
        gapIn1 . set ( i + 1 , j + 1 , gap1 ) ;
        gapIn2 . set ( i + 1 , j + 1 , gap2 ) ;
        main . set ( i + 1 , j + 1 , Math . max ( match , Math . max ( gap1 , gap2 ) ) ) ;
    }
}
// Traceback: walk from the bottom-right corner back to (0,0), emitting mutations.
to = mutations . size ( ) ;
i = length1 - 1 ;
j = length2 - 1 ;
int pScore = main . get ( i + 1 , j + 1 ) ;
byte c1 , c2 ;
boolean inGap1 = false , inGap2 = false ;
while ( i >= 0 || j >= 0 ) {
    assert ! inGap2 || ! inGap1 ;
    if ( ! inGap1 && ( inGap2 || ( i >= 0 && pScore == gapIn2 . get ( i + 1 , j + 1 ) ) ) ) {
        // Vertical move: consumes a symbol of seq1 only -> deletion.
        inGap2 = false ;
        if ( pScore == gapIn2 . get ( i , j + 1 ) + gapExtensionPenalty ) {
            inGap2 = true ;
            pScore = gapIn2 . get ( i , j + 1 ) ;
        } else
            pScore = main . get ( i , j + 1 ) ;
        mutations . appendDeletion ( offset1 + i , seq1 . codeAt ( offset1 + i ) ) ;
        -- i ;
    } else if ( inGap1 || ( j >= 0 && pScore == gapIn1 . get ( i + 1 , j + 1 ) ) ) {
        // Horizontal move: consumes a symbol of seq2 only -> insertion.
        inGap1 = false ;
        if ( pScore == gapIn1 . get ( i + 1 , j ) + gapExtensionPenalty ) {
            inGap1 = true ;
            pScore = gapIn1 . get ( i + 1 , j ) ;
        } else
            pScore = main . get ( i + 1 , j ) ;
        mutations . appendInsertion ( offset1 + i + 1 , seq2 . codeAt ( offset2 + j ) ) ;
        -- j ;
    } else if ( i >= 0 && j >= 0 && pScore == main . get ( i , j ) + scoring . getScore ( c1 = seq1 . codeAt ( offset1 + i ) , c2 = seq2 . codeAt ( offset2 + j ) ) ) {
        // Diagonal move: match, or a substitution when the symbols differ.
        pScore = main . get ( i , j ) ;
        if ( c1 != c2 ) mutations . appendSubstitution ( offset1 + i , c1 , c2 ) ;
        -- i ;
        -- j ;
    } else
        // No predecessor reproduces the current score: matrices are inconsistent.
        throw new RuntimeException ( ) ;
}
// Mutations were emitted in reverse (end to start); restore natural order.
mutations . reverseRange ( to , mutations . size ( ) ) ;
return main . get ( length1 , length2 ) ;
public class PTXImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EList < Triplet > getCS ( ) { } }
if ( cs == null ) { cs = new EObjectContainmentEList . Resolving < Triplet > ( Triplet . class , this , AfplibPackage . PTX__CS ) ; } return cs ;
public class Kernel { /** * Spawn an agent of the given type , and pass the parameters to its initialization function . * @ param agentID the identifier of the agent to spawn . If < code > null < / code > the identifier is randomly selected . * @ param agent the type of the agent to spawn . * @ param params the list of the parameters to pass to the agent initialization function . * @ return the identifier of the agent , never < code > null < / code > . */ public UUID spawn ( UUID agentID , Class < ? extends Agent > agent , Object ... params ) { } }
final List < UUID > ids = this . spawnService . spawn ( 1 , null , this . janusContext , agentID , agent , params ) ; if ( ids . isEmpty ( ) ) { return null ; } return ids . get ( 0 ) ;
public class XMLOutputter { /** * Writes end tags for elements on the stack until ( and including ) an * element that matches the specified name . * @ param type the type of the tag to end , not < code > null < / code > . * @ throws IllegalStateException if < code > getState ( ) ! = { @ link # START _ TAG _ OPEN } & amp ; & amp ; * getState ( ) ! = { @ link # WITHIN _ ELEMENT } < / code > * @ throws IllegalArgumentException if < code > type = = null < / code > . * @ throws NoSuchElementException if an element of the specified type could not be found on the stack . * @ throws IOException if an I / O error occurs ; this will set the state to { @ link # ERROR _ STATE } . * @ since XMLenc 0.53 */ public final void endTag ( String type ) throws IllegalStateException , NoSuchElementException , IOException { } }
// Check state if ( _state != XMLEventListenerStates . WITHIN_ELEMENT && _state != XMLEventListenerStates . START_TAG_OPEN ) { throw new IllegalStateException ( "getState() == " + _state ) ; } // Temporarily set the state to ERROR _ STATE . Unless an exception is // thrown in the write methods , it will be reset to a valid state . XMLEventListenerState oldState = _state ; _state = XMLEventListenerStates . ERROR_STATE ; String typeFound ; do { typeFound = _elementStack [ _elementStackSize - 1 ] ; // Write output if ( oldState == XMLEventListenerStates . START_TAG_OPEN ) { _out . write ( '/' ) ; closeStartTag ( ) ; } else { _out . write ( _lineBreakChars ) ; writeIndentation ( ) ; _out . write ( '<' ) ; _out . write ( '/' ) ; _out . write ( typeFound ) ; closeStartTag ( ) ; } _elementStackSize -- ; } while ( ! type . equals ( typeFound ) && _elementStackSize > 0 ) ; // Make sure the element was indeed found if ( ! type . equals ( typeFound ) ) { throw new NoSuchElementException ( "No element of type \"" + type + "\" was found on the stack of open elements." ) ; } // Change the state if ( _elementStackSize == 0 ) { _state = XMLEventListenerStates . AFTER_ROOT_ELEMENT ; } else { _state = XMLEventListenerStates . WITHIN_ELEMENT ; } // State has changed , check checkInvariants ( ) ;
public class Codecs { /** * Create a codec , which creates a a mapping from the elements given in the * { @ code source } sequence to the elements given in the { @ code target } * sequence . The returned mapping can be seen as a function which maps every * element of the { @ code target } set to an element of the { @ code source } set . * < pre > { @ code * final ISeq < Integer > numbers = ISeq . of ( 1 , 2 , 3 , 4 , 5 ) ; * final ISeq < String > strings = ISeq . of ( " 1 " , " 2 " , " 3 " ) ; * final Codec < Map < Integer , String > , EnumGene < Integer > > codec = * Codecs . ofMapping ( numbers , strings , HashMap : : new ) ; * } < / pre > * If { @ code source . size ( ) > target . size ( ) } , the created mapping is * < a href = " https : / / en . wikipedia . org / wiki / Surjective _ function " > surjective < / a > , * if { @ code source . size ( ) < target . size ( ) } , the mapping is * < a href = " https : / / en . wikipedia . org / wiki / Injective _ function " > injective < / a > * and if both sets have the same size , the returned mapping is * < a href = " https : / / en . wikipedia . org / wiki / Bijection " > bijective < / a > . * @ since 4.3 * @ param source the source elements . Will be the < em > keys < / em > of the * encoded { @ code Map } . * @ param target the target elements . Will be the < em > values < / em > of the * encoded { @ code Map } . * @ param mapSupplier a function which returns a new , empty Map into which * the mapping will be inserted * @ param < A > the type of the source elements * @ param < B > the type of the target elements * @ param < M > the type of the encoded Map * @ return a new mapping codec * @ throws IllegalArgumentException if the { @ code target } sequences are empty * @ throws NullPointerException if one of the argument is { @ code null } */ public static < A , B , M extends Map < A , B > > Codec < M , EnumGene < Integer > > ofMapping ( final ISeq < ? extends A > source , final ISeq < ? 
extends B > target , final Supplier < M > mapSupplier ) { } }
requireNonNull ( mapSupplier ) ; return ofPermutation ( target . size ( ) ) . map ( perm -> toMapping ( perm , source , target , mapSupplier ) ) ;
public class Converter { /** * Get the first converter for this field ( must be linked to at least one ScreenComponent for this to work ) . * @ return The converter object ( or this if there is none ) . */ public Converter getFieldConverter ( ) { } }
for ( int i = 0 ; ; i ++ ) { Object ojbComponent = this . getField ( ) . getComponent ( i ) ; if ( ojbComponent == null ) break ; if ( ojbComponent instanceof ScreenComponent ) { Converter conv = ( Converter ) ( ( ScreenComponent ) ojbComponent ) . getConverter ( ) ; if ( ( conv != null ) && ( conv . getField ( ) == this . getField ( ) ) ) return conv ; // Since there is a converter in front of this field , return the converter } } return this ;
public class MapBasedXPathFunctionResolver { /** * Add all functions from the other function resolver into this resolver . * @ param aOther * The function resolver to import the functions from . May not be * < code > null < / code > . * @ param bOverwrite * if < code > true < / code > existing functions will be overwritten with the * new functions , otherwise the old functions are kept . * @ return { @ link EChange } */ @ Nonnull public EChange addAllFrom ( @ Nonnull final MapBasedXPathFunctionResolver aOther , final boolean bOverwrite ) { } }
ValueEnforcer . notNull ( aOther , "Other" ) ; EChange eChange = EChange . UNCHANGED ; for ( final Map . Entry < XPathFunctionKey , XPathFunction > aEntry : aOther . m_aMap . entrySet ( ) ) if ( bOverwrite || ! m_aMap . containsKey ( aEntry . getKey ( ) ) ) { m_aMap . put ( aEntry . getKey ( ) , aEntry . getValue ( ) ) ; eChange = EChange . CHANGED ; } return eChange ;
public class CmsMultiCheckBox { /** * Initializes the widget given a map of select options . < p > * The keys of the map are the values of the select options , while the values of the map * are the labels which should be used for the checkboxes . * @ param items the map of select options */ protected void init ( Map < String , String > items ) { } }
initWidget ( m_panel ) ; m_items = new LinkedHashMap < String , String > ( items ) ; m_panel . setStyleName ( I_CmsInputLayoutBundle . INSTANCE . inputCss ( ) . multiCheckBox ( ) ) ; m_panel . addStyleName ( I_CmsLayoutBundle . INSTANCE . generalCss ( ) . textMedium ( ) ) ; FocusHandler focusHandler = new FocusHandler ( ) { public void onFocus ( FocusEvent event ) { CmsDomUtil . fireFocusEvent ( CmsMultiCheckBox . this ) ; } } ; for ( Map . Entry < String , String > entry : items . entrySet ( ) ) { String value = entry . getValue ( ) ; CmsCheckBox checkbox = new CmsCheckBox ( value ) ; // wrap the check boxes in FlowPanels to arrange them vertically FlowPanel checkboxWrapper = new FlowPanel ( ) ; checkboxWrapper . add ( checkbox ) ; checkbox . addValueChangeHandler ( new ValueChangeHandler < Boolean > ( ) { public void onValueChange ( ValueChangeEvent < Boolean > valueChanged ) { fireValueChanged ( getFormValueAsString ( ) ) ; } } ) ; checkbox . getButton ( ) . addFocusHandler ( focusHandler ) ; m_panel . add ( checkboxWrapper ) ; m_checkboxes . add ( checkbox ) ; } m_panel . add ( m_error ) ;