signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ExecutionChain {
    /**
     * Sets a callback to handle any exceptions in the chain of execution.
     *
     * @param callback instance of {@link ErrorCallback}
     * @return reference to this {@code ExecutionChain}, for call chaining
     * @throws IllegalStateException if the chain of execution has already been
     *         {@link #execute() started}
     */
    public ExecutionChain setErrorCallback(ErrorCallback callback) {
        // Reject the change once the chain has started running.
        if (state.get() == State.RUNNING) {
            throw new IllegalStateException("Invalid while ExecutionChain is running");
        }
        errorCallback = callback;
        return this;
    }
}
public class ImageInlineUtils { /** * Checks if inlining mode is allowed on the provided element .
* @ param img
* the image element to check if the actual inlining mode is
* allowed
* @ param mode
* the actual mode
* @ return true if this mode is allowed , false otherwise */
public static boolean isInlineModeAllowed ( Element img , InlineMode mode ) { } } | // if already inlined = > reject ( do not inline twice )
if ( ! img . attr ( INLINED_ATTR ) . isEmpty ( ) ) { return false ; } // if inline mode defined but not the wanted mode = > reject
if ( ! img . attr ( INLINE_MODE_ATTR ) . isEmpty ( ) && ! img . attr ( INLINE_MODE_ATTR ) . equals ( mode . mode ( ) ) ) { return false ; } // if inline mode defined and matches the wanted mode = > allow
// if no inline mode defined = > allow ( any mode allowed )
return true ; |
public class transformaction { /** * Use this API to add transformaction resources . */
public static base_responses add ( nitro_service client , transformaction resources [ ] ) throws Exception { } } | base_responses result = null ; if ( resources != null && resources . length > 0 ) { transformaction addresources [ ] = new transformaction [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { addresources [ i ] = new transformaction ( ) ; addresources [ i ] . name = resources [ i ] . name ; addresources [ i ] . profilename = resources [ i ] . profilename ; addresources [ i ] . priority = resources [ i ] . priority ; addresources [ i ] . state = resources [ i ] . state ; } result = add_bulk_request ( client , addresources ) ; } return result ; |
public class cmppolicylabel { /** * Use this API to fetch cmppolicylabel resource of given name . */
public static cmppolicylabel get ( nitro_service service , String labelname ) throws Exception { } } | cmppolicylabel obj = new cmppolicylabel ( ) ; obj . set_labelname ( labelname ) ; cmppolicylabel response = ( cmppolicylabel ) obj . get_resource ( service ) ; return response ; |
public class ArrayRankDouble {
    /**
     * Returns the index of the maximum value in the given array.
     *
     * @param array a non-null, non-empty array
     * @return index of the max value in array (first occurrence on ties)
     * @throws IllegalArgumentException if the array is null or empty
     */
    public int getMaxValueIndex(double[] array) {
        if (array == null || array.length == 0) {
            throw new IllegalArgumentException("array must be non-null and non-empty");
        }
        // Seed with the first element rather than Integer.MIN_VALUE: the old
        // sentinel silently returned index 0 for arrays whose values are all
        // below Integer.MIN_VALUE (e.g. very large negative doubles).
        int index = 0;
        double max = array[0];
        for (int i = 1; i < array.length; i++) {
            if (array[i] > max) {
                max = array[i];
                index = i;
            }
        }
        return index;
    }
}
public class FieldType {
    /**
     * If we have a class Foo with a collection of Bar's then we go through Bar's DAO looking for a Foo field.
     * We need this field to build the query that is able to find all Bar's that have a foo_id matching our id.
     *
     * @param clazz the owning (collection-holding) class, used for error reporting
     * @param foreignClass the class the matching field must have
     * @param foreignDao DAO of the collection's element type, whose table is searched
     * @return the matching foreign field type
     * @throws SQLException if a candidate field is found but is not marked foreign,
     *         or if no matching field exists at all
     */
    private FieldType findForeignFieldType(Class<?> clazz, Class<?> foreignClass, Dao<?, ?> foreignDao) throws SQLException {
        // Optional explicit column name configured on the collection; null means "any field of the right type".
        String foreignColumnName = fieldConfig.getForeignCollectionForeignFieldName();
        for (FieldType fieldType : foreignDao.getTableInfo().getFieldTypes()) {
            if (fieldType.getType() == foreignClass
                    && (foreignColumnName == null || fieldType.getField().getName().equals(foreignColumnName))) {
                if (!fieldType.fieldConfig.isForeign() && !fieldType.fieldConfig.isForeignAutoRefresh()) {
                    // this may never be reached
                    throw new SQLException("Foreign collection object " + clazz + " for field '" + field.getName()
                            + "' contains a field of class " + foreignClass + " but it's not foreign");
                }
                return fieldType;
            }
        }
        // build our complex error message
        StringBuilder sb = new StringBuilder();
        sb.append("Foreign collection class ").append(clazz.getName());
        sb.append(" for field '").append(field.getName()).append("' column-name does not contain a foreign field");
        if (foreignColumnName != null) {
            sb.append(" named '").append(foreignColumnName).append('\'');
        }
        sb.append(" of class ").append(foreignClass.getName());
        throw new SQLException(sb.toString());
    }
}
public class BlockInlineChecksumReader { /** * Return the generation stamp from the name of the block file . */
static long getGenerationStampFromInlineChecksumFile ( String blockName ) throws IOException { } } | String [ ] vals = StringUtils . split ( blockName , '_' ) ; if ( vals . length != 6 ) { // blk , blkid , genstamp , version , checksumtype , byte per checksum
throw new IOException ( "unidentified block name format: " + blockName ) ; } return Long . parseLong ( vals [ 2 ] ) ; |
public class FilterMatcher {
    /**
     * Check that two expressions are "equivalent" in the sense of commutative operators and CVE/PVE.
     * TODO: most of the code here can be/will be substituted with an equivalence relation on
     * AbstractExpression when ENG-14181 is merged.
     *
     * @return whether the two expressions match
     */
    public boolean match() {
        if (m_expr1 == null || m_expr2 == null) {
            // Two nulls match each other; a null never matches a non-null.
            return m_expr1 == m_expr2;
        } else if (m_expr1.getExpressionType() != m_expr2.getExpressionType()) {
            // Exceptions to the type-must-match rule:
            // 1. CVE and PVE need to be translated before compared.
            // 2. Comparisons could be reversed, e.g. "a >= b" and "b <= a" are the same relation.
            return valueConstantsMatch(m_expr1, m_expr2) || revComparisonsMatch(m_expr1, m_expr2);
        } else if (m_expr1 instanceof ConstantValueExpression) {
            return m_expr1.equals(m_expr2);
        } else if (m_expr1 instanceof TupleValueExpression) {
            return tvesMatch((TupleValueExpression) m_expr1, (TupleValueExpression) m_expr2);
        } else if (m_expr1 instanceof VectorValueExpression) {
            // VVE does not have left/right children.
            return vectorsMatch((VectorValueExpression) m_expr1, (VectorValueExpression) m_expr2);
        } else if (EXCHANGEABLE_EXPRESSIONS.contains(m_expr1.getExpressionType())) {
            // Commutative operator: try operands in order first, then swapped.
            return (argsMatch(m_expr1, m_expr2) && childrenMatch(m_expr1, m_expr2))
                    || (new FilterMatcher(m_expr1.getLeft(), m_expr2.getRight()).match()
                            && new FilterMatcher(m_expr1.getRight(), m_expr2.getLeft()).match());
        } else {
            return argsMatch(m_expr1, m_expr2) && childrenMatch(m_expr1, m_expr2);
        }
    }
}
public class StreamingWorkbook { /** * Not supported */
@ Override public int addOlePackage ( byte [ ] bytes , String s , String s1 , String s2 ) throws IOException { } } | throw new UnsupportedOperationException ( ) ; |
public class Filter { /** * - - select helper methods */
private Object [ ] compile ( String path ) { } } | List < String > lst ; lst = Filesystem . SEPARATOR . split ( path ) ; if ( lst . size ( ) == 0 ) { throw new IllegalArgumentException ( "empty path: " + path ) ; } if ( lst . get ( 0 ) . equals ( "" ) ) { throw new IllegalArgumentException ( "absolute path not allowed: " + path ) ; } if ( lst . get ( lst . size ( ) - 1 ) . equals ( "" ) ) { throw new IllegalArgumentException ( "path must not end with separator: " + path ) ; } return compileTail ( lst , 0 ) ; |
public class WhiteboxImpl { /** * Convenience method to get a field from a class type .
* The method will first try to look for a declared field in the same class .
* If the method is not declared in this class it will look for the field in
* the super class . This will continue throughout the whole class hierarchy .
* If the field is not found an { @ link IllegalArgumentException } is thrown .
* @ param type The type of the class where the method is located .
* @ param fieldName The method names .
* @ return A . */
@ SuppressWarnings ( { } } | "unchecked" , "rawtypes" } ) public static Field getField ( Class < ? > type , String fieldName ) { LinkedList < Class < ? > > examine = new LinkedList < Class < ? > > ( ) ; examine . add ( type ) ; Set < Class < ? > > done = new HashSet < Class < ? > > ( ) ; while ( ! examine . isEmpty ( ) ) { Class < ? > thisType = examine . removeFirst ( ) ; done . add ( thisType ) ; final Field [ ] declaredField = thisType . getDeclaredFields ( ) ; for ( Field field : declaredField ) { if ( fieldName . equals ( field . getName ( ) ) ) { field . setAccessible ( true ) ; return field ; } } Set < Class < ? > > potential = new HashSet < Class < ? > > ( ) ; final Class < ? > clazz = thisType . getSuperclass ( ) ; if ( clazz != null ) { potential . add ( thisType . getSuperclass ( ) ) ; } potential . addAll ( ( Collection ) Arrays . asList ( thisType . getInterfaces ( ) ) ) ; potential . removeAll ( done ) ; examine . addAll ( potential ) ; } throwExceptionIfFieldWasNotFound ( type , fieldName , null ) ; return null ; |
public class Cache {
    /**
     * Implementation of the eviction strategy: ages the three generation tables.
     * Entries in the tertiary table are dropped, secondary becomes tertiary,
     * primary becomes secondary, and a fresh bounded map becomes the primary table.
     */
    protected synchronized void evictStaleEntries() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            int size = primaryTable.size() + secondaryTable.size() + tertiaryTable.size();
            Tr.debug(tc, "The current cache size is " + size + "( " + primaryTable.size() + ", "
                    + secondaryTable.size() + ", " + tertiaryTable.size() + ")");
        }
        // Rotate the generations; the old tertiary table is discarded here.
        tertiaryTable = secondaryTable;
        secondaryTable = primaryTable;
        primaryTable = Collections.synchronizedMap(new BoundedHashMap(this.entryLimit));
    }
}
public class SimpleXMLParser { /** * Parses the XML document firing the events to the handler .
* @ param doc the document handler
* @ param r the document . The encoding is already resolved . The reader is not closed
* @ throws IOException on error */
public static void parse ( SimpleXMLDocHandler doc , SimpleXMLDocHandlerComment comment , Reader r , boolean html ) throws IOException { } } | SimpleXMLParser parser = new SimpleXMLParser ( doc , comment , html ) ; parser . go ( r ) ; |
public class Instantiators {
    /**
     * Creates a converter for {@code typeLiteral}.
     *
     * @param typeLiteral the type literal whose underlying type the converter targets
     * @param modules instantiator modules used to build the converter
     * @return a converter for the literal's type
     */
    public static <T> Converter<T> createConverter(TypeLiteral<T> typeLiteral, InstantiatorModule... modules) {
        // Delegate to the Type-based factory using the literal's captured type.
        return createConverterForType(typeLiteral.getType(), modules);
    }
}
public class RegularPactTask { /** * Utility function that composes a string for logging purposes . The string includes the given message ,
* the given name of the task and the index in its subtask group as well as the number of instances
* that exist in its subtask group .
* @ param message The main message for the log .
* @ param taskName The name of the task .
* @ param parent The nephele task that contains the code producing the message .
* @ return The string for logging . */
public static String constructLogString ( String message , String taskName , AbstractInvokable parent ) { } } | return message + ": " + taskName + " (" + ( parent . getEnvironment ( ) . getIndexInSubtaskGroup ( ) + 1 ) + '/' + parent . getEnvironment ( ) . getCurrentNumberOfSubtasks ( ) + ')' ; |
public class FormMetaData { /** * Find a PropertyBinding with the given bean ( Java Bean Name ) property .
* @ param beanPropName The name of the bean property .
* @ return The PropertyBinding
* @ throws IllegalArgumentException if the PropertyBinding is not found . */
public PropertyBinding findAttribute ( String beanPropName ) { } } | for ( PropertyBinding attrib : baseAttributes ) { if ( attrib . getJavaName ( ) . equals ( beanPropName ) ) return attrib ; } for ( Map . Entry < String , List < PropertyBinding > > entry : groupedAttributes . entrySet ( ) ) { for ( PropertyBinding attrib : entry . getValue ( ) ) { if ( attrib . getJavaName ( ) . equals ( beanPropName ) ) return attrib ; } } throw new IllegalArgumentException ( "Unknown Attribute with beanPropName name " + beanPropName ) ; |
public class StorageAccountsInner {
    /**
     * Failover request can be triggered for a storage account in case of availability issues.
     * The failover occurs from the storage account's primary cluster to the secondary cluster
     * for RA-GRS accounts. The secondary cluster will become primary after failover.
     *
     * @param resourceGroupName the name of the resource group within the user's subscription (case insensitive)
     * @param accountName the name of the storage account within the specified resource group;
     *        must be 3-24 characters, numbers and lower-case letters only
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<Void>> failoverWithServiceResponseAsync(String resourceGroupName, String accountName) {
        // Validate required call and client parameters up front.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Observable<Response<ResponseBody>> observable = service.failover(resourceGroupName, accountName,
                this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent());
        // Long-running POST: poll for completion via the Location header.
        return client.getAzureClient().getPostOrDeleteResultAsync(observable,
                new LongRunningOperationOptions().withFinalStateVia(LongRunningFinalState.LOCATION),
                new TypeToken<Void>() { }.getType());
    }
}
public class SessionManagementBeanImpl {
    /**
     * The following is intended to run ON the IO thread.
     * Stores the user principals locally and propagates them to the
     * service-level management bean for this session.
     */
    @Override
    public void setUserPrincipals(Map<String, String> userPrincipals) {
        this.userPrincipals = userPrincipals;
        this.serviceManagementBean.addUserPrincipals(session, userPrincipals);
    }
}
public class RefreshRate { /** * Maximum is 24 hrs */
public static int fromHours ( int hours ) { } } | int refreshRate = hours * RefreshRate . SECONDS_PER_HOUR ; RefreshRate . validateRefreshRate ( refreshRate ) ; return refreshRate ; |
public class CoronaJobHistory {
    /**
     * Log finish time of a map task attempt.
     *
     * @param taskAttemptId task attempt id
     * @param finishTime finish time
     * @param hostName host name
     * @param taskType whether the attempt is cleanup or setup or map
     * @param stateString state string of the task attempt
     * @param counter counters of the task attempt
     */
    public void logMapTaskFinished(TaskAttemptID taskAttemptId, long finishTime, String hostName,
            String taskType, String stateString, Counters counter) {
        // History logging may be disabled entirely.
        if (disableHistory) {
            return;
        }
        // Guard against attempts belonging to a different job than this history instance.
        JobID id = taskAttemptId.getJobID();
        if (!this.jobId.equals(id)) {
            throw new RuntimeException("JobId from task: " + id + " does not match expected: " + jobId);
        }
        if (null != writers) {
            // Keys and values are parallel arrays: each key describes the
            // string at the same position.
            log(writers, RecordTypes.MapAttempt,
                    new Keys[] { Keys.TASK_TYPE, Keys.TASKID, Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS,
                            Keys.FINISH_TIME, Keys.HOSTNAME, Keys.STATE_STRING, Keys.COUNTERS },
                    new String[] { taskType, taskAttemptId.getTaskID().toString(), taskAttemptId.toString(),
                            Values.SUCCESS.name(), String.valueOf(finishTime), hostName, stateString,
                            counter.makeEscapedCompactString() });
        }
    }
}
public class HttpMediaType {
    /**
     * Sets the media parameter to the specified value.
     *
     * @param name case-insensitive name of the parameter
     * @param value value of the parameter or {@code null} to remove
     * @return this media type, for call chaining
     */
    public HttpMediaType setParameter(String name, String value) {
        // null value means removal, matching the documented contract.
        if (value == null) {
            removeParameter(name);
            return this;
        }
        Preconditions.checkArgument(TOKEN_REGEX.matcher(name).matches(), "Name contains reserved characters");
        // Any mutation invalidates the cached string representation.
        cachedBuildResult = null;
        // Parameter names are case-insensitive; normalize with a fixed locale.
        parameters.put(name.toLowerCase(Locale.US), value);
        return this;
    }
}
public class RtfListTable {
    /**
     * Writes the list and list override tables to the given RTF output stream.
     *
     * @param result the stream to write the RTF groups to
     * @throws IOException on write error
     */
    public void writeDefinition(final OutputStream result) throws IOException {
        // First group: the list table, containing picture lists then regular lists.
        result.write(OPEN_GROUP);
        result.write(LIST_TABLE);
        this.document.outputDebugLinebreak(result);
        for (int i = 0; i < picturelists.size(); i++) {
            RtfPictureList l = (RtfPictureList) picturelists.get(i);
            // l.setID(document.getRandomInt());
            l.writeDefinition(result);
            this.document.outputDebugLinebreak(result);
        }
        for (int i = 0; i < lists.size(); i++) {
            RtfList l = (RtfList) lists.get(i);
            l.setID(document.getRandomInt());
            l.writeDefinition(result);
            this.document.outputDebugLinebreak(result);
        }
        result.write(CLOSE_GROUP);
        this.document.outputDebugLinebreak(result);
        // Second group: the list override table.
        result.write(OPEN_GROUP);
        result.write(LIST_OVERRIDE_TABLE);
        this.document.outputDebugLinebreak(result);
        // list override index values are 1-based, not 0.
        // valid list override index values \ls are 1 to 2000.
        // if there are more than 2000 lists, the result is undefined.
        for (int i = 0; i < lists.size(); i++) {
            result.write(OPEN_GROUP);
            result.write(LIST_OVERRIDE);
            result.write(RtfList.LIST_ID);
            result.write(intToByteArray(((RtfList) lists.get(i)).getID()));
            result.write(LIST_OVERRIDE_COUNT);
            result.write(intToByteArray(0)); // is this correct? Spec says valid values are 1 or 9.
            result.write(RtfList.LIST_NUMBER);
            result.write(intToByteArray(((RtfList) lists.get(i)).getListNumber()));
            result.write(CLOSE_GROUP);
            this.document.outputDebugLinebreak(result);
        }
        result.write(CLOSE_GROUP);
        this.document.outputDebugLinebreak(result);
    }
}
public class CmsScheduledJobInfo {
    /**
     * Sets the context information for the user executing the job.<p>
     *
     * This will also "freeze" the context information that is set.<p>
     *
     * @param contextInfo the context information for the user executing the job
     * @see CmsContextInfo#freeze()
     */
    public void setContextInfo(CmsContextInfo contextInfo) {
        // Reject modification once this job info itself has been frozen.
        checkFrozen();
        if (contextInfo == null) {
            throw new CmsIllegalArgumentException(Messages.get().container(Messages.ERR_BAD_CONTEXT_INFO_0));
        }
        m_context = contextInfo;
    }
}
public class FibonacciHeap {
    /**
     * Consolidate: make sure each root tree has a distinct degree by repeatedly
     * linking roots of equal degree, then rebuild the root list and locate the
     * new minimum root. Uses the {@code aux} array, indexed by degree, as scratch.
     */
    @SuppressWarnings("unchecked")
    private void consolidate() {
        int maxDegree = -1;
        // for each node in root list
        int numRoots = roots;
        Node<K, V> x = minRoot;
        while (numRoots > 0) {
            // Capture the next root now: linking below rewires x's pointers.
            Node<K, V> nextX = x.next;
            int d = x.degree;
            while (true) {
                Node<K, V> y = aux[d];
                if (y == null) {
                    break;
                }
                // make sure x's key is smaller (swap x and y if not)
                int c;
                if (comparator == null) {
                    c = ((Comparable<? super K>) y.key).compareTo(x.key);
                } else {
                    c = comparator.compare(y.key, x.key);
                }
                if (c < 0) {
                    Node<K, V> tmp = x;
                    x = y;
                    y = tmp;
                }
                // make y a child of x; x's degree grows by one, so retry at d+1
                link(y, x);
                aux[d] = null;
                d++;
            }
            // store result
            aux[d] = x;
            // keep track of max degree
            if (d > maxDegree) {
                maxDegree = d;
            }
            // advance
            x = nextX;
            numRoots--;
        }
        // recreate root list and find minimum root
        minRoot = null;
        roots = 0;
        for (int i = 0; i <= maxDegree; i++) {
            if (aux[i] != null) {
                addToRootList(aux[i]);
                aux[i] = null;
            }
        }
    }
}
public class WavefrontNamingConvention { /** * Valid characters are : a - z , A - Z , 0-9 , hyphen ( " - " ) , underscore ( " _ " ) , dot ( " . " ) . Forward slash ( " / " ) and comma
* ( " , " ) are allowed if metricName is enclosed in double quotes . */
@ Override public String name ( String name , Meter . Type type , @ Nullable String baseUnit ) { } } | String sanitizedName = NAME_CLEANUP_PATTERN . matcher ( delegate . name ( name , type , baseUnit ) ) . replaceAll ( "_" ) ; // add name prefix if prefix exists
if ( namePrefix != null ) { return namePrefix + "." + sanitizedName ; } return sanitizedName ; |
public class Strings { /** * Checks if each target element is empty and uses either target element ,
* or if the target element is empty uses { @ code defaultValue } .
* @ param target the set of values that to be checked if is null or empty
* If non - String objects , toString ( ) will be called .
* @ param defaultValue value to return if target is empty
* If non - String objects , toString ( ) will be called .
* @ return a { @ code Set < String > } with the result of
* { @ link # defaultString ( Object , Object ) } for each element of the target .
* @ since 2.1.3 */
public Set < String > setDefaultString ( final Set < ? > target , final Object defaultValue ) { } } | if ( target == null ) { return null ; } final Set < String > result = new LinkedHashSet < String > ( target . size ( ) + 2 ) ; for ( final Object element : target ) { result . add ( defaultString ( element , defaultValue ) ) ; } return result ; |
public class JsonUtils { /** * Parses a JSON - LD document from the given { @ link Reader } to an object that
* can be used as input for the { @ link JsonLdApi } and
* { @ link JsonLdProcessor } methods .
* @ param reader
* The JSON - LD document in a Reader .
* @ return A JSON Object .
* @ throws JsonParseException
* If there was a JSON related error during parsing .
* @ throws IOException
* If there was an IO error during parsing . */
public static Object fromReader ( Reader reader ) throws IOException { } } | final JsonParser jp = JSON_FACTORY . createParser ( reader ) ; return fromJsonParser ( jp ) ; |
public class ProjectiveInitializeAllCommon {
    /**
     * Must call if you change configurations: rebuilds the internal RANSAC and
     * bundle-adjustment instances from the current config objects.
     */
    public void fixate() {
        ransac = FactoryMultiViewRobust.trifocalRansac(configTriRansac, configError, configRansac);
        sba = FactoryMultiView.bundleSparseProjective(configSBA);
    }
}
public class AbstractMongoDAO {
    /**
     * Runs a map-reduce job on the collection. The functions are read from the classpath
     * in the folder mongodb. The system reads them from files called &lt;name&gt;.map.js,
     * &lt;name&gt;.reduce.js and optionally &lt;name&gt;.finalize.js. After this the result
     * is converted using the given {@link MapReduceResultHandler}.
     *
     * @param <R> the type of the result class
     * @param name the name of the map-reduce functions
     * @param query the query to filter the elements used for the map-reduce
     * @param sort sort query to sort elements before running map-reduce
     * @param scope the global scope for the JavaScript run
     * @param conv the converter to convert the result
     * @return an {@link Iterable} with the result entries
     * @throws RuntimeException if resources cannot be read
     */
    protected final <R> Iterable<R> mapReduce(String name, DBObject query, DBObject sort,
            Map<String, Object> scope, final MapReduceResultHandler<R> conv) {
        // Pure delegation to the data-access layer, which performs the actual job.
        return this.dataAccess.mapReduce(name, query, sort, scope, conv);
    }
}
public class OptimizerNode {
    /**
     * Causes this node to compute its output estimates (such as number of rows, size in bytes)
     * based on the inputs and the compiler hints. The compiler hints are instantiated with
     * conservative default values which are used if no other values are provided. Nodes may
     * access the statistics to determine relevant information.
     *
     * @param statistics the statistics object which may be accessed to get statistical
     *        information; may be null, if no statistics are available
     */
    public void computeOutputEstimates(DataStatistics statistics) {
        // sanity checking: all inputs must be wired before estimates can be computed
        for (PactConnection c : getIncomingConnections()) {
            if (c.getSource() == null) {
                throw new CompilerException("Bug: Estimate computation called before inputs have been set.");
            }
        }
        // let every operator do its computation
        computeOperatorSpecificDefaultEstimates(statistics);
        // overwrite default estimates with hints, if given
        if (getPactContract() == null || getPactContract().getCompilerHints() == null) {
            return;
        }
        CompilerHints hints = getPactContract().getCompilerHints();
        if (hints.getOutputSize() >= 0) {
            this.estimatedOutputSize = hints.getOutputSize();
        }
        if (hints.getOutputCardinality() >= 0) {
            this.estimatedNumRecords = hints.getOutputCardinality();
        }
        if (hints.getFilterFactor() >= 0.0f) {
            // Scale existing estimates by the filter factor; if no local estimate
            // exists, fall back to the predecessor's record count (single-input only).
            if (this.estimatedNumRecords >= 0) {
                this.estimatedNumRecords = (long) (this.estimatedNumRecords * hints.getFilterFactor());
                if (this.estimatedOutputSize >= 0) {
                    this.estimatedOutputSize = (long) (this.estimatedOutputSize * hints.getFilterFactor());
                }
            } else if (this instanceof SingleInputNode) {
                OptimizerNode pred = ((SingleInputNode) this).getPredecessorNode();
                if (pred != null && pred.getEstimatedNumRecords() >= 0) {
                    this.estimatedNumRecords = (long) (pred.getEstimatedNumRecords() * hints.getFilterFactor());
                }
            }
        }
        // use the width to infer the cardinality (given size) and vice versa
        if (hints.getAvgOutputRecordSize() >= 1) {
            // the estimated number of rows based on size (-1 marks "unknown")
            if (this.estimatedNumRecords == -1 && this.estimatedOutputSize >= 0) {
                this.estimatedNumRecords = (long) (this.estimatedOutputSize / hints.getAvgOutputRecordSize());
            } else if (this.estimatedOutputSize == -1 && this.estimatedNumRecords >= 0) {
                this.estimatedOutputSize = (long) (this.estimatedNumRecords * hints.getAvgOutputRecordSize());
            }
        }
    }
}
public class JSoupSeleneseParser { /** * processing table row */
private List < String > getCommand ( Element trNode ) { } } | List < String > result = new ArrayList < String > ( ) ; Elements trChildNodes = trNode . getElementsByTag ( "TD" ) ; for ( Element trChild : trChildNodes ) { result . add ( getTableDataValue ( trChild ) ) ; } if ( result . size ( ) != 1 && result . size ( ) != 3 ) { throw new RuntimeException ( "Something strange" ) ; // FIXME
} return result ; |
public class AmazonWorkLinkClient { /** * Signs the user out from all of their devices . The user can sign in again if they have valid credentials .
* @ param signOutUserRequest
* @ return Result of the SignOutUser operation returned by the service .
* @ throws UnauthorizedException
* You are not authorized to perform this action .
* @ throws InternalServerErrorException
* The service is temporarily unavailable .
* @ throws InvalidRequestException
* The request is not valid .
* @ throws ResourceNotFoundException
* The requested resource was not found .
* @ throws TooManyRequestsException
* The number of requests exceeds the limit .
* @ sample AmazonWorkLink . SignOutUser
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / worklink - 2018-09-25 / SignOutUser " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public SignOutUserResult signOutUser ( SignOutUserRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeSignOutUser ( request ) ; |
public class StunAttributeFactory { /** * Create a ChannelNumberAttribute .
* @ param channelNumber
* channel number
* @ return newly created ChannelNumberAttribute */
public static ChannelNumberAttribute createChannelNumberAttribute ( char channelNumber ) { } } | ChannelNumberAttribute attribute = new ChannelNumberAttribute ( ) ; attribute . setChannelNumber ( channelNumber ) ; return attribute ; |
public class MediaHttpUploader { /** * Executes a direct media upload or resumable media upload conforming to the specifications
* listed < a href = ' https : / / developers . google . com / api - client - library / java / google - api - java - client / media - upload ' > here . < / a >
* This method is not reentrant . A new instance of { @ link MediaHttpUploader } must be instantiated
* before upload called be called again .
* If an error is encountered during the request execution the caller is responsible for parsing
* the response correctly . For example for JSON errors :
* < pre >
* if ( ! response . isSuccessStatusCode ( ) ) {
* throw GoogleJsonResponseException . from ( jsonFactory , response ) ;
* < / pre >
* Callers should call { @ link HttpResponse # disconnect } when the returned HTTP response object is
* no longer needed . However , { @ link HttpResponse # disconnect } does not have to be called if the
* response stream is properly closed . Example usage :
* < pre >
* HttpResponse response = batch . upload ( initiationRequestUrl ) ;
* try {
* process the HTTP response object
* } finally {
* response . disconnect ( ) ;
* < / pre >
* @ param initiationRequestUrl The request URL where the initiation request will be sent
* @ return HTTP response */
public HttpResponse upload ( GenericUrl initiationRequestUrl ) throws IOException { } } | Preconditions . checkArgument ( uploadState == UploadState . NOT_STARTED ) ; if ( directUploadEnabled ) { return directUpload ( initiationRequestUrl ) ; } return resumableUpload ( initiationRequestUrl ) ; |
public class SimonDataGenerator {
    /**
     * Recursively adds many Simons for performance testing.
     * At depth 0, leaf Simons of type Stopwatch are created and exercised with splits;
     * at greater depths, intermediate groups are created and recursed into.
     *
     * @param prefix name prefix for generated Simons
     * @param depth remaining recursion depth; 0 produces leaves
     * @param groupWidth upper bound on children per intermediate group
     * @param leafWidth upper bound on Stopwatch leaves per group
     * @param splitWidth number of splits to record per Stopwatch
     */
    private void addManySimons(String prefix, int depth, int groupWidth, int leafWidth, int splitWidth) {
        if (depth == 0) {
            // Generate Simons of type Stopwatch
            // NOTE(review): Math.min(1, groupWidth / 2) evaluates to 0 or 1; a lower
            // bound of Math.max(1, ...) may have been intended — confirm with randomInt's contract.
            final int sibblings = randomInt(Math.min(1, groupWidth / 2), leafWidth);
            for (int i = 0; i < sibblings; i++) {
                String name = prefix + "." + ALPHABET.charAt(i);
                addStopwatchSplits(SimonManager.getStopwatch(name), splitWidth);
            }
        } else {
            // Generate Simons of type Unknown (intermediate groups), then recurse.
            final int sibblings = randomInt(Math.min(1, groupWidth / 2), groupWidth);
            for (int i = 0; i < sibblings; i++) {
                String name = prefix + "." + ALPHABET.charAt(i);
                addManySimons(name, depth - 1, groupWidth, leafWidth, splitWidth);
            }
        }
    }
}
public class File {
    /**
     * Renames this file to {@code newPath}. This operation is supported for both
     * files and directories.
     *
     * <p>Many failures are possible. Some of the more likely failures include:
     * <ul>
     * <li>Write permission is required on the directories containing both the source and
     * destination paths.
     * <li>Search permission is required for all parents of both paths.
     * <li>Both paths must be on the same mount point. On Android, applications are most likely
     * to hit this restriction when attempting to copy between internal storage and an SD card.
     * </ul>
     *
     * <p>Note that this method does <i>not</i> throw {@code IOException} on failure.
     * Callers must check the return value.
     *
     * @param newPath the new path
     * @return true on success
     */
    public boolean renameTo(File newPath) {
        try {
            Libcore.os.rename(path, newPath.path);
            return true;
        } catch (ErrnoException errnoException) {
            // Failure is reported via the boolean return, never via an exception.
            return false;
        }
    }
}
public class DefaultStreamingClient {
    /**
     * {@inheritDoc}
     *
     * <p>Delegates to the four-argument overload with a default exception handler.
     */
    @Override
    public InputStream getStreamAsync(Face face, Interest interest, SegmentationType partitionMarker) throws IOException {
        return getStreamAsync(face, interest, partitionMarker, new DefaultOnException());
    }
}
public class AnycastOutputHandler {
    /**
     * Callback from a stream that it has been flushed.
     * Schedules asynchronous removal of the stream's persistent data, provided the
     * table still holds an entry whose stream id matches the flushed stream.
     *
     * @param stream the stream that has been flushed
     */
    public final void streamIsFlushed(AOStream stream) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "streamIsFlushed", stream);
        // we schedule an asynchronous removal of the persistent data
        synchronized (streamTable) {
            String key = SIMPUtils.getRemoteGetKey(stream.getRemoteMEUuid(), stream.getGatheringTargetDestUuid());
            StreamInfo sinfo = streamTable.get(key);
            if ((sinfo != null) && sinfo.streamId.equals(stream.streamId)) {
                RemovePersistentStream update = null;
                synchronized (sinfo) {
                    // synchronized since reading sinfo.item
                    update = new RemovePersistentStream(key, sinfo.streamId, sinfo.itemStream, sinfo.item);
                }
                doEnqueueWork(update);
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "streamIsFlushed");
    }
}
public class FacebookRestClient { /** * Publishes a Mini - Feed story describing an action taken by the logged - in user , and
* publishes aggregating News Feed stories to their friends .
* Stories are identified as being combinable if they have matching templates and substituted values .
* @ param actorId deprecated .
* @ param titleTemplate markup ( up to 60 chars , tags excluded ) for the feed story ' s title
* section . Must include the token < code > { actor } < / code > .
* @ param titleData ( optional ) contains token - substitution mappings for tokens that appear in
* titleTemplate . Should not contain mappings for the < code > { actor } < / code > or
* < code > { target } < / code > tokens . Required if tokens other than < code > { actor } < / code >
* or < code > { target } < / code > appear in the titleTemplate .
* @ param bodyTemplate ( optional ) markup to be displayed in the feed story ' s body section .
* can include tokens , of the form < code > { token } < / code > , to be substituted using
* bodyData .
* @ param bodyData ( optional ) contains token - substitution mappings for tokens that appear in
* bodyTemplate . Required if the bodyTemplate contains tokens other than < code > { actor } < / code >
* and < code > { target } < / code > .
* @ param bodyGeneral ( optional ) additional body markup that is not aggregated . If multiple instances
* of this templated story are combined together , the markup in the bodyGeneral of
* one of their stories may be displayed .
* @ param targetIds The user ids of friends of the actor , used for stories about a direct action between
* the actor and these targets of his / her action . Required if either the titleTemplate or bodyTemplate
* includes the token < code > { target } < / code > .
* @ param images ( optional ) additional body markup that is not aggregated . If multiple instances
* of this templated story are combined together , the markup in the bodyGeneral of
* one of their stories may be displayed .
* @ return whether the action story was successfully published ; false in case
* of a permission error
* @ see < a href = " http : / / wiki . developers . facebook . com / index . php / Feed . publishTemplatizedAction " >
* Developers Wiki : Feed . publishTemplatizedAction < / a >
* @ see < a href = " http : / / developers . facebook . com / tools . php ? feed " >
* Developers Resources : Feed Preview Console < / a >
* @ deprecated since 01/18/2008 */
public boolean feed_publishTemplatizedAction ( Integer actorId , CharSequence titleTemplate , Map < String , CharSequence > titleData , CharSequence bodyTemplate , Map < String , CharSequence > bodyData , CharSequence bodyGeneral , Collection < Integer > targetIds , Collection < IFeedImage > images ) throws FacebookException , IOException { } } | return feed_publishTemplatizedAction ( titleTemplate , titleData , bodyTemplate , bodyData , bodyGeneral , targetIds , images , /* pageActorId */
null ) ; |
public class DeploymentImpl { /** * { @ inheritDoc } */
public boolean activate ( ) throws Exception { } } | if ( ! activated ) { if ( connectionFactories != null ) { for ( ConnectionFactory cf : connectionFactories ) { cf . activate ( ) ; } } if ( adminObjects != null ) { for ( AdminObject ao : adminObjects ) { ao . activate ( ) ; } } if ( resourceAdapter != null ) { resourceAdapter . activate ( ) ; } activated = true ; return true ; } return false ; |
public class Parser {
    /**
     * Return the word "connected" to the cursor; the word ends at the cursor
     * position. Note that cursor position starts at 0.
     *
     * <p>Escaped spaces (backslash-space) inside a word are treated as part of
     * the word and converted back to plain spaces in the returned value.
     *
     * @param text the text to parse
     * @param cursor the cursor position within {@code text}
     * @return the word connected to the cursor, trimmed; an empty string when
     *         the cursor sits after two consecutive unescaped spaces
     */
    public static String findCurrentWordFromCursor(String text, int cursor) {
        // Case 1: cursor is at (or beyond) the end of the text -> last word.
        if (text.length() <= cursor + 1) {
            // return last word
            if (text.contains(SPACE)) {
                if (doWordContainEscapedSpace(text)) {
                    if (doWordContainOnlyEscapedSpace(text))
                        return switchEscapedSpacesToSpacesInWord(text);
                    else {
                        // Mixed escaped/unescaped spaces: extract the trailing
                        // escaped-space word, then unescape it.
                        return switchEscapedSpacesToSpacesInWord(findEscapedSpaceWordCloseToEnd(text));
                    }
                } else {
                    if (text.lastIndexOf(SPACE) >= cursor) // cant use lastIndexOf
                        // The last space lies at/after the cursor, so search only
                        // the prefix up to the cursor for the word boundary.
                        return text.substring(text.substring(0, cursor).lastIndexOf(SPACE)).trim();
                    else
                        return text.substring(text.lastIndexOf(SPACE)).trim();
                }
            } else
                return text.trim();
        } else {
            // Case 2: cursor is inside the text -> work on the prefix ending at the cursor.
            String rest;
            if (text.length() > cursor + 1)
                rest = text.substring(0, cursor + 1);
            else
                rest = text;
            if (doWordContainOnlyEscapedSpace(rest)) {
                // Two consecutive unescaped spaces around the cursor mean the
                // cursor is between words: there is no "current" word.
                if (cursor > 1 && text.charAt(cursor) == SPACE_CHAR && text.charAt(cursor - 1) == SPACE_CHAR)
                    return "";
                else
                    return switchEscapedSpacesToSpacesInWord(rest);
            } else {
                if (cursor > 1 && text.charAt(cursor) == SPACE_CHAR && text.charAt(cursor - 1) == SPACE_CHAR)
                    return "";
                // only if it contains a ' ' and its not at the end of the string
                if (rest.trim().contains(SPACE))
                    return rest.substring(rest.trim().lastIndexOf(" ")).trim();
                else
                    return rest.trim();
            }
        }
    }
}
public class PingManager { /** * Pings the server . This method will return true if the server is reachable . It
* is the equivalent of calling < code > ping < / code > with the XMPP domain .
* Unlike the { @ link # ping ( Jid ) } case , this method will return true even if
* { @ link # isPingSupported ( Jid ) } is false .
* @ param notifyListeners Notify the PingFailedListener in case of error if true
* @ param pingTimeout The time to wait for a reply in milliseconds
* @ return true if the user ' s server could be pinged .
* @ throws NotConnectedException
* @ throws InterruptedException */
public boolean pingMyServer ( boolean notifyListeners , long pingTimeout ) throws NotConnectedException , InterruptedException { } } | boolean res ; try { res = ping ( connection ( ) . getXMPPServiceDomain ( ) , pingTimeout ) ; } catch ( NoResponseException e ) { res = false ; } if ( ! res && notifyListeners ) { for ( PingFailedListener l : pingFailedListeners ) l . pingFailed ( ) ; } return res ; |
public class PageSpec { /** * Returns an alphanumericly sorted list of names of all declared objects */
public List < String > getSortedObjectNames ( ) { } } | List < String > list = new ArrayList < > ( getObjects ( ) . keySet ( ) ) ; Collections . sort ( list , new AlphanumericComparator ( ) ) ; return list ; |
public class REST {
    /**
     * Sets a proxy for REST requests by configuring the standard JVM proxy
     * system properties for both HTTP and HTTPS.
     *
     * <p>Note: system properties are process-wide, so this affects every HTTP
     * client in the JVM that honors the standard proxy properties.
     *
     * @param proxyHost the proxy host name or IP address; must not be {@code null}
     * @param proxyPort the proxy port; must be in the range 0..65535
     * @throws NullPointerException if {@code proxyHost} is {@code null}
     * @throws IllegalArgumentException if {@code proxyPort} is out of range
     */
    public void setProxy(String proxyHost, int proxyPort) {
        Objects.requireNonNull(proxyHost, "proxyHost");
        if (proxyPort < 0 || proxyPort > 65535) {
            throw new IllegalArgumentException("proxyPort out of range: " + proxyPort);
        }
        String port = Integer.toString(proxyPort);
        // "http.proxySet" is not read by the JDK's networking stack; it is kept
        // for backward compatibility with code that may inspect it.
        System.setProperty("http.proxySet", "true");
        System.setProperty("http.proxyHost", proxyHost);
        System.setProperty("http.proxyPort", port);
        System.setProperty("https.proxyHost", proxyHost);
        System.setProperty("https.proxyPort", port);
    }
}
public class FctBnAccEntitiesProcessors { /** * < p > Get PrcAccEntrySave ( create and put into map ) . < / p >
* @ param pAddParam additional param
* @ return requested PrcAccEntrySave
* @ throws Exception - an exception */
protected final PrcAccEntrySave lazyGetPrcAccEntrySave ( final Map < String , Object > pAddParam ) throws Exception { } } | @ SuppressWarnings ( "unchecked" ) PrcAccEntrySave < RS > proc = ( PrcAccEntrySave < RS > ) this . processorsMap . get ( PrcAccEntrySave . class . getSimpleName ( ) ) ; if ( proc == null ) { proc = new PrcAccEntrySave < RS > ( ) ; proc . setSrvAccSettings ( getSrvAccSettings ( ) ) ; proc . setSrvOrm ( getSrvOrm ( ) ) ; proc . setSrvDatabase ( getSrvDatabase ( ) ) ; proc . setSrvBalance ( getSrvBalance ( ) ) ; // assigning fully initialized object :
this . processorsMap . put ( PrcAccEntrySave . class . getSimpleName ( ) , proc ) ; } return proc ; |
public class ViewCollections {
    /**
     * Apply {@code value} to {@code view} using {@code setter}.
     *
     * <p>Must be called on the UI thread, as indicated by {@code @UiThread}.
     *
     * @param view the target view; must not be {@code null}
     * @param setter the property used to write the value; must not be {@code null}
     * @param value the value to apply; may be {@code null} if the property accepts it
     */
    @UiThread
    public static <T extends View, V> void set(@NonNull T view, @NonNull Property<? super T, V> setter, @Nullable V value) {
        setter.set(view, value);
    }
}
public class DatatypeIdImpl { /** * Returns the WDTK datatype IRI for the property datatype as represented by
* the given JSON datatype string .
* @ param jsonDatatype
* the JSON datatype string ; case - sensitive
* @ throws IllegalArgumentException
* if the given datatype string is not known */
public static String getDatatypeIriFromJsonDatatype ( String jsonDatatype ) { } } | switch ( jsonDatatype ) { case JSON_DT_ITEM : return DT_ITEM ; case JSON_DT_PROPERTY : return DT_PROPERTY ; case JSON_DT_GLOBE_COORDINATES : return DT_GLOBE_COORDINATES ; case JSON_DT_URL : return DT_URL ; case JSON_DT_COMMONS_MEDIA : return DT_COMMONS_MEDIA ; case JSON_DT_TIME : return DT_TIME ; case JSON_DT_QUANTITY : return DT_QUANTITY ; case JSON_DT_STRING : return DT_STRING ; case JSON_DT_MONOLINGUAL_TEXT : return DT_MONOLINGUAL_TEXT ; default : if ( ! JSON_DATATYPE_PATTERN . matcher ( jsonDatatype ) . matches ( ) ) { throw new IllegalArgumentException ( "Invalid JSON datatype \"" + jsonDatatype + "\"" ) ; } String [ ] parts = jsonDatatype . split ( "-" ) ; for ( int i = 0 ; i < parts . length ; i ++ ) { parts [ i ] = StringUtils . capitalize ( parts [ i ] ) ; } return "http://wikiba.se/ontology#" + StringUtils . join ( parts ) ; } |
public class LocalDateTime { /** * Returns a copy of this datetime minus the specified number of months .
* This LocalDateTime instance is immutable and unaffected by this method call .
* The following three lines are identical in effect :
* < pre >
* LocalDateTime subtracted = dt . minusMonths ( 6 ) ;
* LocalDateTime subtracted = dt . minus ( Period . months ( 6 ) ) ;
* LocalDateTime subtracted = dt . withFieldAdded ( DurationFieldType . months ( ) , - 6 ) ;
* < / pre >
* @ param months the amount of months to subtract , may be negative
* @ return the new LocalDateTime minus the increased months */
public LocalDateTime minusMonths ( int months ) { } } | if ( months == 0 ) { return this ; } long instant = getChronology ( ) . months ( ) . subtract ( getLocalMillis ( ) , months ) ; return withLocalMillis ( instant ) ; |
public class JToggle { /** * Set the up indicator to display when the drawer indicator is not
* enabled .
* If you pass < code > null < / code > to this method , the default drawable from
* the theme will be used .
* @ param indicator A drawable to use for the up indicator , or null to use
* the theme ' s default
* @ see # setDrawerIndicatorEnabled ( boolean ) */
public void setHomeAsUpIndicator ( Drawable indicator ) { } } | if ( indicator == null ) { mHomeAsUpIndicator = getThemeUpIndicator ( ) ; mHasCustomUpIndicator = false ; } else { mHomeAsUpIndicator = indicator ; mHasCustomUpIndicator = true ; } if ( ! mDrawerIndicatorEnabled ) { setActionBarUpIndicator ( mHomeAsUpIndicator , 0 ) ; } |
public class CassandraSearcher { /** * Returns the set of resource ids that match the given
* boolean query .
* Separate clauses are performed with separate database queries and their
* results are joined in memory . */
private Set < String > searchForIds ( Context context , BooleanQuery query , ConsistencyLevel readConsistency ) { } } | Set < String > ids = Sets . newTreeSet ( ) ; for ( BooleanClause clause : query . getClauses ( ) ) { Set < String > subQueryIds ; Query subQuery = clause . getQuery ( ) ; if ( subQuery instanceof BooleanQuery ) { subQueryIds = searchForIds ( context , ( BooleanQuery ) subQuery , readConsistency ) ; } else if ( subQuery instanceof TermQuery ) { subQueryIds = searchForIds ( context , ( TermQuery ) subQuery , readConsistency ) ; } else { throw new IllegalStateException ( "Unsupported query: " + subQuery ) ; } switch ( clause . getOperator ( ) ) { case AND : // Intersect
ids . retainAll ( subQueryIds ) ; break ; case OR : // Union
ids . addAll ( subQueryIds ) ; break ; default : throw new IllegalStateException ( "Unsupported operator: " + clause . getOperator ( ) ) ; } } return ids ; |
public class JaspiServiceImpl {
    /**
     * (non-Javadoc)
     *
     * Completes JASPI processing after the target servlet has been invoked by
     * calling {@code secureResponse} on the server auth context, unless the
     * auth context indicates it should be skipped.
     *
     * @see com.ibm.ws.webcontainer.security.JaspiService#postInvoke(com.ibm.ws.webcontainer.security.WebSecurityContext)
     * @param webSecurityContext the per-request security context; a null value is a no-op
     * @throws AuthenticationException if secureResponse returns an unexpected
     *         AuthStatus or throws AuthException
     */
    @Override
    public void postInvoke(WebSecurityContext webSecurityContext) throws AuthenticationException {
        AuthStatus status = null;
        if (webSecurityContext != null) {
            JaspiAuthContext jaspiContext = (JaspiAuthContext) webSecurityContext.getJaspiAuthContext();
            // The auth context decides whether secureResponse applies to this request.
            if (!jaspiContext.runSecureResponse()) {
                if (tc.isDebugEnabled())
                    Tr.debug(tc, "postInvoke", "skip secureResponse.");
                return;
            }
            MessageInfo msgInfo = (MessageInfo) jaspiContext.getMessageInfo();
            ServerAuthContext authContext = (ServerAuthContext) jaspiContext.getServerAuthContext();
            Subject serviceSubject = webSecurityContext.getReceivedSubject();
            try {
                if (tc.isDebugEnabled())
                    Tr.debug(tc, "secureResponse with Jaspi", new Object[] { "authContext=" + authContext, "serviceSubject=" + serviceSubject, msgInfo });
                status = authContext.secureResponse(msgInfo, serviceSubject);
                if (tc.isDebugEnabled())
                    Tr.debug(tc, "secureResponse status: " + status);
                // TODO Which reply or exception?
                // Only SEND_SUCCESS / SEND_FAILURE / SEND_CONTINUE are valid
                // outcomes of secureResponse; anything else is a provider error.
                if (AuthStatus.SEND_SUCCESS != status && AuthStatus.SEND_FAILURE != status && AuthStatus.SEND_CONTINUE != status) {
                    if (tc.isDebugEnabled())
                        Tr.debug(tc, "secureResponse AuthStatus=" + status);
                    String msg = "Unexpected AuthStatus received during secureResponse() status=" + status + ", MessageInfo=" + msgInfo + ", ServerAuthContext=" + authContext;
                    throw new AuthenticationException(msg);
                }
            } catch (AuthException e) {
                /*
                 * The runtime must perform whatever processing it requires to complete the processing of a request that failed after
                 * (or during) service invocation, and prior to communicating the invocation result to the client runtime. The runtime
                 * may send (without calling secureResponse) an appropriate response message of its choice. If a failure message is
                 * returned, it should indicate that the failure in request processing occurred after the service invocation.
                 */
                throw new AuthenticationException("JASPI authentication failed after invoking the requested target service.", e);
            }
        }
    }
}
public class Style {
    /**
     * Setter for styleName - sets the name of the style used.
     *
     * <p>UIMA-generated accessor: writes the value through the low-level CAS API.
     * Do not edit by hand; regenerate via JCasGen instead.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setStyleName(String v) {
        // Verify the feature exists in the type system before writing (standard
        // JCasGen guard); throws if the CAS was built without this feature.
        if (Style_Type.featOkTst && ((Style_Type) jcasType).casFeat_styleName == null)
            jcasType.jcas.throwFeatMissing("styleName", "de.julielab.jules.types.Style");
        jcasType.ll_cas.ll_setStringValue(addr, ((Style_Type) jcasType).casFeatCode_styleName, v);
    }
}
public class JobsInner { /** * Cancel Job .
* Cancel a Job .
* @ param resourceGroupName The name of the resource group within the Azure subscription .
* @ param accountName The Media Services account name .
* @ param transformName The Transform name .
* @ param jobName The Job name .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceResponse } object if successful . */
public Observable < Void > cancelJobAsync ( String resourceGroupName , String accountName , String transformName , String jobName ) { } } | return cancelJobWithServiceResponseAsync ( resourceGroupName , accountName , transformName , jobName ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
public class Jdk8WorkingWeek { /** * Return a new JodaWorkingWeek if the status for the given day has changed .
* @ param working
* true if working day
* @ param givenDayOfWeek
* e . g . DateTimeConstants . MONDAY , DateTimeConstants . TUESDAY , etc */
public Jdk8WorkingWeek withWorkingDayFromDateTimeConstant ( final boolean working , final DayOfWeek givenDayOfWeek ) { } } | final int dayOfWeek = jdk8ToCalendarDayConstant ( givenDayOfWeek ) ; return new Jdk8WorkingWeek ( super . withWorkingDayFromCalendar ( working , dayOfWeek ) ) ; |
public class EnableSarlMavenNatureAction {
    /**
     * Create the configuration job for a Maven project.
     *
     * <p>The returned job closes and deletes the existing Eclipse project
     * metadata (.project / .classpath) before re-importing the project as a
     * Maven project, because the Maven import fails when a project with the
     * same name already exists in the workspace.
     *
     * @param project the project to configure.
     * @return the job.
     */
    @SuppressWarnings("static-method")
    protected Job createJobForMavenProject(IProject project) {
        return new Job(Messages.EnableSarlMavenNatureAction_0) {
            @Override
            protected IStatus run(IProgressMonitor monitor) {
                // Three units of work: close, delete, import.
                final SubMonitor mon = SubMonitor.convert(monitor, 3);
                try {
                    // The project should be a Maven project.
                    // Resolve the on-disk metadata files before the project is closed/deleted.
                    final IPath descriptionFilename = project.getFile(new Path(IProjectDescription.DESCRIPTION_FILE_NAME)).getLocation();
                    final File projectDescriptionFile = descriptionFilename.toFile();
                    final IPath classpathFilename = project.getFile(new Path(FILENAME_CLASSPATH)).getLocation();
                    final File classpathFile = classpathFilename.toFile();
                    // Project was open by the super class. Close it because Maven fails when a project already exists.
                    project.close(mon.newChild(1));
                    // Delete the Eclipse project and classpath definitions because Maven fails when a project already exists.
                    // Note: delete(false, ...) keeps the project contents on disk.
                    project.delete(false, true, mon.newChild(1));
                    if (projectDescriptionFile.exists()) {
                        projectDescriptionFile.delete();
                    }
                    if (classpathFile.exists()) {
                        classpathFile.delete();
                    }
                    // Import
                    MavenImportUtils.importMavenProject(project.getWorkspace().getRoot(), project.getName(), true, mon.newChild(1));
                } catch (CoreException exception) {
                    // Best-effort: log and report OK so the job does not surface an error dialog.
                    SARLMavenEclipsePlugin.getDefault().log(exception);
                }
                return Status.OK_STATUS;
            }
        };
    }
}
public class FacebookRestClient { /** * Used to retrieve photo objects using the search parameters ( one or more of the
* parameters must be provided ) .
* @ param subjId retrieve from photos associated with this user ( optional ) .
* @ param photoIds retrieve from this list of photos ( optional )
* @ return an T of photo objects .
* @ see # photos _ get ( Integer , Long , Collection )
* @ see < a href = " http : / / wiki . developers . facebook . com / index . php / Photos . get " >
* Developers Wiki : Photos . get < / a > */
public T photos_get ( Integer subjId , Collection < Long > photoIds ) throws FacebookException , IOException { } } | return photos_get ( subjId , /* albumId */
null , photoIds ) ; |
public class ApiOvhTelephony { /** * Search a service with its domain , to get its billing account and type
* REST : GET / telephony / searchServices
* @ param axiom [ required ] Filter the value of property ( like ) */
public ArrayList < OvhTelephonySearchService > searchServices_GET ( String axiom ) throws IOException { } } | String qPath = "/telephony/searchServices" ; StringBuilder sb = path ( qPath ) ; query ( sb , "axiom" , axiom ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t20 ) ; |
public class BitcoinUtil { /** * Calculates the double SHA256 - Hash of a transaction in little endian format . It serve as a unique identifier of a transaction , but cannot be used to link the outputs of other transactions as input
* It corresponds to the Bitcoin specification of wtxid ( https : / / bitcoincore . org / en / segwit _ wallet _ dev / )
* @ param transaction The BitcoinTransaction of which we want to calculate the hash
* @ return byte array containing the hash of the transaction . Note : This one can be compared to a prevTransactionHash . However , if you want to search for it in popular blockchain explorers then you need to apply the function BitcoinUtil . reverseByteArray to it !
* @ throws java . io . IOException in case of errors reading from the InputStream */
public static byte [ ] getTransactionHashSegwit ( BitcoinTransaction transaction ) throws IOException { } } | // convert transaction to byte array
ByteArrayOutputStream transactionBAOS = new ByteArrayOutputStream ( ) ; byte [ ] version = reverseByteArray ( convertIntToByteArray ( transaction . getVersion ( ) ) ) ; transactionBAOS . write ( version ) ; // check if segwit
boolean segwit = false ; if ( ( transaction . getMarker ( ) == 0 ) && ( transaction . getFlag ( ) != 0 ) ) { segwit = true ; // we still need to check the case that all witness script stack items for all input transactions are of size 0 = > traditional transaction hash calculation
// cf . https : / / github . com / bitcoin / bips / blob / master / bip - 0141 . mediawiki
// A non - witness program ( defined hereinafter ) txin MUST be associated with an empty witness field , represented by a 0x00 . If all txins are not witness program , a transaction ' s wtxid is equal to its txid .
boolean emptyWitness = true ; for ( int k = 0 ; k < transaction . getBitcoinScriptWitness ( ) . size ( ) ; k ++ ) { BitcoinScriptWitnessItem currentItem = transaction . getBitcoinScriptWitness ( ) . get ( k ) ; if ( currentItem . getStackItemCounter ( ) . length > 1 ) { emptyWitness = false ; break ; } else if ( ( currentItem . getStackItemCounter ( ) . length == 1 ) && ( currentItem . getStackItemCounter ( ) [ 0 ] != 0x00 ) ) { emptyWitness = false ; break ; } } if ( emptyWitness == true ) { return BitcoinUtil . getTransactionHashSegwit ( transaction ) ; } transactionBAOS . write ( transaction . getMarker ( ) ) ; transactionBAOS . write ( transaction . getFlag ( ) ) ; } byte [ ] inCounter = transaction . getInCounter ( ) ; transactionBAOS . write ( inCounter ) ; for ( int i = 0 ; i < transaction . getListOfInputs ( ) . size ( ) ; i ++ ) { transactionBAOS . write ( transaction . getListOfInputs ( ) . get ( i ) . getPrevTransactionHash ( ) ) ; transactionBAOS . write ( reverseByteArray ( convertIntToByteArray ( ( int ) ( transaction . getListOfInputs ( ) . get ( i ) . getPreviousTxOutIndex ( ) ) ) ) ) ; transactionBAOS . write ( transaction . getListOfInputs ( ) . get ( i ) . getTxInScriptLength ( ) ) ; transactionBAOS . write ( transaction . getListOfInputs ( ) . get ( i ) . getTxInScript ( ) ) ; transactionBAOS . write ( reverseByteArray ( convertIntToByteArray ( ( int ) ( transaction . getListOfInputs ( ) . get ( i ) . getSeqNo ( ) ) ) ) ) ; } byte [ ] outCounter = transaction . getOutCounter ( ) ; transactionBAOS . write ( outCounter ) ; for ( int j = 0 ; j < transaction . getListOfOutputs ( ) . size ( ) ; j ++ ) { transactionBAOS . write ( convertBigIntegerToByteArray ( transaction . getListOfOutputs ( ) . get ( j ) . getValue ( ) , 8 ) ) ; transactionBAOS . write ( transaction . getListOfOutputs ( ) . get ( j ) . getTxOutScriptLength ( ) ) ; transactionBAOS . write ( transaction . getListOfOutputs ( ) . get ( j ) . 
getTxOutScript ( ) ) ; } if ( segwit ) { for ( int k = 0 ; k < transaction . getBitcoinScriptWitness ( ) . size ( ) ; k ++ ) { BitcoinScriptWitnessItem currentItem = transaction . getBitcoinScriptWitness ( ) . get ( k ) ; transactionBAOS . write ( currentItem . getStackItemCounter ( ) ) ; for ( int l = 0 ; l < currentItem . getScriptWitnessList ( ) . size ( ) ; l ++ ) { transactionBAOS . write ( currentItem . getScriptWitnessList ( ) . get ( l ) . getWitnessScriptLength ( ) ) ; transactionBAOS . write ( currentItem . getScriptWitnessList ( ) . get ( l ) . getWitnessScript ( ) ) ; } } } byte [ ] lockTime = reverseByteArray ( convertIntToByteArray ( transaction . getLockTime ( ) ) ) ; transactionBAOS . write ( lockTime ) ; byte [ ] transactionByteArray = transactionBAOS . toByteArray ( ) ; byte [ ] firstRoundHash ; byte [ ] secondRoundHash ; try { MessageDigest digest = MessageDigest . getInstance ( "SHA-256" ) ; firstRoundHash = digest . digest ( transactionByteArray ) ; secondRoundHash = digest . digest ( firstRoundHash ) ; } catch ( NoSuchAlgorithmException nsae ) { LOG . error ( nsae ) ; return new byte [ 0 ] ; } return secondRoundHash ; |
public class StringUtil {
    /**
     * Escapes the specified value, if necessary, according to
     * <a href="https://tools.ietf.org/html/rfc4180#section-2">RFC-4180</a>.
     *
     * @param value the value which will be escaped according to RFC-4180
     * @param trimWhiteSpace if true, the value is first trimmed of optional
     *        white-space characters per
     *        <a href="https://tools.ietf.org/html/rfc7230#section-7">RFC-7230</a>
     * @return {@link CharSequence} the escaped value if necessary, or the value unchanged
     */
    public static CharSequence escapeCsv(CharSequence value, boolean trimWhiteSpace) {
        int length = checkNotNull(value, "value").length();
        // [start, last] is the inclusive window of characters to consider.
        int start;
        int last;
        if (trimWhiteSpace) {
            start = indexOfFirstNonOwsChar(value, length);
            last = indexOfLastNonOwsChar(value, start, length);
        } else {
            start = 0;
            last = length - 1;
        }
        if (start > last) {
            return EMPTY_STRING;
        }
        // Index of the first special character that is not already escaped; -1 if none found yet.
        int firstUnescapedSpecial = -1;
        boolean quoted = false;
        if (isDoubleQuote(value.charAt(start))) {
            // A leading quote counts as "already quoted" only if a distinct
            // trailing quote closes it; otherwise it itself needs escaping.
            quoted = isDoubleQuote(value.charAt(last)) && last > start;
            if (quoted) {
                start++;
                last--;
            } else {
                firstUnescapedSpecial = start;
            }
        }
        if (firstUnescapedSpecial < 0) {
            if (quoted) {
                // Inside a quoted value only a lone (undoubled) quote is special.
                for (int i = start; i <= last; i++) {
                    if (isDoubleQuote(value.charAt(i))) {
                        if (i == last || !isDoubleQuote(value.charAt(i + 1))) {
                            firstUnescapedSpecial = i;
                            break;
                        }
                        i++; // skip the second quote of an escaped pair
                    }
                }
            } else {
                // Unquoted value: LF, CR, comma, or a lone quote force quoting.
                for (int i = start; i <= last; i++) {
                    char c = value.charAt(i);
                    if (c == LINE_FEED || c == CARRIAGE_RETURN || c == COMMA) {
                        firstUnescapedSpecial = i;
                        break;
                    }
                    if (isDoubleQuote(c)) {
                        if (i == last || !isDoubleQuote(value.charAt(i + 1))) {
                            firstUnescapedSpecial = i;
                            break;
                        }
                        i++; // skip the second quote of an escaped pair
                    }
                }
            }
            if (firstUnescapedSpecial < 0) {
                // No special character found, or all of them are already escaped.
                // In most cases this returns the same string; a new string (via
                // StringBuilder) is instantiated only if really needed, to avoid
                // extra GC load.
                return quoted ? value.subSequence(start - 1, last + 2) : value.subSequence(start, last + 1);
            }
        }
        // Rebuild with surrounding quotes, doubling any embedded quote.
        StringBuilder result = new StringBuilder(last - start + 1 + CSV_NUMBER_ESCAPE_CHARACTERS);
        result.append(DOUBLE_QUOTE).append(value, start, firstUnescapedSpecial);
        for (int i = firstUnescapedSpecial; i <= last; i++) {
            char c = value.charAt(i);
            if (isDoubleQuote(c)) {
                result.append(DOUBLE_QUOTE);
                if (i < last && isDoubleQuote(value.charAt(i + 1))) {
                    i++; // an already-doubled quote is copied as-is
                }
            }
            result.append(c);
        }
        return result.append(DOUBLE_QUOTE);
    }
}
public class LPPresolver { /** * Manages :
* - ) free column singletons
* - ) doubleton equations combined with a column singleton
* - ) implied free column singletons */
private void checkColumnSingletons ( DoubleMatrix1D c , DoubleMatrix2D A , DoubleMatrix1D b , DoubleMatrix1D lb , DoubleMatrix1D ub , DoubleMatrix1D ylb , DoubleMatrix1D yub , DoubleMatrix1D zlb , DoubleMatrix1D zub ) { } } | for ( short col = 0 ; col < this . vColPositions . length ; col ++ ) { if ( vColPositions [ col ] . length == 1 ) { short row = vColPositions [ col ] [ 0 ] ; log . debug ( "found column singleton at row " + row + ", col " + col ) ; short [ ] vRowPositionsRow = vRowPositions [ row ] ; if ( vRowPositionsRow . length < 2 ) { continue ; // this is a fixed variable
} double ARcol = A . getQuick ( row , col ) ; double cCol = c . getQuick ( col ) ; boolean isCColNz = ! isZero ( cCol ) ; double lbCol = lb . getQuick ( col ) ; double ubCol = ub . getQuick ( col ) ; boolean isLBUnbounded = isLBUnbounded ( lbCol ) ; boolean isUBUnbounded = isUBUnbounded ( ubCol ) ; if ( isLBUnbounded || isUBUnbounded ) { // bound on one of the optimal Lagrange multipliers .
if ( isLBUnbounded ) { if ( isUBUnbounded ) { // table 2 , row 1
zlb . setQuick ( col , 0 ) ; zub . setQuick ( col , 0 ) ; ylb . setQuick ( row , cCol / ARcol ) ; yub . setQuick ( row , cCol / ARcol ) ; } else { if ( ARcol > 0 ) { // table 2 , row 4
zub . setQuick ( col , 0 ) ; ylb . setQuick ( row , cCol / ARcol ) ; } else { // table 2 , row 5
zub . setQuick ( col , 0 ) ; yub . setQuick ( row , cCol / ARcol ) ; } } } else { if ( isUBUnbounded ) { if ( ARcol > 0 ) { // table 2 , row 2
zlb . setQuick ( col , 0 ) ; yub . setQuick ( row , cCol / ARcol ) ; } else { // table 2 , row 3
zlb . setQuick ( col , 0 ) ; ylb . setQuick ( row , cCol / ARcol ) ; } } } if ( isLBUnbounded && isUBUnbounded ) { // free column singleton : one constraint andone variable
// is removed from the problem without generating any fill - ins in A ,
// althoughthe objective function is modified
log . debug ( "free column singleton" ) ; // substitution into the objective function
short [ ] xi = new short [ vRowPositionsRow . length - 1 ] ; double [ ] mi = new double [ vRowPositionsRow . length - 1 ] ; int cntXi = 0 ; for ( int j = 0 ; j < vRowPositionsRow . length ; j ++ ) { short nzJ = vRowPositionsRow [ j ] ; if ( nzJ != col ) { xi [ cntXi ] = nzJ ; mi [ cntXi ] = - A . getQuick ( row , nzJ ) / ARcol ; cntXi ++ ; if ( isCColNz ) { c . setQuick ( nzJ , c . getQuick ( nzJ ) - cCol * A . getQuick ( row , nzJ ) / ARcol ) ; } } } // see Andersen & Andersen , eq ( 10 ) [ that is incorrect ! ]
addToPresolvingStack ( new LinearDependency ( col , xi , mi , b . getQuick ( row ) ) ) ; for ( short j = 0 ; j < vRowPositionsRow . length ; j ++ ) { short column = vRowPositionsRow [ j ] ; // the nz column index
if ( column != col && vColPositions [ column ] . length == 1 ) { // this is also a column singleton , we do not want an empty final column
// so we fix the value of the variable
// @ TODO : fix this for unbounded bounds
if ( c . getQuick ( column ) < 0 ) { lb . setQuick ( column , ub . getQuick ( column ) ) ; } else if ( c . getQuick ( column ) > 0 ) { ub . setQuick ( column , lb . getQuick ( column ) ) ; } else { ub . setQuick ( column , lb . getQuick ( column ) ) ; } log . debug ( "found fixed variables: x[" + column + "]=" + lb . getQuick ( column ) ) ; addToPresolvingStack ( new LinearDependency ( column , null , null , lb . getQuick ( column ) ) ) ; pruneFixedVariable ( column , c , A , b , lb , ub , ylb , yub , zlb , zub ) ; } changeColumnsLengthPosition ( column , vColPositions [ column ] . length , vColPositions [ column ] . length - 1 ) ; vColPositions [ column ] = removeElementFromSortedArray ( vColPositions [ column ] , row ) ; A . setQuick ( row , column , 0. ) ; } changeRowsLengthPosition ( row , vRowPositions [ row ] . length , 0 ) ; vRowPositions [ row ] = new short [ ] { } ; if ( vColPositions [ col ] . length > 0 ) { log . debug ( "Expected empty column " + col + " but was not empty" ) ; throw new IllegalStateException ( "Expected empty column " + col + " but was not empty" ) ; } vColPositions [ col ] = new short [ ] { } ; ylb . setQuick ( row , cCol / ARcol ) ; // ok , but jet stated above
yub . setQuick ( row , cCol / ARcol ) ; // ok , but jet stated above
b . setQuick ( row , 0 ) ; lb . setQuick ( col , this . unboundedLBValue ) ; ub . setQuick ( col , this . unboundedUBValue ) ; c . setQuick ( col , 0 ) ; this . someReductionDone = true ; continue ; } } double impliedL ; double impliedU ; if ( ARcol > 0 ) { impliedL = ( b . getQuick ( row ) - h [ row ] ) / ARcol + ubCol ; impliedU = ( b . getQuick ( row ) - g [ row ] ) / ARcol + lbCol ; } else { impliedL = ( b . getQuick ( row ) - g [ row ] ) / ARcol + ubCol ; impliedU = ( b . getQuick ( row ) - h [ row ] ) / ARcol + lbCol ; } boolean ifl = impliedL > lbCol ; // do not use = , it will cause a loop
boolean ifu = impliedU < ubCol ; // do not use = , it will cause a loop
if ( ifl ) { lb . setQuick ( col , impliedL ) ; // tighten the bounds
lbCol = impliedL ; this . someReductionDone = true ; } if ( ifu ) { ub . setQuick ( col , impliedU ) ; // tighten the bounds
ubCol = impliedU ; this . someReductionDone = true ; } boolean isImpliedFree = ( ifl && ifu ) || ( isZero ( impliedL - lbCol ) && isZero ( impliedU - ubCol ) ) ; if ( vRowPositionsRow . length == 2 || isImpliedFree ) { // substitution
short y = - 1 ; double q = 0. , m = 0. ; short [ ] xi = new short [ vRowPositionsRow . length - 1 ] ; double [ ] mi = new double [ vRowPositionsRow . length - 1 ] ; StringBuffer sb = new StringBuffer ( "x[" + col + "]=" ) ; q = b . getQuick ( row ) / ARcol ; sb . append ( q ) ; int cntXi = 0 ; for ( int j = 0 ; j < vRowPositionsRow . length ; j ++ ) { short nzJ = vRowPositionsRow [ j ] ; if ( nzJ != col ) { double ARnzJ = A . getQuick ( row , nzJ ) ; m = - ARnzJ / ARcol ; xi [ cntXi ] = nzJ ; mi [ cntXi ] = m ; cntXi ++ ; sb . append ( " + " + m + "*x[" + nzJ + "]" ) ; if ( isCColNz ) { // the objective function is modified
double cc = c . getQuick ( col ) * ARnzJ / ARcol ; c . setQuick ( nzJ , c . getQuick ( nzJ ) - cc ) ; } y = nzJ ; } } addToPresolvingStack ( new LinearDependency ( col , xi , mi , q ) ) ; if ( vRowPositionsRow . length == 2 ) { // NOTE : the row and the column are removed
log . debug ( "doubleton equation combined with a column singleton: " + sb . toString ( ) ) ; // x = m * y + q , x column singleton
// addToDoubletonMap ( col , y , m , q ) ;
// the bounds on the variable y are modified sothat the feasible region is unchanged even if the bounds on x are removed
// y = x / m - q / m
double lbY = lb . getQuick ( y ) ; double ubY = ub . getQuick ( y ) ; boolean isLBYUnbounded = isLBUnbounded ( lbY ) ; boolean isUBYUnbounded = isLBUnbounded ( ubY ) ; if ( m > 0 ) { if ( ! isLBUnbounded ) { double l = lbCol / m - q / m ; lb . setQuick ( y , ( isLBYUnbounded ) ? l : Math . max ( lbY , l ) ) ; } if ( ! isUBUnbounded ) { double u = ubCol / m - q / m ; ub . setQuick ( y , ( isUBYUnbounded ) ? u : Math . min ( ubY , u ) ) ; } } else { if ( ! isUBUnbounded ) { double u = ubCol / m - q / m ; lb . setQuick ( y , ( isLBYUnbounded ) ? u : Math . max ( lbY , u ) ) ; } if ( ! isLBUnbounded ) { double l = lbCol / m - q / m ; ub . setQuick ( y , ( isUBYUnbounded ) ? l : Math . min ( ubY , l ) ) ; } } if ( vColPositions [ y ] . length == 1 ) { // this is also a column singleton , we do not want an empty final column
// so we fix the value of the variable
if ( c . getQuick ( y ) < 0 ) { if ( isUBUnbounded ( ub . getQuick ( y ) ) ) { throw new RuntimeException ( "unbounded problem" ) ; } lb . setQuick ( y , ub . getQuick ( y ) ) ; } else if ( c . getQuick ( y ) > 0 ) { if ( isLBUnbounded ( lb . getQuick ( y ) ) ) { throw new RuntimeException ( "unbounded problem" ) ; } ub . setQuick ( y , lb . getQuick ( y ) ) ; } else { // any value is good
if ( isLBUnbounded ( lb . getQuick ( y ) ) && isUBUnbounded ( ub . getQuick ( y ) ) ) { throw new RuntimeException ( "unbounded problem" ) ; } else if ( ! isLBUnbounded ( lb . getQuick ( y ) ) && ! isUBUnbounded ( ub . getQuick ( y ) ) ) { double d = ( ub . getQuick ( y ) - lb . getQuick ( y ) ) / 2 ; lb . setQuick ( y , d ) ; ub . setQuick ( y , d ) ; } else if ( ! isLBUnbounded ( lb . getQuick ( y ) ) ) { ub . setQuick ( y , lb . getQuick ( y ) ) ; } else { lb . setQuick ( y , ub . getQuick ( y ) ) ; } } } // remove the bounds on col
lb . setQuick ( col , this . unboundedLBValue ) ; ub . setQuick ( col , this . unboundedUBValue ) ; // remove the variable
A . setQuick ( row , col , 0. ) ; A . setQuick ( row , y , 0. ) ; b . setQuick ( row , 0. ) ; changeColumnsLengthPosition ( col , vColPositions [ col ] . length , 0 ) ; vColPositions [ col ] = new short [ ] { } ; vRowPositionsRow = removeElementFromSortedArray ( vRowPositionsRow , col ) ; // just to have vRowPositionsRow [ 0]
changeRowsLengthPosition ( row , vRowPositionsRow . length + 1 , vRowPositionsRow . length ) ; changeColumnsLengthPosition ( vRowPositionsRow [ 0 ] , vColPositions [ vRowPositionsRow [ 0 ] ] . length , vColPositions [ vRowPositionsRow [ 0 ] ] . length - 1 ) ; vColPositions [ vRowPositionsRow [ 0 ] ] = removeElementFromSortedArray ( vColPositions [ vRowPositionsRow [ 0 ] ] , row ) ; vRowPositions [ row ] = new short [ ] { } ; this . someReductionDone = true ; continue ; } else { // NOTE : one constraint andone variable is removed from the problem
// without generating any fill - ins in A ,
// although the objective function is modified
log . debug ( "implied free column singletons: " + sb . toString ( ) ) ; ylb . setQuick ( row , c . getQuick ( col ) / A . getQuick ( row , col ) ) ; // ok , but already stated above
yub . setQuick ( row , c . getQuick ( col ) / A . getQuick ( row , col ) ) ; // ok , but already stated above
for ( short cc = 0 ; cc < vRowPositions [ row ] . length ; cc ++ ) { short column = vRowPositions [ row ] [ cc ] ; if ( column == col ) { continue ; } if ( vColPositions [ column ] . length == 1 ) { // this is also a column singleton , we do not want an empty final column
// so we fix the value of the variable
if ( c . getQuick ( column ) < 0 ) { // no problem of unbounded bound , this is an implied free column
if ( isUBUnbounded ( ub . getQuick ( column ) ) ) { throw new RuntimeException ( "unbounded problem" ) ; } lb . setQuick ( column , ub . getQuick ( column ) ) ; } else if ( c . getQuick ( column ) > 0 ) { // no problem of unbounded bound , this is an implied free column
if ( isLBUnbounded ( lb . getQuick ( column ) ) ) { throw new RuntimeException ( "unbounded problem" ) ; } ub . setQuick ( column , lb . getQuick ( column ) ) ; } else { // no problem of unbounded bound , this is an implied free column
if ( isLBUnbounded ( lb . getQuick ( column ) ) || isUBUnbounded ( ub . getQuick ( column ) ) ) { throw new RuntimeException ( "unbounded problem" ) ; } double d = ( ub . getQuick ( y ) - lb . getQuick ( y ) ) / 2 ; lb . setQuick ( y , d ) ; ub . setQuick ( y , d ) ; } log . debug ( "found fixed variables: x[" + column + "]=" + lb . getQuick ( column ) ) ; addToPresolvingStack ( new LinearDependency ( column , null , null , lb . getQuick ( column ) ) ) ; pruneFixedVariable ( column , c , A , b , lb , ub , ylb , yub , zlb , zub ) ; } changeColumnsLengthPosition ( column , vColPositions [ column ] . length , vColPositions [ column ] . length - 1 ) ; vColPositions [ column ] = removeElementFromSortedArray ( vColPositions [ column ] , row ) ; A . setQuick ( row , column , 0. ) ; } A . setQuick ( row , col , 0. ) ; b . setQuick ( row , 0 ) ; lb . setQuick ( col , this . unboundedLBValue ) ; ub . setQuick ( col , this . unboundedUBValue ) ; c . setQuick ( col , 0 ) ; changeColumnsLengthPosition ( col , vColPositions [ col ] . length , 0 ) ; vColPositions [ col ] = new short [ ] { } ; changeRowsLengthPosition ( row , vRowPositions [ row ] . length , 0 ) ; vRowPositions [ row ] = new short [ ] { } ; this . someReductionDone = true ; // checkProgress ( c , A , b , lb , ub , ylb , yub , zlb , zub ) ;
continue ; } } } } |
public class Validators { /** * The input parameter must contain the string c . if yes , the check passes
* @ param c contained strings
* @ param msg error message after verification failed
* @ return Validation */
public static Validation < String > contains ( String c , String msg ) { } } | return notEmpty ( ) . and ( SimpleValidation . from ( ( s ) -> s . contains ( c ) , format ( msg , c ) ) ) ; |
public class GetLoggingLevelCmd { /** * Executes the GetLoggingLevelCmd TANGO command */
public Any execute ( DeviceImpl device , Any in_any ) throws DevFailed { } } | Util . out4 . println ( "GetLoggingLevelCmd::execute(): arrived" ) ; String [ ] dvsa = null ; try { dvsa = extract_DevVarStringArray ( in_any ) ; } catch ( DevFailed df ) { Util . out3 . println ( "GetLoggingLevelCmd::execute() --> Wrong argument type" ) ; Except . re_throw_exception ( df , "API_IncompatibleCmdArgumentType" , "Imcompatible command argument type, expected type is : DevVarStringArray" , "GetLoggingLevelCmd.execute" ) ; } Any out_any = insert ( Logging . instance ( ) . get_logging_level ( dvsa ) ) ; Util . out4 . println ( "Leaving GetLoggingLevelCmd.execute()" ) ; return out_any ; |
public class WSThreadLocal { /** * doesn ' t ) and that it resets the value to its initial value */
public void remove ( ) { } } | Thread thread = Thread . currentThread ( ) ; if ( thread instanceof ThreadPool . Worker ) { Object [ ] wsLocals = getThreadLocals ( ( ThreadPool . Worker ) thread ) ; wsLocals [ index ] = null ; } else { super . remove ( ) ; } |
public class StandardBullhornData {
    /**
     * {@inheritDoc}
     *
     * <p>Delegates directly to {@code handleParseResumeFile}, which performs the
     * actual resume-parsing call.
     */
    @Override
    public ParsedResume parseResumeFile(MultipartFile resume, ResumeFileParseParams params) {
        return this.handleParseResumeFile(resume, params);
    }
}
public class CFFFontSubset { /** * Calculates how many byte it took to write the offset for the subrs in a specific
* private dict .
* @ param Offset The Offset for the private dict
* @ param Size The size of the private dict
* @ return The size of the offset of the subrs in the private dict */
int CalcSubrOffsetSize ( int Offset , int Size ) { } } | // Set the size to 0
int OffsetSize = 0 ; // Go to the beginning of the private dict
seek ( Offset ) ; // Go until the end of the private dict
while ( getPosition ( ) < Offset + Size ) { int p1 = getPosition ( ) ; getDictItem ( ) ; int p2 = getPosition ( ) ; // When reached to the subrs offset
if ( key == "Subrs" ) { // The Offsize ( minus the subrs key )
OffsetSize = p2 - p1 - 1 ; } // All other keys are ignored
} // return the size
return OffsetSize ; |
public class JSRemoteConsumerPoint { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . impl . interfaces . ConsumerPoint # notifyReceiveAllowed ( boolean ) */
public void notifyReceiveAllowed ( boolean isAllowed ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "notifyReceiveAllowed" , new Object [ ] { Boolean . valueOf ( isAllowed ) } ) ; ArrayList < AORequestedTick > satisfiedTicks = null ; try { this . lock ( ) ; try { if ( ( ! closed ) && isAllowed ) { temporarilyStopped = false ; // no longer stopped
// while isAllowed was false , many messages may have been made available but they were missed
// so loop till either there are no more waiting requests , or there are no more available messages
satisfiedTicks = processQueuedMsgs ( null ) ; } // end if ( isAllowed )
else { temporarilyStopped = true ; isready = false ; ck . notReady ( ) ; } } // end this . lock ( )
finally { this . unlock ( ) ; } } catch ( SINotPossibleInCurrentConfigurationException e ) { // No FFDC code needed
notifyException ( e ) ; } // inform parent about satisfied ticks - outside lock
if ( satisfiedTicks != null ) { int length = satisfiedTicks . size ( ) ; for ( int i = 0 ; i < length ; i ++ ) { AORequestedTick aotick = ( AORequestedTick ) satisfiedTicks . get ( i ) ; parent . satisfiedRequest ( aotick . tick , aotick . getMessage ( ) ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "notifyReceiveAllowed" ) ; |
public class AppsInner { /** * Update the metadata of an IoT Central application .
* @ param resourceGroupName The name of the resource group that contains the IoT Central application .
* @ param resourceName The ARM resource name of the IoT Central application .
* @ param appPatch The IoT Central application metadata and security metadata .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the AppInner object */
public Observable < AppInner > beginUpdateAsync ( String resourceGroupName , String resourceName , AppPatch appPatch ) { } } | return beginUpdateWithServiceResponseAsync ( resourceGroupName , resourceName , appPatch ) . map ( new Func1 < ServiceResponse < AppInner > , AppInner > ( ) { @ Override public AppInner call ( ServiceResponse < AppInner > response ) { return response . body ( ) ; } } ) ; |
public class HostDirectives {
    /**
     * Check if any of the rules say anything about the specified path.
     *
     * @param path the path to check
     * @return one of ALLOWED, DISALLOWED or UNDEFINED
     */
    public int checkAccess(String path) {
        // Record the access time; used for cache expiry of these directives.
        timeLastAccessed = System.currentTimeMillis();
        int result = UNDEFINED;
        String myUA = config.getUserAgentName();
        boolean ignoreUADisc = config.getIgnoreUADiscrimination();
        // When checking rules, the list of rules is already ordered based on how
        // well the user-agent of the clause matches the user-agent of the crawler:
        // the most specific match comes first.
        // Only the most specific match is obeyed, unless ignoreUADiscrimination is
        // enabled. In that case, any matching non-wildcard clause that explicitly
        // disallows the path is obeyed. If no such rule exists and any UA in the
        // list is allowed access, that rule is obeyed.
        for (UserAgentDirectives ua : rules) {
            int score = ua.match(myUA);
            // If ignoreUADisc is disabled and the current UA doesn't match, none
            // of the remaining (lower-scored) rules will match either, so stop.
            if (score == 0 && !ignoreUADisc) {
                break;
            }
            // Match the rule to the path.
            // NOTE(review): this passes the field 'userAgent' while the match above
            // used config.getUserAgentName(); confirm the two are meant to differ.
            result = ua.checkAccess(path, userAgent);
            // If the result is ALLOWED or UNDEFINED, or if this rule is not a pure
            // wildcard rule or UA discrimination is honoured, this is the final
            // verdict.
            if (result != DISALLOWED || (!ua.isWildcard() || !ignoreUADisc)) {
                break;
            }
            // Otherwise a wildcard rule disallowed access while ignoreUADisc is on:
            // the verdict is stored, but further rules are still checked to see if
            // any specific UA is allowed this path. If so, that positive UA
            // discrimination is ignored and we crawl the page anyway.
        }
        return result;
    }
}
public class ShapeRenderer {
    /**
     * Draw the given shape filled in with a texture. Only the vertices are set.
     * The colour has to be set independently of this method.
     *
     * @param shape  the shape to texture
     * @param image  the image to tile across the shape
     * @param scaleX the scale to apply on the x axis for texturing
     * @param scaleY the scale to apply on the y axis for texturing
     * @param fill   the fill to apply
     */
    public static final void texture(final Shape shape, final Image image,
            final float scaleX, final float scaleY, final ShapeFill fill) {
        if (!validFill(shape)) {
            return;
        }
        // Remember the currently bound texture so it can be restored afterwards.
        Texture t = TextureImpl.getLastBind();
        image.getTexture().bind();
        final float center[] = shape.getCenter();
        fill(shape, new PointCallback() {
            public float[] preRenderPoint(Shape shape, float x, float y) {
                // The fill colour is sampled relative to the shape's centre.
                fill.colorAt(shape, x - center[0], y - center[1]).bind();
                Vector2f offset = fill.getOffsetAt(shape, x, y);
                x += offset.x;
                y += offset.y;
                // Map the scaled position into the image's texture coordinates.
                float tx = x * scaleX;
                float ty = y * scaleY;
                tx = image.getTextureOffsetX() + (image.getTextureWidth() * tx);
                ty = image.getTextureOffsetY() + (image.getTextureHeight() * ty);
                GL.glTexCoord2f(tx, ty);
                // NOTE(review): the offset appears to be applied twice - once to
                // x/y above and again in the returned vertex - confirm intent.
                return new float[] { offset.x + x, offset.y + y };
            }
        });
        // Restore the previous texture binding (or unbind if none was set).
        if (t == null) {
            TextureImpl.bindNone();
        } else {
            t.bind();
        }
    }
}
public class DifferentialFunction { /** * Set the value for this function .
* Note that if value is null an { @ link ND4JIllegalStateException }
* will be thrown .
* @ param target the target field
* @ param value the value to set */
public void setValueFor ( Field target , Object value ) { } } | if ( value == null && target . getType ( ) . isPrimitive ( ) ) { throw new ND4JIllegalStateException ( "Unable to set primitive field " + target + " of type " + target . getClass ( ) + " using null value!" ) ; } if ( value != null ) { value = ensureProperType ( target , value ) ; } if ( isConfigProperties ( ) ) { String propertyName = configFieldName ( ) ; if ( propertyName == null ) propertyName = "config" ; Field f = null ; Class < ? > currClass = getClass ( ) ; try { f = currClass . getDeclaredField ( propertyName ) ; } catch ( NoSuchFieldException e ) { // OK , try superclass
} while ( f == null && currClass . getSuperclass ( ) != null ) { currClass = currClass . getSuperclass ( ) ; try { f = currClass . getDeclaredField ( propertyName ) ; } catch ( NoSuchFieldException e ) { // OK , try superclass
} } if ( f == null ) { throw new IllegalStateException ( "Could not find field \"" + propertyName + "\" for class " + getClass ( ) . getName ( ) ) ; } try { f . setAccessible ( true ) ; Object o = f . get ( this ) ; if ( o == null ) { // Null config class - try to create one . . .
Class < ? > c = f . getType ( ) ; try { o = c . newInstance ( ) ; } catch ( InstantiationException e ) { throw new RuntimeException ( "Error creating new instance of configuration object type " + c . getName ( ) , e ) ; } f . set ( this , o ) ; } target . set ( o , value ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( "Error setting configuration field \"" + propertyName + "\" for config field \"" + propertyName + "\" on class " + getClass ( ) . getName ( ) ) ; } } else { try { target . set ( this , value ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( "Error setting property for function " + getClass ( ) . getName ( ) , e ) ; } } |
public class SimpleAttachable { /** * { @ inheritDoc } */
public synchronized < T > List < T > getAttachmentList ( AttachmentKey < ? extends List < T > > key ) { } } | if ( key == null ) { return null ; } List < T > list = key . cast ( attachments . get ( key ) ) ; if ( list == null ) { return Collections . emptyList ( ) ; } return list ; |
public class SimpleSectionSkin {
    /**
     * Builds the scene-graph nodes of the skin and applies the gauge's initial
     * colours, sizes and visibility flags.
     */
    private void initGraphics() {
        // Set initial size: fall back to the preferred defaults when the gauge
        // has no usable size yet.
        if (Double.compare(gauge.getPrefWidth(), 0.0) <= 0 || Double.compare(gauge.getPrefHeight(), 0.0) <= 0
                || Double.compare(gauge.getWidth(), 0.0) <= 0 || Double.compare(gauge.getHeight(), 0.0) <= 0) {
            if (gauge.getPrefWidth() > 0 && gauge.getPrefHeight() > 0) {
                gauge.setPrefSize(gauge.getPrefWidth(), gauge.getPrefHeight());
            } else {
                gauge.setPrefSize(PREFERRED_WIDTH, PREFERRED_HEIGHT);
            }
        }
        // Canvas on which the sections are painted.
        sectionCanvas = new Canvas(PREFERRED_WIDTH, PREFERRED_HEIGHT);
        sectionCtx = sectionCanvas.getGraphicsContext2D();
        // Static background arc of the bar.
        barBackground = new Arc(PREFERRED_WIDTH * 0.5, PREFERRED_HEIGHT * 0.5,
                PREFERRED_WIDTH * 0.4, PREFERRED_HEIGHT * 0.4,
                gauge.getStartAngle() + 150, ANGLE_RANGE);
        barBackground.setType(ArcType.OPEN);
        barBackground.setStroke(gauge.getBarBackgroundColor());
        barBackground.setStrokeWidth(PREFERRED_WIDTH * 0.125);
        barBackground.setStrokeLineCap(StrokeLineCap.BUTT);
        barBackground.setFill(null);
        // Foreground arc showing the current value; starts with zero extent.
        bar = new Arc(PREFERRED_WIDTH * 0.5, PREFERRED_HEIGHT * 0.5,
                PREFERRED_WIDTH * 0.4, PREFERRED_HEIGHT * 0.4,
                gauge.getStartAngle() + 90, 0);
        bar.setType(ArcType.OPEN);
        bar.setStroke(gauge.getBarColor());
        bar.setStrokeWidth(PREFERRED_WIDTH * 0.125);
        bar.setStrokeLineCap(StrokeLineCap.BUTT);
        bar.setFill(null);
        // Title, value and unit texts; hidden when the corresponding gauge
        // properties are empty or not visible.
        titleText = new Text(gauge.getTitle());
        titleText.setFill(gauge.getTitleColor());
        Helper.enableNode(titleText, !gauge.getTitle().isEmpty());
        valueText = new Text();
        valueText.setStroke(null);
        valueText.setFill(gauge.getValueColor());
        Helper.enableNode(valueText, gauge.isValueVisible());
        unitText = new Text();
        unitText.setStroke(null);
        unitText.setFill(gauge.getUnitColor());
        Helper.enableNode(unitText, gauge.isValueVisible() && !gauge.getUnit().isEmpty());
        // Assemble the nodes; the bar is added last so it renders on top.
        pane = new Pane(barBackground, sectionCanvas, titleText, valueText, unitText, bar);
        getChildren().setAll(pane);
    }
}
public class PNCounterProxy {
    /**
     * Invokes the {@code operation} recursively on viable replica addresses
     * until successful or the list of viable replicas is exhausted.
     * Replicas with addresses contained in the {@code excludedAddresses} are
     * skipped. If there are no viable replicas, this method will throw the
     * {@code lastException} if not {@code null} or a
     * {@link NoDataMemberInClusterException} if the {@code lastException} is
     * {@code null}.
     *
     * @param operation         the operation to invoke on a CRDT replica
     * @param excludedAddresses the addresses to exclude when choosing a replica
     *                          address, must not be {@code null}
     * @param lastException     the exception thrown from the last invocation of
     *                          the {@code operation} on a replica, may be {@code null}
     * @return the result of the operation invocation on a replica
     * @throws NoDataMemberInClusterException if there are no replicas and the
     *                                        {@code lastException} is {@code null}
     */
    private long invokeInternal(Operation operation, List<Address> excludedAddresses,
                                HazelcastException lastException) {
        final Address target = getCRDTOperationTarget(excludedAddresses);
        if (target == null) {
            // No replica left to try: surface the most recent failure, if any.
            throw lastException != null
                    ? lastException
                    : new NoDataMemberInClusterException(
                            "Cannot invoke operations on a CRDT because the cluster does not contain any data members");
        }
        try {
            final InvocationBuilder builder = getNodeEngine().getOperationService()
                    .createInvocationBuilder(SERVICE_NAME, operation, target);
            if (operationTryCount > 0) {
                builder.setTryCount(operationTryCount);
            }
            final InternalCompletableFuture<CRDTTimestampedLong> future = builder.invoke();
            final CRDTTimestampedLong result = future.join();
            // Track the replica's vector clock so later reads observe at least
            // this state.
            updateObservedReplicaTimestamps(result.getVectorClock());
            return result.getValue();
        } catch (HazelcastException e) {
            logger.fine("Exception occurred while invoking operation on target " + target
                    + ", choosing different target", e);
            if (excludedAddresses == EMPTY_ADDRESS_LIST) {
                // Shared immutable sentinel: switch to a private mutable list
                // before adding to it.
                excludedAddresses = new ArrayList<Address>();
            }
            excludedAddresses.add(target);
            // Retry on a different replica, remembering this failure.
            return invokeInternal(operation, excludedAddresses, e);
        }
    }
}
public class Stack { /** * The capabilities allowed in the stack .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setCapabilities ( java . util . Collection ) } or { @ link # withCapabilities ( java . util . Collection ) } if you want to
* override the existing values .
* @ param capabilities
* The capabilities allowed in the stack .
* @ return Returns a reference to this object so that method calls can be chained together .
* @ see Capability */
public Stack withCapabilities ( String ... capabilities ) { } } | if ( this . capabilities == null ) { setCapabilities ( new com . amazonaws . internal . SdkInternalList < String > ( capabilities . length ) ) ; } for ( String ele : capabilities ) { this . capabilities . add ( ele ) ; } return this ; |
public class Error { /** * Factory method for Error . Returns Error object from id .
* @ param client the client
* @ param id error id
* @ return information about one user error
* @ throws IOException unexpected error . */
public static Error get ( final BandwidthClient client , final String id ) throws Exception { } } | final String errorsUri = client . getUserResourceInstanceUri ( BandwidthConstants . ERRORS_URI_PATH , id ) ; final JSONObject jsonObject = toJSONObject ( client . get ( errorsUri , null ) ) ; return new Error ( client , errorsUri , jsonObject ) ; |
public class StreamEx { /** * Returns an { @ link EntryStream } consisting of the { @ link Entry } objects
* which keys are elements of this stream and values are results of applying
* the given function to the elements of this stream .
* This is an < a href = " package - summary . html # StreamOps " > intermediate < / a >
* operation .
* @ param < V > The { @ code Entry } value type
* @ param valueMapper a non - interfering , stateless function to apply to each
* element
* @ return the new stream */
public < V > EntryStream < T , V > mapToEntry ( Function < ? super T , ? extends V > valueMapper ) { } } | return new EntryStream < > ( stream ( ) . map ( e -> new SimpleImmutableEntry < > ( e , valueMapper . apply ( e ) ) ) , context ) ; |
public class TfidfVectorizer { /** * Vectorizes the passed in text treating it as one document
* @ param text the text to vectorize
* @ param label the label of the text
* @ return a dataset with a transform of weights ( relative to impl ; could be word counts or tfidf scores ) */
@ Override public DataSet vectorize ( String text , String label ) { } } | INDArray input = transform ( text ) ; INDArray labelMatrix = FeatureUtil . toOutcomeVector ( labelsSource . indexOf ( label ) , labelsSource . size ( ) ) ; return new DataSet ( input , labelMatrix ) ; |
public class Cron4jJob {
    /**
     * If the job is currently executing, hands its snapshot state to the given
     * callback and returns an "if present" continuation that does nothing;
     * otherwise returns a continuation that runs the caller-supplied processor.
     */
    @Override
    public OptionalThingIfPresentAfter ifExecutingNow(Consumer<SnapshotExecState> oneArgLambda) {
        return mapExecutingNow(execState -> {
            oneArgLambda.accept(execState);
            // Cast is needed: the bare lambda is ambiguous for the mapped type.
            return (OptionalThingIfPresentAfter) (processor -> {});
        }).orElseGet(() -> {
            return processor -> processor.process();
        });
    }
}
public class TargetSpecifications { /** * { @ link Specification } for retrieving { @ link Target } s that are overdue . A
* target is overdue if it did not respond during the configured
* intervals : < br >
* < em > poll _ itvl + overdue _ itvl < / em >
* @ param overdueTimestamp
* the calculated timestamp to compare with the last respond of a
* target ( lastTargetQuery ) . < br >
* The < code > overdueTimestamp < / code > has to be calculated with
* the following expression : < br >
* < em > overdueTimestamp = nowTimestamp - poll _ itvl -
* overdue _ itvl < / em >
* @ return the { @ link Target } { @ link Specification } */
public static Specification < JpaTarget > isOverdue ( final long overdueTimestamp ) { } } | return ( targetRoot , query , cb ) -> cb . lessThanOrEqualTo ( targetRoot . get ( JpaTarget_ . lastTargetQuery ) , overdueTimestamp ) ; |
public class BackendCleanup {
    /**
     * Deletes every organization except the default one - the default
     * organization must never be deleted.
     *
     * @param tableName    unused here - presumably kept for signature parity with
     *                     other truncate handlers; confirm against the caller
     * @param ddlStatement unused here - the delete runs through its own prepared
     *                     statement
     * @param connection   the connection on which the delete is executed and committed
     * @throws SQLException if the delete fails
     */
    private static void truncateOrganizations(String tableName, Statement ddlStatement, Connection connection)
            throws SQLException {
        try (PreparedStatement preparedStatement = connection
                .prepareStatement("delete from organizations where kee <> ?")) {
            // Keep the row whose key is the default organization.
            preparedStatement.setString(1, "default-organization");
            preparedStatement.execute();
            // commit is useless on some databases (auto-commit), harmless otherwise
            connection.commit();
        }
    }
}
public class IPAddressDivision {
    /**
     * Produces a string to represent the segment, favouring wildcards and range
     * characters over the network prefix to represent subnets.
     * If it exists, the segment CIDR prefix is ignored and the explicit range is
     * printed.
     *
     * @return the wildcard string for this division
     */
    @Override
    public String getWildcardString() {
        String result = cachedWildcardString;
        if (result == null) {
            // Double-checked caching: re-read under the lock so a value computed
            // by another thread is not recomputed.
            // NOTE(review): the cache field is not visibly volatile here - benign
            // for immutable Strings in practice, but confirm the field declaration.
            synchronized (this) {
                result = cachedWildcardString;
                if (result == null) {
                    if (!isPrefixed() || !isMultiple()) {
                        // No prefix, or a single value: the plain string suffices.
                        result = getString();
                    } else if (isFullRange()) {
                        // The division spans its entire range: use the wildcard.
                        result = IPAddress.SEGMENT_WILDCARD_STR;
                    } else {
                        result = getDefaultRangeString();
                    }
                    cachedWildcardString = result;
                }
            }
        }
        return result;
    }
}
public class CheckpointStatsCache { /** * Try to add the checkpoint to the cache .
* @ param checkpoint Checkpoint to be added . */
public void tryAdd ( AbstractCheckpointStats checkpoint ) { } } | // Don ' t add in progress checkpoints as they will be replaced by their
// completed / failed version eventually .
if ( cache != null && checkpoint != null && ! checkpoint . getStatus ( ) . isInProgress ( ) ) { cache . put ( checkpoint . getCheckpointId ( ) , checkpoint ) ; } |
public class DataSourceService { /** * Utility method that converts transaction isolation level constant names
* to the corresponding int value .
* @ param wProps WAS data source properties , including the configured isolationLevel property .
* @ param vendorImplClassName name of the vendor data source or driver implementation class .
* @ return Integer transaction isolation level constant value . If unknown , then the original String value . */
private static final void parseIsolationLevel ( NavigableMap < String , Object > wProps , String vendorImplClassName ) { } } | // Convert isolationLevel constant name to integer
Object isolationLevel = wProps . get ( DataSourceDef . isolationLevel . name ( ) ) ; if ( isolationLevel instanceof String ) { isolationLevel = "TRANSACTION_READ_COMMITTED" . equals ( isolationLevel ) ? Connection . TRANSACTION_READ_COMMITTED : "TRANSACTION_REPEATABLE_READ" . equals ( isolationLevel ) ? Connection . TRANSACTION_REPEATABLE_READ : "TRANSACTION_SERIALIZABLE" . equals ( isolationLevel ) ? Connection . TRANSACTION_SERIALIZABLE : "TRANSACTION_READ_UNCOMMITTED" . equals ( isolationLevel ) ? Connection . TRANSACTION_READ_UNCOMMITTED : "TRANSACTION_NONE" . equals ( isolationLevel ) ? Connection . TRANSACTION_NONE : "TRANSACTION_SNAPSHOT" . equals ( isolationLevel ) ? ( vendorImplClassName . startsWith ( "com.microsoft." ) ? 4096 : 16 ) : isolationLevel ; wProps . put ( DataSourceDef . isolationLevel . name ( ) , isolationLevel ) ; } |
public class GlobalTypeImpl {
    /**
     * <!-- begin-user-doc -->
     * Reports whether the given feature deviates from its default value.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case DroolsPackage.GLOBAL_TYPE__IDENTIFIER:
                // Set when the identifier differs from its (possibly null) default.
                return IDENTIFIER_EDEFAULT == null ? identifier != null : !IDENTIFIER_EDEFAULT.equals(identifier);
            case DroolsPackage.GLOBAL_TYPE__TYPE:
                // Set when the type differs from its (possibly null) default.
                return TYPE_EDEFAULT == null ? type != null : !TYPE_EDEFAULT.equals(type);
        }
        // Unknown features are handled by the superclass.
        return super.eIsSet(featureID);
    }
}
public class Filtering {
    /**
     * Creates an iterator yielding values from the source iterator for as long
     * as the passed predicate matches, stopping at the first non-matching
     * element. E.g.:
     * <code>takeWhile([2, 4, 3, 6], isEven) -> [2, 4]</code>
     *
     * @param <E>       the iterator element type
     * @param iterator  the source iterator
     * @param predicate the predicate to be evaluated
     * @return the resulting (lazily evaluated) iterator
     */
    public static <E> Iterator<E> takeWhile(Iterator<E> iterator, Predicate<E> predicate) {
        // Evaluation is lazy: source elements are consumed only as requested.
        return new TakeWhileIterator<E>(iterator, predicate);
    }
}
public class MapFileHeader {
    /**
     * Reads and validates the header block from the map file.
     * The fields must be read in exactly this order: each read advances the
     * buffer position to the start of the next header field.
     *
     * @param readBuffer the ReadBuffer for the file data
     * @param fileSize   the size of the map file in bytes
     * @throws IOException if an error occurs while reading the file
     */
    public void readHeader(ReadBuffer readBuffer, long fileSize) throws IOException {
        RequiredFields.readMagicByte(readBuffer);
        RequiredFields.readRemainingHeader(readBuffer);
        // Collect the header values into a builder, then freeze at the end.
        MapFileInfoBuilder mapFileInfoBuilder = new MapFileInfoBuilder();
        RequiredFields.readFileVersion(readBuffer, mapFileInfoBuilder);
        RequiredFields.readFileSize(readBuffer, fileSize, mapFileInfoBuilder);
        RequiredFields.readMapDate(readBuffer, mapFileInfoBuilder);
        RequiredFields.readBoundingBox(readBuffer, mapFileInfoBuilder);
        RequiredFields.readTilePixelSize(readBuffer, mapFileInfoBuilder);
        RequiredFields.readProjectionName(readBuffer, mapFileInfoBuilder);
        // Optional fields sit between the projection name and the POI tags.
        OptionalFields.readOptionalFields(readBuffer, mapFileInfoBuilder);
        RequiredFields.readPoiTags(readBuffer, mapFileInfoBuilder);
        RequiredFields.readWayTags(readBuffer, mapFileInfoBuilder);
        readSubFileParameters(readBuffer, fileSize, mapFileInfoBuilder);
        // Freeze the collected data into the immutable MapFileInfo.
        this.mapFileInfo = mapFileInfoBuilder.build();
    }
}
public class BeanDefinitionParser {
    /**
     * Return a typed String value Object for the given 'idref' element.
     *
     * @param ele a {@link org.w3c.dom.Element} object
     * @return a {@link java.lang.Object} object - a bean-name reference, or
     *         {@code null} if the element is invalid (an error is reported)
     */
    public Object parseIdRefElement(Element ele) {
        // A generic reference to any name of any bean.
        String refName = ele.getAttribute(BEAN_REF_ATTRIBUTE);
        if (!StringUtils.hasLength(refName)) {
            // Fall back to a reference to the id of another bean in the same XML file.
            refName = ele.getAttribute(LOCAL_REF_ATTRIBUTE);
            if (!StringUtils.hasLength(refName)) {
                error("Either 'bean' or 'local' is required for <idref> element", ele);
                return null;
            }
        }
        // hasText is stricter than hasLength: reject whitespace-only names too.
        if (!StringUtils.hasText(refName)) {
            error("<idref> element contains empty target attribute", ele);
            return null;
        }
        RuntimeBeanNameReference ref = new RuntimeBeanNameReference(refName);
        // Attach the XML source location for error reporting/tooling.
        ref.setSource(extractSource(ele));
        return ref;
    }
}
public class MaybeT {
    /**
     * {@inheritDoc}
     *
     * <p>Zips through the {@code Compose} applicative of the underlying
     * {@code M<Maybe<A>>}, then re-wraps the composed result into a
     * {@code MaybeT}.
     */
    @Override
    public <B> Lazy<MaybeT<M, B>> lazyZip(
            Lazy<? extends Applicative<Function<? super A, ? extends B>, MonadT<M, Maybe<?>, ?>>> lazyAppFn) {
        // The explicit coerce()/run() type witnesses guide inference through the
        // nested Maybe/Monad type arguments.
        return new Compose<>(mma)
                .lazyZip(lazyAppFn.fmap(maybeT -> new Compose<>(
                        maybeT.<MaybeT<M, Function<? super A, ? extends B>>>coerce()
                                .<Maybe<Function<? super A, ? extends B>>, Monad<Maybe<Function<? super A, ? extends B>>, M>>run())))
                .fmap(compose -> maybeT(compose.getCompose()));
    }
}
public class Assert { /** * Assert that a collection contains elements ; that is , it must not be { @ code null } and must
* contain at least one element .
* < pre class = " code " >
* Assert . notEmpty ( collection , ( ) - & gt ; " The " + collectionType + " collection must contain elements " ) ;
* < / pre >
* @ param collection the collection to check
* @ param messageSupplier a supplier for the exception message to use if the assertion fails
* @ throws IllegalArgumentException if the collection is { @ code null } or contains no elements
* @ since 5.0 */
public static void notEmpty ( @ Nullable final Collection < ? > collection , final Supplier < String > messageSupplier ) { } } | if ( CollectionUtils . isEmpty ( collection ) ) { throw new IllegalArgumentException ( Assert . nullSafeGet ( messageSupplier ) ) ; } |
public class TemplateCompiler {
    /**
     * Returns the list of classes needed to implement this template.
     *
     * <p>For each template, we generate:
     * <ul>
     *   <li>A {@link com.google.template.soy.jbcsrc.shared.CompiledTemplate.Factory}
     *   <li>A {@link CompiledTemplate}
     *   <li>A DetachableSoyValueProvider subclass for each {@link LetValueNode} and {@link
     *       CallParamValueNode}
     *   <li>A DetachableContentProvider subclass for each {@link LetContentNode} and {@link
     *       CallParamContentNode}
     * </ul>
     *
     * <p>Note: This will <em>not</em> generate classes for other templates, only the template
     * configured in the constructor. But it will generate classes that <em>reference</em> the
     * classes that are generated for other templates. It is the callers responsibility to
     * ensure that all referenced templates are generated and available in the classloader that
     * ultimately loads the returned classes.
     */
    Iterable<ClassData> compile() {
        List<ClassData> classes = new ArrayList<>();
        // first generate the factory
        if (templateNode.getVisibility() != Visibility.PRIVATE) {
            // Don't generate factory if the template is private. The factories are only
            // useful to instantiate templates for calls from java. Soy->Soy calls should invoke
            // constructors directly.
            new TemplateFactoryCompiler(template, templateNode, innerClasses).compile();
        }
        // NOTE(review): 'writer' looks like an instance field shared with the generate*
        // helpers below and nulled out at the end — confirm this method is never invoked
        // concurrently on the same instance.
        writer =
            SoyClassWriter.builder(template.typeInfo())
                .setAccess(Opcodes.ACC_PUBLIC + Opcodes.ACC_SUPER + Opcodes.ACC_FINAL)
                .implementing(TEMPLATE_TYPE)
                .sourceFileName(templateNode.getSourceLocation().getFileName())
                .build();
        generateTemplateMetadata();
        generateKindMethod();
        // Declare the fields backing template state, params and injected data.
        stateField.defineField(writer);
        paramsField.defineField(writer);
        ijField.defineField(writer);
        for (FieldRef field : paramFields.values()) {
            field.defineField(writer);
        }
        // The render method is generated before the constructor so that the default
        // parameter initializers it produces can be wired into the constructor.
        ImmutableMap<TemplateParam, SoyExpression> defaultParamInitializers = generateRenderMethod();
        generateConstructor(defaultParamInitializers);
        innerClasses.registerAllInnerClasses(writer);
        writer.visitEnd();
        // The main template class comes first, followed by any inner classes generated above.
        classes.add(writer.toClassData());
        classes.addAll(innerClasses.getInnerClassData());
        writer = null;
        return classes;
    }
}
public class AddressUpdater { /** * Add the requested post parameters to the Request .
* @ param request Request to add post params to */
private void addPostParams ( final Request request ) { } } | if ( friendlyName != null ) { request . addPostParam ( "FriendlyName" , friendlyName ) ; } if ( customerName != null ) { request . addPostParam ( "CustomerName" , customerName ) ; } if ( street != null ) { request . addPostParam ( "Street" , street ) ; } if ( city != null ) { request . addPostParam ( "City" , city ) ; } if ( region != null ) { request . addPostParam ( "Region" , region ) ; } if ( postalCode != null ) { request . addPostParam ( "PostalCode" , postalCode ) ; } if ( emergencyEnabled != null ) { request . addPostParam ( "EmergencyEnabled" , emergencyEnabled . toString ( ) ) ; } if ( autoCorrectAddress != null ) { request . addPostParam ( "AutoCorrectAddress" , autoCorrectAddress . toString ( ) ) ; } |
public class UtilValidate { /** * isUSPhoneAreaCode returns true if string s is a valid U . S . Phone Area Code . Must be 3 digits . */
public static boolean isUSPhoneAreaCode ( String s ) { } } | if ( isEmpty ( s ) ) return defaultEmptyOK ; String normalizedPhone = stripCharsInBag ( s , phoneNumberDelimiters ) ; return ( isInteger ( normalizedPhone ) && normalizedPhone . length ( ) == digitsInUSPhoneAreaCode ) ; |
public class BTCTradeMarketDataService { /** * { @ inheritDoc } */
@ Override public Trades getTrades ( CurrencyPair currencyPair , Object ... args ) throws IOException { } } | final BTCTradeTrade [ ] trades ; if ( args == null || args . length == 0 ) { trades = getBTCTradeTrades ( ) ; } else { trades = getBTCTradeTrades ( toLong ( args [ 0 ] ) ) ; } return BTCTradeAdapters . adaptTrades ( trades , currencyPair ) ; |
public class Utils {
    /**
     * Reads properties from a file.
     *
     * @param file a properties file
     * @return a {@link Properties} instance holding all key/value pairs from the file
     * @throws IOException if the file cannot be opened or read
     */
    public static Properties readPropertiesFile(File file) throws IOException {
        Properties result = new Properties();
        // try-with-resources closes the stream on every exit path, replacing the
        // manual try/finally + closeQuietly idiom.
        try (InputStream in = new FileInputStream(file)) {
            result.load(in);
        }
        return result;
    }
}
public class UpdateManager { /** * Finds the { @ link FileDownloader } to use for this repository .
* @ param pluginId the plugin we wish to download
* @ return FileDownloader instance */
protected FileDownloader getFileDownloader ( String pluginId ) { } } | for ( UpdateRepository ur : repositories ) { if ( ur . getPlugin ( pluginId ) != null && ur . getFileDownloader ( ) != null ) { return ur . getFileDownloader ( ) ; } } return new SimpleFileDownloader ( ) ; |
public class XBlockExpressionImpl {
    /**
     * <!-- begin-user-doc -->
     * Unsets the given structural feature back to its default state.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            // Unsetting the expressions feature empties the list rather than nulling it.
            case XbasePackage.XBLOCK_EXPRESSION__EXPRESSIONS:
                getExpressions().clear();
                return;
        }
        // Any other feature is handled by the superclass.
        super.eUnset(featureID);
    }
}
public class HtmlForm {
    /**
     * <p>Set the value of the <code>accept</code> property.</p>
     *
     * @param accept the new value of the <code>accept</code> property
     */
    public void setAccept(java.lang.String accept) {
        // Persist the value via the JSF StateHelper so it participates in state saving.
        getStateHelper().put(PropertyKeys.accept, accept);
        // NOTE(review): handleAttribute presumably records this as a renderable
        // pass-through attribute — confirm against the component base class.
        handleAttribute("accept", accept);
    }
}
public class RowListMetaData { /** * { @ inheritDoc } */
public boolean isSigned ( final int column ) throws SQLException { } } | final int type = getColumnType ( column ) ; if ( type == - 1 ) { return false ; } // end of if
final Boolean s = Defaults . jdbcTypeSigns . get ( type ) ; return ( s == null ) ? false : s ; |
public class ActionSupport {
    /**
     * Add action error.
     *
     * @param msgKey message resource key to resolve
     * @param args   arguments substituted into the resolved message
     */
    protected final void addError(String msgKey, Object... args) {
        // Resolve the localized message text, then flash it for immediate display.
        getFlash().addErrorNow(getTextInternal(msgKey, args));
    }
}
public class CommerceWarehouseItemLocalServiceWrapper {
    /**
     * Updates the commerce warehouse item in the database or adds it if it does not yet exist.
     * Also notifies the appropriate model listeners.
     *
     * @param commerceWarehouseItem the commerce warehouse item
     * @return the commerce warehouse item that was updated
     */
    @Override
    public com.liferay.commerce.model.CommerceWarehouseItem updateCommerceWarehouseItem(
        com.liferay.commerce.model.CommerceWarehouseItem commerceWarehouseItem) {
        // Pure delegation: this wrapper forwards to the wrapped local service instance.
        return _commerceWarehouseItemLocalService.updateCommerceWarehouseItem(commerceWarehouseItem);
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.