signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class XmlUtil { /** * 在已有节点上创建子节点 * @ param node 节点 * @ param tagName 标签名 * @ return 子节点 * @ since 4.0.9 */ public static Element appendChild ( Node node , String tagName ) { } }
Document doc = ( node instanceof Document ) ? ( Document ) node : node . getOwnerDocument ( ) ; Element child = doc . createElement ( tagName ) ; node . appendChild ( child ) ; return child ;
public class Version { /** * < pre > * Automatic scaling is based on request rate , response latencies , and other * application metrics . * < / pre > * < code > . google . appengine . v1 . AutomaticScaling automatic _ scaling = 3 ; < / code > */ public com . google . appengine . v1 . AutomaticScalingOrBuilder getAutomaticScalingOrBuilder ( ) { } }
if ( scalingCase_ == 3 ) { return ( com . google . appengine . v1 . AutomaticScaling ) scaling_ ; } return com . google . appengine . v1 . AutomaticScaling . getDefaultInstance ( ) ;
public class CmsSubscriptionManager { /** * Mark the given resource as visited by the user . < p > * @ param cms the current users context * @ param resourcePath the name of the resource to mark as visited * @ param user the user that visited the resource * @ throws CmsException if something goes wrong */ public void markResourceAsVisitedBy ( CmsObject cms , String resourcePath , CmsUser user ) throws CmsException { } }
CmsResource resource = cms . readResource ( resourcePath , CmsResourceFilter . ALL ) ; markResourceAsVisitedBy ( cms , resource , user ) ;
public class ContainerDefinition { /** * A list of DNS search domains that are presented to the container . This parameter maps to < code > DnsSearch < / code > * in the < a href = " https : / / docs . docker . com / engine / api / v1.35 / # operation / ContainerCreate " > Create a container < / a > * section of the < a href = " https : / / docs . docker . com / engine / api / v1.35 / " > Docker Remote API < / a > and the * < code > - - dns - search < / code > option to < a href = " https : / / docs . docker . com / engine / reference / run / " > docker run < / a > . * < note > * This parameter is not supported for Windows containers . * < / note > * @ param dnsSearchDomains * A list of DNS search domains that are presented to the container . This parameter maps to * < code > DnsSearch < / code > in the < a * href = " https : / / docs . docker . com / engine / api / v1.35 / # operation / ContainerCreate " > Create a container < / a > section * of the < a href = " https : / / docs . docker . com / engine / api / v1.35 / " > Docker Remote API < / a > and the * < code > - - dns - search < / code > option to < a href = " https : / / docs . docker . com / engine / reference / run / " > docker * run < / a > . < / p > < note > * This parameter is not supported for Windows containers . */ public void setDnsSearchDomains ( java . util . Collection < String > dnsSearchDomains ) { } }
if ( dnsSearchDomains == null ) { this . dnsSearchDomains = null ; return ; } this . dnsSearchDomains = new com . amazonaws . internal . SdkInternalList < String > ( dnsSearchDomains ) ;
public class SavedRevision { /** * The contents of this revision of the document . * Any keys in the dictionary that begin with " _ " , such as " _ id " and " _ rev " , contain * CouchbaseLite metadata . * @ return contents of this revision of the document . */ @ Override @ InterfaceAudience . Public public Map < String , Object > getProperties ( ) { } }
Map < String , Object > properties = revisionInternal . getProperties ( ) ; if ( ! checkedProperties ) { if ( properties == null ) { if ( loadProperties ( ) == true ) { properties = revisionInternal . getProperties ( ) ; } } checkedProperties = true ; } return properties != null ? Collections . unmodifiableMap ( properties ) : null ;
public class ExplodeAnimation { /** * Initializes the animation with the attributes of the given explode info object . */ protected void init ( ExplodeInfo info , int x , int y , int width , int height ) { } }
_info = info ; _ox = x ; _oy = y ; _owid = width ; _ohei = height ; _info . rvel = ( float ) ( ( 2.0f * Math . PI ) * _info . rvel ) ; _chunkcount = ( _info . xchunk * _info . ychunk ) ; _cxpos = new int [ _chunkcount ] ; _cypos = new int [ _chunkcount ] ; _sxvel = new float [ _chunkcount ] ; _syvel = new float [ _chunkcount ] ; // determine chunk dimensions _cwid = _owid / _info . xchunk ; _chei = _ohei / _info . ychunk ; _hcwid = _cwid / 2 ; _hchei = _chei / 2 ; // initialize all chunks for ( int ii = 0 ; ii < _chunkcount ; ii ++ ) { // initialize chunk position int xpos = ii % _info . xchunk ; int ypos = ii / _info . xchunk ; _cxpos [ ii ] = _ox + ( xpos * _cwid ) ; _cypos [ ii ] = _oy + ( ypos * _chei ) ; // initialize chunk velocity _sxvel [ ii ] = RandomUtil . getFloat ( _info . xvel ) * ( ( xpos < ( _info . xchunk / 2 ) ) ? - 1.0f : 1.0f ) ; _syvel [ ii ] = - ( RandomUtil . getFloat ( _info . yvel ) ) ; } // initialize the chunk rotation angle _angle = 0.0f ;
public class JavaDocument { /** * Overide bracket matching for other languages */ protected String addWhiteSpace ( int offset ) throws BadLocationException { } }
StringBuilder whiteSpace = new StringBuilder ( ) ; int line = _root . getElementIndex ( offset ) ; int i = _root . getElement ( line ) . getStartOffset ( ) ; while ( true ) { String temp = getText ( i , 1 ) ; if ( temp . equals ( " " ) || temp . equals ( "\t" ) ) { whiteSpace . append ( temp ) ; i ++ ; } else { break ; } } return "\n" + whiteSpace ;
public class JMThread { /** * Gets thread queue . * @ param executorService the executor service * @ return the thread queue */ public static BlockingQueue < Runnable > getThreadQueue ( ExecutorService executorService ) { } }
if ( executorService instanceof ThreadPoolExecutor ) return ( ( ThreadPoolExecutor ) executorService ) . getQueue ( ) ; throw JMExceptionManager . handleExceptionAndReturnRuntimeEx ( log , new IllegalArgumentException ( "Unsupported ExecutorService - Use ThrJMThread.newThreadPool Or newSingleThreadPool To Get ExecutorService !!!" ) , "getThreadQueue" , executorService ) ;
public class DisassociateConnectionFromLagRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DisassociateConnectionFromLagRequest disassociateConnectionFromLagRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( disassociateConnectionFromLagRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( disassociateConnectionFromLagRequest . getConnectionId ( ) , CONNECTIONID_BINDING ) ; protocolMarshaller . marshall ( disassociateConnectionFromLagRequest . getLagId ( ) , LAGID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ChaiUtility { /** * Derives a unique entry name for an ldap container . Assumes CN as the naming attribute . * @ param baseName A text name that will be used for the base of the obejct name . Punctuation and spaces will be stripped . * @ param containerDN Directory container in which to check for a unique name * @ param provider ChaiProvider to use for ldap connection * @ return Fully qualified unique object name for the container specified . * @ throws ChaiOperationException If there is an error during the operation * @ throws ChaiUnavailableException If the directory server ( s ) are unavailable */ public static String findUniqueName ( final String baseName , final String containerDN , final ChaiProvider provider ) throws ChaiOperationException , ChaiUnavailableException { } }
char ch ; final StringBuilder cnStripped = new StringBuilder ( ) ; final String effectiveBasename = ( baseName == null ) ? "" : baseName ; // First boil down the root name . Preserve only the alpha - numerics . for ( int i = 0 ; i < effectiveBasename . length ( ) ; i ++ ) { ch = effectiveBasename . charAt ( i ) ; if ( Character . isLetterOrDigit ( ch ) ) { cnStripped . append ( ch ) ; } } if ( cnStripped . length ( ) == 0 ) { // Generate a random seed to runServer with , how about the current date cnStripped . append ( System . currentTimeMillis ( ) ) ; } // Now we have a base name , let ' s runServer testing it . . . String uniqueCN ; StringBuilder filter ; final Random randomNumber = new Random ( ) ; String stringCounter = null ; // Start with a random 3 digit number int counter = randomNumber . nextInt ( ) % 1000 ; while ( true ) { // Initialize the String Buffer and Unique DN . filter = new StringBuilder ( 64 ) ; if ( stringCounter != null ) { uniqueCN = cnStripped . append ( stringCounter ) . toString ( ) ; } else { uniqueCN = cnStripped . toString ( ) ; } filter . append ( "(" ) . append ( ChaiConstant . ATTR_LDAP_COMMON_NAME ) . append ( "=" ) . append ( uniqueCN ) . append ( ")" ) ; final Map < String , Map < String , String > > results = provider . search ( containerDN , filter . toString ( ) , null , SearchScope . ONE ) ; if ( results . size ( ) == 0 ) { // No object found ! break ; } else { // Increment it every time stringCounter = Integer . toString ( counter ++ ) ; } } return uniqueCN ;
public class MonitorRegistry { /** * Returns all service monitors . */ public List < ServiceMonitor > getServiceMonitors ( ) { } }
List < ServiceMonitor > serviceMonitors = new ArrayList < > ( ) ; serviceMonitors . addAll ( getDynamicServices ( ServiceMonitor . class ) ) ; return serviceMonitors ;
public class PreauthorizationService { /** * Returns and refresh data of a specific { @ link Preauthorization } . * @ param preauthorization * A { @ link Preauthorization } with Id . * @ return Refreshed instance of the given { @ link Preauthorization } . */ public Preauthorization get ( final Preauthorization preauthorization ) { } }
return RestfulUtils . show ( PreauthorizationService . PATH , preauthorization , Preauthorization . class , super . httpClient ) ;
public class StreamSegmentService { /** * region StreamSegmentStore Implementation */ @ Override public CompletableFuture < Void > append ( String streamSegmentName , byte [ ] data , Collection < AttributeUpdate > attributeUpdates , Duration timeout ) { } }
return invoke ( streamSegmentName , container -> container . append ( streamSegmentName , data , attributeUpdates , timeout ) , "append" , streamSegmentName , data . length , attributeUpdates ) ;
public class TopologyBuilder { /** * Define a new bolt in this topology with the specified amount of parallelism . * @ param id the id of this component . This id is referenced by other components that want to consume this bolt ' s outputs . * @ param bolt the bolt * @ param parallelismHint the number of tasks that should be assigned to execute this bolt . Each task will run on a thread in a process somewhere around the cluster . * @ return use the returned object to declare the inputs to this component */ public BoltDeclarer setBolt ( String id , IRichBolt bolt , Number parallelismHint ) { } }
validateComponentName ( id ) ; BoltDeclarer b = new BoltDeclarer ( id , bolt , parallelismHint ) ; bolts . put ( id , b ) ; return b ;
public class AkubraLowlevelStorage {
    /**
     * Overwrites the content of the given blob in a way that guarantees the
     * original content is not destroyed until the replacement is successfully
     * put in its place.
     *
     * Protocol: write the replacement to origId/new, rename the original to
     * origId/old, rename origId/new into place, then delete origId/old. Each
     * rename step has its own recovery path so a failure never loses the
     * original content.
     *
     * @param origBlob the blob whose content is being replaced
     * @param content  stream supplying the replacement content; consumed here
     */
    private static void safeOverwrite(Blob origBlob, InputStream content) {
        BlobStoreConnection connection = origBlob.getConnection();
        String origId = origBlob.getId().toString();

        // write new content to origId/new
        Blob newBlob = null;
        try {
            newBlob = connection.getBlob(new URI(origId + "/new"), null);
            copy(content, newBlob.openOutputStream(-1, false));
        } catch (Throwable th) {
            // any error or exception here is an unrecoverable fault
            throw new FaultException(th);
        }

        // At this point, we have origId (with old content) and origId/new
        // rename origId to origId/old
        Blob oldBlob = null;
        try {
            oldBlob = rename(origBlob, origId + "/old");
        } finally {
            if (oldBlob == null) {
                // rename failed; attempt recovery (drop the staged new blob)
                // before the fault propagates out of the finally block
                try {
                    delete(newBlob);
                } catch (Throwable th) {
                    logger.error("Failed to delete " + newBlob.getId() + " while"
                            + " recovering from rename failure during safe"
                            + " overwrite", th);
                }
            }
        }

        // At this point, we have origId/old and origId/new
        // rename origId/new to origId
        boolean successful = false;
        try {
            rename(newBlob, origId);
            successful = true;
        } finally {
            if (!successful) {
                // rename failed; attempt recovery before throwing the fault:
                // restore the old blob to its original id, then drop the
                // never-committed new blob
                try {
                    rename(oldBlob, origId);
                } catch (Throwable th) {
                    logger.error("Failed to rename " + oldBlob.getId() + " to "
                            + origId + " while recovering from rename"
                            + " failure during safe overwrite", th);
                }
                try {
                    newBlob.delete();
                } catch (Throwable th) {
                    logger.error("Failed to delete " + newBlob.getId()
                            + " while recovering from rename"
                            + " failure during safe overwrite", th);
                }
            }
        }

        // At this point, we have origId (with new content) and origId/old
        // remove origId/old; we don't need it anymore (best-effort: failure is
        // only logged, since the overwrite itself has already committed)
        try {
            delete(oldBlob);
        } catch (Throwable th) {
            logger.error("Failed to delete " + oldBlob.getId()
                    + " while cleaning up after committed"
                    + " safe overwrite", th);
        }
    }
}
public class Hasher { /** * Hashes byte array . * @ return Hash of the given byte array . */ protected String hashByteArray ( byte [ ] data ) { } }
if ( mCRC32 != null ) { return hashCRC32 ( data ) ; } else if ( mHashAlgorithm != null ) { return messageDigest ( data ) ; } else { // Default . return hashCRC32 ( data ) ; }
public class PrivacyMetric { /** * < code > . google . privacy . dlp . v2 . PrivacyMetric . KAnonymityConfig k _ anonymity _ config = 3 ; < / code > */ public com . google . privacy . dlp . v2 . PrivacyMetric . KAnonymityConfig getKAnonymityConfig ( ) { } }
if ( typeCase_ == 3 ) { return ( com . google . privacy . dlp . v2 . PrivacyMetric . KAnonymityConfig ) type_ ; } return com . google . privacy . dlp . v2 . PrivacyMetric . KAnonymityConfig . getDefaultInstance ( ) ;
public class HelperBase { /** * Copy values from delegate RepositoryOperation to this ServiceOperation */ private void addDefaultValues ( ServiceOperation operation ) { } }
if ( operation . getDelegate ( ) != null ) { copyFromDelegate ( operation , operation . getDelegate ( ) , true ) ; } else if ( operation . getServiceDelegate ( ) != null ) { // make sure that the service delegate has been populated first addDefaultValues ( operation . getServiceDelegate ( ) ) ; // recursive call // ( circular dependencies not allowed ) copyFromDelegate ( operation , operation . getServiceDelegate ( ) , true ) ; }
public class MeanVariance { /** * Create and initialize a new array of MeanVariance * @ param dimensionality Dimensionality * @ return New and initialized Array */ public static MeanVariance [ ] newArray ( int dimensionality ) { } }
MeanVariance [ ] arr = new MeanVariance [ dimensionality ] ; for ( int i = 0 ; i < dimensionality ; i ++ ) { arr [ i ] = new MeanVariance ( ) ; } return arr ;
public class AggregateMojo { /** * Scans the dependencies of the projects in aggregate . * @ param engine the engine used to perform the scanning * @ return a collection of exceptions * @ throws MojoExecutionException thrown if a fatal exception occurs */ @ Override protected ExceptionCollection scanDependencies ( final Engine engine ) throws MojoExecutionException { } }
ExceptionCollection exCol = scanArtifacts ( getProject ( ) , engine , true ) ; for ( MavenProject childProject : getDescendants ( this . getProject ( ) ) ) { final ExceptionCollection ex = scanArtifacts ( childProject , engine , true ) ; if ( ex != null ) { if ( exCol == null ) { exCol = ex ; } exCol . getExceptions ( ) . addAll ( ex . getExceptions ( ) ) ; if ( ex . isFatal ( ) ) { exCol . setFatal ( true ) ; final String msg = String . format ( "Fatal exception(s) analyzing %s" , childProject . getName ( ) ) ; if ( this . isFailOnError ( ) ) { throw new MojoExecutionException ( msg , exCol ) ; } getLog ( ) . error ( msg ) ; if ( getLog ( ) . isDebugEnabled ( ) ) { getLog ( ) . debug ( exCol ) ; } } } } return exCol ;
public class ApiOvhIp { /** * Your OVH IPs * REST : GET / ip * @ param description [ required ] Filter the value of description property ( like ) * @ param ip [ required ] Filter the value of ip property ( contains or equals ) * @ param routedTo _ serviceName [ required ] Filter the value of routedTo . serviceName property ( like ) * @ param type [ required ] Filter the value of type property ( = ) */ public ArrayList < String > GET ( String description , String ip , String routedTo_serviceName , OvhIpTypeEnum type ) throws IOException { } }
String qPath = "/ip" ; StringBuilder sb = path ( qPath ) ; query ( sb , "description" , description ) ; query ( sb , "ip" , ip ) ; query ( sb , "routedTo.serviceName" , routedTo_serviceName ) ; query ( sb , "type" , type ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t2 ) ;
public class TldVarianceFilter { /** * Computes the variance inside the specified rectangle . * @ return variance */ protected double computeVarianceSafe ( int x0 , int y0 , int x1 , int y1 ) { } }
// can use unsafe operations here since x0 > 0 and y0 > 0 double square = GIntegralImageOps . block_zero ( integralSq , x0 - 1 , y0 - 1 , x1 - 1 , y1 - 1 ) ; double area = ( x1 - x0 ) * ( y1 - y0 ) ; double mean = GIntegralImageOps . block_zero ( integral , x0 - 1 , y0 - 1 , x1 - 1 , y1 - 1 ) / area ; return square / area - mean * mean ;
public class MultiPartFaxJob2HTTPRequestConverter {
    /**
     * Creates a HTTP request with the common data.
     *
     * @param faxClientSpi  The HTTP fax client SPI
     * @param faxActionType The fax action type
     * @return The HTTP request to send
     */
    protected HTTPRequest createCommonHTTPRequest(HTTPFaxClientSpi faxClientSpi, FaxActionType faxActionType) {
        // setup common request data: target resource and URL parameters are
        // both taken from the SPI configuration for this action type
        HTTPRequest request = new HTTPRequest();
        request.setResource(faxClientSpi.getHTTPResource(faxActionType));
        request.setParametersText(faxClientSpi.getHTTPURLParameters());
        return request;
    }
}
public class PkRSS { /** * Saves / Deletes an { @ link Article } object to the favorites database . * @ param article Article object to save . * @ param favorite Whether to save or delete . { @ code true } to save ; { @ code false } to delete . * @ return { @ code true } if successful , { @ code false } if otherwise . */ public boolean saveFavorite ( Article article , boolean favorite ) { } }
long time = System . currentTimeMillis ( ) ; log ( "Adding article " + article . getId ( ) + " to favorites..." ) ; try { if ( favorite ) favoriteDatabase . add ( article ) ; else favoriteDatabase . delete ( article ) ; } catch ( Exception e ) { log ( "Error " + ( favorite ? "saving article to" : "deleting article from" ) + " favorites database." , Log . ERROR ) ; } log ( "Saving article " + article . getId ( ) + " to favorites took " + ( System . currentTimeMillis ( ) - time ) + "ms" ) ; return true ;
public class RandomStringUtils {
    /**
     * <p>Creates a random string based on a variety of options, using a
     * supplied source of randomness.</p>
     *
     * <p>If start and end are both {@code 0}, start and end are set to
     * {@code ' '} and {@code 'z'}, the ASCII printable characters, unless
     * letters and numbers are both {@code false}, in which case start and end
     * are set to {@code 0} and {@link Character#MAX_CODE_POINT}.</p>
     *
     * <p>If set is not {@code null}, characters between start and end are chosen.</p>
     *
     * <p>This method accepts a user-supplied {@link Random} instance to use as
     * a source of randomness. By seeding a single {@link Random} instance with
     * a fixed seed and using it for each call, the same random sequence of
     * strings can be generated repeatedly and predictably.</p>
     *
     * @param count   the length of random string to create
     * @param start   the position in set of chars to start at (inclusive)
     * @param end     the position in set of chars to end before (exclusive)
     * @param letters only allow letters?
     * @param numbers only allow numbers?
     * @param chars   the set of chars to choose randoms from, must not be empty.
     *                If {@code null}, then it will use the set of all chars.
     * @param random  a source of randomness.
     * @return the random string
     * @throws ArrayIndexOutOfBoundsException if there are not
     *         {@code (end - start) + 1} characters in the set array.
     * @throws IllegalArgumentException if {@code count} &lt; 0 or the provided chars array is empty.
     * @since 2.0
     */
    public static String random(int count, int start, int end, final boolean letters, final boolean numbers,
            final char[] chars, final Random random) {
        if (count == 0) {
            return StringUtils.EMPTY;
        } else if (count < 0) {
            throw new IllegalArgumentException("Requested random string length " + count + " is less than 0.");
        }
        if (chars != null && chars.length == 0) {
            throw new IllegalArgumentException("The chars array must not be empty");
        }

        // Apply the documented defaults when no explicit range was given.
        if (start == 0 && end == 0) {
            if (chars != null) {
                end = chars.length;
            } else {
                if (!letters && !numbers) {
                    end = Character.MAX_CODE_POINT;
                } else {
                    end = 'z' + 1;
                    start = ' ';
                }
            }
        } else {
            if (end <= start) {
                throw new IllegalArgumentException("Parameter end (" + end + ") must be greater than start (" + start + ")");
            }
        }

        // Reject ranges that cannot possibly produce the requested character
        // classes ('0' is 48, 'A' is 65), which would otherwise loop forever.
        final int zero_digit_ascii = 48;
        final int first_letter_ascii = 65;
        if (chars == null && (numbers && end <= zero_digit_ascii || letters && end <= first_letter_ascii)) {
            throw new IllegalArgumentException("Parameter end (" + end + ") must be greater then (" + zero_digit_ascii + ") for generating digits " + "or greater then (" + first_letter_ascii + ") for generating letters.");
        }

        final StringBuilder builder = new StringBuilder(count);
        final int gap = end - start;

        // count is decremented once per appended code point; "count++; continue"
        // rejects the current pick and retries without consuming a slot.
        while (count-- != 0) {
            int codePoint;
            if (chars == null) {
                codePoint = random.nextInt(gap) + start;
                switch (Character.getType(codePoint)) {
                case Character.UNASSIGNED:
                case Character.PRIVATE_USE:
                case Character.SURROGATE:
                    // not a usable code point — retry this slot
                    count++;
                    continue;
                }
            } else {
                codePoint = chars[random.nextInt(gap) + start];
            }

            final int numberOfChars = Character.charCount(codePoint);
            if (count == 0 && numberOfChars > 1) {
                // a supplementary code point needs two chars but only one slot
                // remains — retry for a BMP character instead
                count++;
                continue;
            }

            if (letters && Character.isLetter(codePoint)
                    || numbers && Character.isDigit(codePoint)
                    || !letters && !numbers) {
                builder.appendCodePoint(codePoint);
                if (numberOfChars == 2) {
                    // surrogate pair consumed an extra slot
                    count--;
                }
            } else {
                // wrong character class — retry this slot
                count++;
            }
        }
        return builder.toString();
    }
}
public class ProxyServlet { /** * Called by { @ code init } to set the remote port . Must be a number . * Default is { @ code 80 } . * @ param pRemotePort */ public void setRemotePort ( String pRemotePort ) { } }
try { remotePort = Integer . parseInt ( pRemotePort ) ; } catch ( NumberFormatException e ) { log ( "RemotePort must be a number!" , e ) ; }
public class MyExceptionHandler { /** * TypeMismatchException中获取到参数错误类型 * @ param e */ private ModelAndView getParamErrors ( TypeMismatchException e ) { } }
Throwable t = e . getCause ( ) ; if ( t instanceof ConversionFailedException ) { ConversionFailedException x = ( ConversionFailedException ) t ; TypeDescriptor type = x . getTargetType ( ) ; Annotation [ ] annotations = type != null ? type . getAnnotations ( ) : new Annotation [ 0 ] ; Map < String , String > errors = new HashMap < String , String > ( ) ; for ( Annotation a : annotations ) { if ( a instanceof RequestParam ) { errors . put ( ( ( RequestParam ) a ) . value ( ) , "parameter type error!" ) ; } } if ( errors . size ( ) > 0 ) { return paramError ( errors , ErrorCode . TYPE_MIS_MATCH ) ; } } JsonObjectBase jsonObject = JsonObjectUtils . buildGlobalError ( "parameter type error!" , ErrorCode . TYPE_MIS_MATCH ) ; return JsonObjectUtils . JsonObjectError2ModelView ( ( JsonObjectError ) jsonObject ) ;
public class CommerceRegionUtil { /** * Returns all the commerce regions where uuid = & # 63 ; and companyId = & # 63 ; . * @ param uuid the uuid * @ param companyId the company ID * @ return the matching commerce regions */ public static List < CommerceRegion > findByUuid_C ( String uuid , long companyId ) { } }
return getPersistence ( ) . findByUuid_C ( uuid , companyId ) ;
public class FnBigDecimal { /** * It rounds the input number with the given scale and { @ link RoundingMode } . The input * number will be converted into a { @ link BigDecimal } with the given scale and * roundingMode * @ param scale the scale * @ param roundingMode the { @ link RoundingMode } * @ return the { @ link BigDecimal } representation of the input rounded based on * the given parameters */ public static final Function < BigDecimal , BigDecimal > roundBigDecimal ( final int scale , final RoundingMode roundingMode ) { } }
return FnNumber . roundBigDecimal ( scale , roundingMode ) ;
public class LongPersistedValueData {
    /**
     * {@inheritDoc}
     *
     * Serializes this value as its order number (int) followed by the long value.
     * NOTE(review): the field order written here (orderNumber, then value) must
     * match the corresponding readExternal — confirm against the reader, which
     * is not visible in this file.
     */
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeInt(orderNumber);
        out.writeLong(value);
    }
}
public class DummyLogOutput { /** * ( non - Javadoc ) * @ see com . ibm . ws . objectManager . LogOutput # writeNext ( com . ibm . ws . objectManager . LogRecord , long , boolean ) */ protected long writeNext ( LogRecord logRecord , long reservedDelta , boolean checkSpace , boolean flush ) throws ObjectManagerException { } }
final String methodName = "writeNext" ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , methodName , new Object [ ] { logRecord , new Long ( reservedDelta ) , new Boolean ( checkSpace ) , new Boolean ( flush ) } ) ; long usableLogSequenceNumber ; // Take a lock on the log and increment the log sequence number . synchronized ( logSequenceNumberLock ) { logSequenceNumber ++ ; // Set the current Sequence number . usableLogSequenceNumber = logSequenceNumber ; } // synchronized ( LogSequeunceNumberLock ) . if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , methodName , new Object [ ] { new Long ( usableLogSequenceNumber ) } ) ; return usableLogSequenceNumber ;
public class HistoricDate {
    /**
     * <p>Constructs a new tuple from the given historic date components.</p>
     *
     * <p>Note: no detailed validation is performed here; that is the job of
     * {@code ChronoHistory}.</p>
     *
     * @param era            historic era
     * @param yearOfEra      positive year of era which will be interpreted according to given year definition
     * @param month          historic month (1-12)
     * @param dom            historic day of month (1-31)
     * @param yearDefinition defines a strategy how to interpret year of era
     * @param newYearStrategy strategy how to determine New Year
     * @return new historic date (not yet validated)
     * @throws IllegalArgumentException if any argument is out of required maximum range or inconsistent
     * @since 3.18/4.14
     * @see ChronoHistory#isValid(HistoricDate)
     */
    public static HistoricDate of(HistoricEra era, int yearOfEra, int month, int dom,
            YearDefinition yearDefinition, NewYearStrategy newYearStrategy) {
        // Validation order matters: null era first, then day, month, and the
        // era-specific year checks — each with its own exception message.
        if (era == null) {
            throw new NullPointerException("Missing historic era.");
        } else if (dom < 1 || dom > 31) {
            throw new IllegalArgumentException("Day of month out of range: " + toString(era, yearOfEra, month, dom));
        } else if (month < 1 || month > 12) {
            throw new IllegalArgumentException("Month out of range: " + toString(era, yearOfEra, month, dom));
        } else if (era == HistoricEra.BYZANTINE) {
            // Byzantine year 0 begins in September; anything earlier predates the epoch.
            if ((yearOfEra < 0) || ((yearOfEra == 0) && (month < 9))) {
                throw new IllegalArgumentException("Before creation of the world: " + toString(era, yearOfEra, month, dom));
            }
        } else if (yearOfEra < 1) {
            throw new IllegalArgumentException("Year of era must be positive: " + toString(era, yearOfEra, month, dom));
        }

        if (!yearDefinition.equals(YearDefinition.DUAL_DATING)) {
            // here we interpret yearOfEra as yearOfDisplay and have to translate
            // it to the standard calendar year
            NewYearRule rule = newYearStrategy.rule(era, yearOfEra);
            yearOfEra = rule.standardYear((yearDefinition == YearDefinition.AFTER_NEW_YEAR), newYearStrategy, era, yearOfEra, month, dom);
        }

        return new HistoricDate(era, yearOfEra, month, dom);
    }
}
public class FilesConfigurationSource { /** * Get configuration set for a given { @ code environment } from this source in a form of { @ link Properties } . * { @ link Environment } name is prepended to all file paths from { @ link ConfigFilesProvider } * to form an absolute configuration file path . If environment name is empty paths are treated as relative * to the user ' s home directory location . * @ param environment environment to use * @ return configuration set for { @ code environment } * @ throws MissingEnvironmentException when requested environment couldn ' t be found * @ throws IllegalStateException when unable to fetch configuration */ @ Override public Properties getConfiguration ( Environment environment ) { } }
Properties properties = new Properties ( ) ; Path rootPath ; if ( environment . getName ( ) . trim ( ) . isEmpty ( ) ) { rootPath = Paths . get ( System . getProperty ( "user.home" ) ) ; } else { rootPath = Paths . get ( environment . getName ( ) ) ; } if ( ! rootPath . toFile ( ) . exists ( ) ) { throw new MissingEnvironmentException ( "Directory doesn't exist: " + rootPath ) ; } List < Path > paths = new ArrayList < > ( ) ; for ( Path path : configFilesProvider . getConfigFiles ( ) ) { paths . add ( rootPath . resolve ( path ) ) ; } for ( Path path : paths ) { try ( InputStream input = new FileInputStream ( path . toFile ( ) ) ) { PropertiesProvider provider = propertiesProviderSelector . getProvider ( path . getFileName ( ) . toString ( ) ) ; properties . putAll ( provider . getProperties ( input ) ) ; } catch ( IOException e ) { throw new IllegalStateException ( "Unable to load properties from file: " + path , e ) ; } } return properties ;
public class IntStream { /** * Returns a sequential ordered { @ code IntStream } from { @ code startInclusive } * ( inclusive ) to { @ code endInclusive } ( inclusive ) by an incremental step of * { @ code 1 } . * @ param startInclusive the ( inclusive ) initial value * @ param endInclusive the inclusive upper bound * @ return a sequential { @ code IntStream } for the range of { @ code int } * elements */ @ NotNull public static IntStream rangeClosed ( final int startInclusive , final int endInclusive ) { } }
if ( startInclusive > endInclusive ) { return empty ( ) ; } else if ( startInclusive == endInclusive ) { return of ( startInclusive ) ; } else { return new IntStream ( new IntRangeClosed ( startInclusive , endInclusive ) ) ; }
public class DataTableRenderer { /** * This methods generates the HTML code of the current b : dataTable . * < code > encodeBegin < / code > generates the start of the component . After the , the * JSF framework calls < code > encodeChildren ( ) < / code > to generate the HTML code * between the beginning and the end of the component . For instance , in the case * of a panel component the content of the panel is generated by * < code > encodeChildren ( ) < / code > . After that , < code > encodeEnd ( ) < / code > is called * to generate the rest of the HTML code . * @ param context the FacesContext . * @ param component the current b : dataTable . * @ throws IOException thrown if something goes wrong when writing the HTML * code . */ @ Override public void encodeEnd ( FacesContext context , UIComponent component ) throws IOException { } }
if ( ! component . isRendered ( ) ) { return ; } DataTable dataTable = ( DataTable ) component ; Map < Integer , String > columnSortOrder = dataTable . getColumnSortOrderMap ( ) ; int pageLength = dataTable . getPageLength ( ) ; String orderString = "[]" ; if ( columnSortOrder != null ) { StringBuilder sb = new StringBuilder ( ) ; int i = 0 ; for ( Map . Entry < Integer , String > entry : columnSortOrder . entrySet ( ) ) { String separator = ( i > 0 ) ? "," : "" ; sb . append ( separator ) . append ( "[" ) . append ( entry . getKey ( ) ) . append ( "," ) . append ( "'" ) . append ( entry . getValue ( ) ) . append ( "'" ) . append ( "]" ) ; i ++ ; } orderString = sb . toString ( ) ; } ResponseWriter rw = context . getResponseWriter ( ) ; String clientIdRaw = dataTable . getClientId ( ) ; String clientId = clientIdRaw . replace ( ":" , "" ) ; String widgetVar = dataTable . getWidgetVar ( ) ; if ( null == widgetVar ) { widgetVar = BsfUtils . widgetVarName ( clientIdRaw ) ; } String lang = determineLanguage ( context , dataTable ) ; rw . endElement ( "table" ) ; endDisabledFieldset ( dataTable , rw ) ; String responsiveStyle = Responsive . getResponsiveStyleClass ( dataTable , false ) ; if ( null != responsiveStyle && responsiveStyle . trim ( ) . length ( ) > 0 ) { rw . endElement ( "div" ) ; } if ( dataTable . isScrollHorizontally ( ) ) { rw . endElement ( "div" ) ; } Tooltip . activateTooltips ( context , dataTable ) ; rw . startElement ( "script" , component ) ; // # Start enclosure rw . writeText ( "$(document).ready(function() {" , null ) ; // # Enclosure - scoped variable initialization String options = "" ; options = addOptions ( "fixedHeader: " + dataTable . isFixedHeader ( ) , options ) ; options = addOptions ( "responsive: " + dataTable . isResponsive ( ) , options ) ; options = addOptions ( "paging: " + dataTable . isPaginated ( ) , options ) ; if ( ! dataTable . 
isInfo ( ) ) { options = addOptions ( "info: false" , options ) ; } options = addOptions ( "pageLength: " + pageLength , options ) ; options = addOptions ( "lengthMenu: " + getPageLengthMenu ( dataTable ) , options ) ; options = addOptions ( "searching: " + dataTable . isSearching ( ) , options ) ; options = addOptions ( "order: " + orderString , options ) ; options = addOptions ( "stateSave: " + dataTable . isSaveState ( ) , options ) ; options = addOptions ( "mark: true" , options ) ; if ( dataTable . isSelect ( ) && dataTable . getSelectionMode2 ( ) == null ) { String json = "" ; String items = dataTable . getSelectedItems ( ) ; if ( "column" . equals ( items ) || "columns" . equals ( items ) ) { json += "items:'column'," ; } else if ( "cell" . equals ( items ) || "cells" . equals ( items ) ) { json += "items:'cell'," ; } if ( "single" . equalsIgnoreCase ( dataTable . getSelectionMode ( ) ) ) { json += "style:'single'," ; } else { json += "style:'os'," ; } if ( ! dataTable . isSelectionInfo ( ) ) { json += "info:false," ; } if ( dataTable . isDeselectOnBackdropClick ( ) ) { json += "blurable:true," ; } if ( json . length ( ) > 1 ) { json = "select:{" + json . substring ( 0 , json . length ( ) - 1 ) + "}" ; } else { json = "select:true" ; } options = addOptions ( json , options ) ; } options = addOptions ( generateScrollOptions ( dataTable ) , options ) ; String customOptions = dataTable . getCustomOptions ( ) ; if ( BsfUtils . isStringValued ( lang ) ) { boolean languageAdded = false ; if ( customOptions != null && customOptions . contains ( "language" ) ) { int start = customOptions . indexOf ( "language" + "language" . length ( ) ) ; start = customOptions . indexOf ( "{" , start ) + 1 ; if ( start > 0 ) { customOptions = customOptions . substring ( 0 , start ) + "url: '" + lang + "'," + customOptions . substring ( start ) ; languageAdded = true ; } } if ( ! 
languageAdded ) { options = addOptions ( " language: { url: '" + lang + "' } " , options ) ; } } options = addOptions ( generateColumnInfos ( dataTable . getColumnInfo ( ) ) , options ) ; options = addOptions ( customOptions , options ) ; options = addOptions ( generateColumnDefs ( dataTable ) , options ) ; options = addOptions ( getButtons ( dataTable ) , options ) ; String selectCommand = "" ; Object selectedRow = dataTable . getSelectedRow ( ) ; if ( null != selectedRow ) { String selector = "'.bf-selected-row'" ; if ( selectedRow instanceof String ) { try { Integer . parseInt ( ( String ) selectedRow ) ; selector = ( String ) selectedRow ; } catch ( NumberFormatException itIsAString ) { selector = "'" + selectedRow + "'" ; } } else if ( selectedRow instanceof Number ) { selector = selectedRow . toString ( ) ; } selectCommand = widgetVar + ".DataTable().rows(" + selector + ").select(); " ; } Object selectedColumn = dataTable . getSelectedColumn ( ) ; if ( null != selectedColumn ) { String selector = "'.bf-selected-column'" ; if ( selectedColumn instanceof String ) { try { Integer . parseInt ( ( String ) selectedColumn ) ; selector = ( String ) selectedColumn ; } catch ( NumberFormatException itIsAString ) { selector = "'" + selectedColumn + "'" ; } } else if ( selectedColumn instanceof Number ) { selector = selectedColumn . toString ( ) ; } selectCommand += widgetVar + ".DataTable().columns(" + selector + ").select(); " ; } if ( selectCommand . length ( ) > 0 ) { options = addOptions ( "'initComplete': function( settings, json ) { " + selectCommand + "}" , options ) ; } if ( dataTable . getRowGroup ( ) != null ) { String rowGroup = dataTable . getRowGroup ( ) ; try { Integer . parseInt ( rowGroup ) ; options = addOptions ( "orderFixed: [" + rowGroup + ", 'asc']" , options ) ; rowGroup = "rowGroup:{dataSrc:" + rowGroup + "}" ; } catch ( NumberFormatException itsJson ) { // consider it a Json object } options = addOptions ( rowGroup , options ) ; } rw . 
writeText ( widgetVar + " = $('." + clientId + "Table" + "');" + // # Get instance of wrapper , and replace it with the unwrapped table . "var wrapper = $('#" + clientIdRaw . replace ( ":" , "\\\\:" ) + "_wrapper');" + "wrapper.replaceWith(" + widgetVar + ");" + "var table = " + widgetVar + ".DataTable({" + options + "});" , null ) ; if ( dataTable . isMultiColumnSearch ( ) ) { // # Footer stuff : // https : / / datatables . net / examples / api / multi _ filter . html // # Convert footer column text to input textfields String filter = "<div class=\"form-group has-feedback\">" ; filter += "<input class=\"form-control input-sm datatable-filter-field\" type=\"text\" placeholder=\"' + title + '\" />" ; filter += "<i class=\"fa fa-search form-control-feedback\"></i>" ; filter += "</div>" ; rw . writeText ( widgetVar + ".find('.bf-multisearch').each(function(){" + "var title=$(this).text();" + "$(this).html('" + filter + "');" + "});" , null ) ; // # Add event listeners for each multisearch input rw . writeText ( "var inputs=$(" + widgetVar + ".find('.bf-multisearch input'));" , null ) ; rw . writeText ( "table.columns().every( function(col) {" + "var that=this;if(col<inputs.length){" + "inputs[col].value=table.columns(col).search()[0];" + "$(inputs[col]).on('keyup change', function(){if(that.search()!==this.value){" + "that.search(this.value).draw('page');}});}" , null ) ; rw . writeText ( "});" , null ) ; int col = 0 ; for ( UIComponent column : dataTable . getChildren ( ) ) { if ( ! column . isRendered ( ) ) { continue ; } String searchValue = null ; if ( ( column instanceof DataTableColumn ) ) { searchValue = ( ( DataTableColumn ) column ) . getSearchValue ( ) ; if ( ! ( ( DataTableColumn ) column ) . isSearchable ( ) ) { continue ; } } else { Object sv = column . getAttributes ( ) . get ( "searchValue" ) ; if ( sv != null && ( ! "" . equals ( sv ) ) ) { searchValue = sv . toString ( ) ; } } if ( null != searchValue && searchValue . length ( ) > 0 ) { rw . 
writeText ( "inputs[" + col + "].value='" + searchValue + "';" , null ) ; rw . writeText ( "table.columns(" + col + ").search('" + searchValue + "').draw('page');" , null ) ; } col ++ ; } } // # End enclosure rw . writeText ( "} );" , null ) ; rw . endElement ( "script" ) ;
public class GraniteUi { /** * From the list of resource types get the first one that exists . * @ param resourceResolver Resource resolver * @ param resourceTypes ResourceTypes * @ return Existing resource type */ public static @ Nullable String getExistingResourceType ( @ NotNull ResourceResolver resourceResolver , @ NotNull String @ NotNull . . . resourceTypes ) { } }
for ( String path : resourceTypes ) { if ( resourceResolver . getResource ( path ) != null ) { return path ; } } return null ;
public class Scheduler {
  /**
   * Computes the duration between two points in time, in minutes, preserving
   * fractional minutes (1 second = 1/60 minute) as implied by the {@code double}
   * return type. The previous implementation used {@link Duration#toMinutes()},
   * which truncates to whole minutes and discards the seconds.
   *
   * @param start begin time
   * @param end finish time
   * @return duration in minutes, possibly fractional; negative when {@code end}
   *         is before {@code start}
   */
  public static double durationMinutes(LocalDateTime start, LocalDateTime end) {
    // toMillis() keeps sub-minute precision; 60 000 ms per minute.
    return Duration.between(start, end).toMillis() / 60000.0;
  }
}
public class WebFacesConfigDescriptorImpl { /** * If not already created , a new < code > factory < / code > element will be created and returned . * Otherwise , the first existing < code > factory < / code > element will be returned . * @ return the instance defined for the element < code > factory < / code > */ public FacesConfigFactoryType < WebFacesConfigDescriptor > getOrCreateFactory ( ) { } }
List < Node > nodeList = model . get ( "factory" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new FacesConfigFactoryTypeImpl < WebFacesConfigDescriptor > ( this , "factory" , model , nodeList . get ( 0 ) ) ; } return createFactory ( ) ;
public class DecoratedUserContext {
  /**
   * Applies one layer of decoration to the given UserContext via the given
   * AuthenticationProvider's decorate() function. If that provider originated
   * the UserContext, it is returned unchanged.
   *
   * @param authProvider
   *     The AuthenticationProvider which should decorate the given UserContext.
   * @param userContext
   *     The UserContext to decorate.
   * @param authenticatedUser
   *     The AuthenticatedUser associated with the given UserContext.
   * @param credentials
   *     The credentials of the request which produced the given UserContext.
   * @return
   *     The decorated (wrapped) UserContext, or the original UserContext when
   *     the provider originated it or returned null.
   * @throws GuacamoleException
   *     If the AuthenticationProvider fails while decorating the UserContext.
   */
  private static UserContext decorate(AuthenticationProvider authProvider,
      UserContext userContext, AuthenticatedUser authenticatedUser,
      Credentials credentials) throws GuacamoleException {

    // The provider that produced the UserContext must not decorate it again.
    if (authProvider == userContext.getAuthenticationProvider()) {
      return userContext;
    }

    // Apply one layer of wrapping, but never let a misbehaving extension
    // wipe out the UserContext entirely by returning null.
    UserContext decorated = authProvider.decorate(userContext, authenticatedUser, credentials);
    return (decorated != null) ? decorated : userContext;
  }
}
public class Math {
  /**
   * Determines whether the polygon contains the specified point, using even-odd
   * (ray-casting) crossing counts: each polygon edge is tested against a
   * horizontal ray extending from (x, y), and an odd number of crossings means
   * the point is inside.
   *
   * NOTE(review): boundary behavior (a point exactly on an edge or vertex)
   * follows the half-open comparisons below and is not symmetric for all edges;
   * callers needing exact on-edge semantics should verify against their needs.
   *
   * @param polygon vertices as {x, y} pairs; the last vertex is implicitly
   *                connected back to the first
   * @param x the specified x coordinate
   * @param y the specified y coordinate
   * @return true if the polygon contains the point; false otherwise
   */
  public static boolean contains(double[][] polygon, double x, double y) {
    // A polygon needs at least 3 vertices to enclose any area.
    if (polygon.length <= 2) {
      return false;
    }
    int hits = 0;
    int n = polygon.length;
    // Start the edge walk from the closing edge (last vertex -> first vertex).
    double lastx = polygon[n - 1][0];
    double lasty = polygon[n - 1][1];
    double curx, cury;
    // Walk the edges of the polygon
    for (int i = 0; i < n; lastx = curx, lasty = cury, i++) {
      curx = polygon[i][0];
      cury = polygon[i][1];
      // Horizontal edges can never cross a horizontal ray.
      if (cury == lasty) {
        continue;
      }
      // Skip edges lying entirely to the left of the point; remember the
      // leftmost x of the remaining candidate edge.
      double leftx;
      if (curx < lastx) {
        if (x >= lastx) {
          continue;
        }
        leftx = curx;
      } else {
        if (x >= curx) {
          continue;
        }
        leftx = lastx;
      }
      double test1, test2;
      if (cury < lasty) {
        // Edge goes upward: half-open range [cury, lasty) avoids double-counting
        // a vertex shared by two edges.
        if (y < cury || y >= lasty) {
          continue;
        }
        // Point is left of the whole edge: guaranteed crossing.
        if (x < leftx) {
          hits++;
          continue;
        }
        test1 = x - curx;
        test2 = y - cury;
      } else {
        // Edge goes downward: mirror of the branch above.
        if (y < lasty || y >= cury) {
          continue;
        }
        if (x < leftx) {
          hits++;
          continue;
        }
        test1 = x - lastx;
        test2 = y - lasty;
      }
      // Compare the point against the edge's intersection with the ray
      // (slope form rearranged to avoid division ordering issues).
      if (test1 < (test2 / (lasty - cury) * (lastx - curx))) {
        hits++;
      }
    }
    // Odd number of crossings -> the point is inside (even-odd rule).
    return ((hits & 1) != 0);
  }
}
public class BatchPutScheduledUpdateGroupActionRequest { /** * One or more scheduled actions . The maximum number allowed is 50. * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setScheduledUpdateGroupActions ( java . util . Collection ) } or * { @ link # withScheduledUpdateGroupActions ( java . util . Collection ) } if you want to override the existing values . * @ param scheduledUpdateGroupActions * One or more scheduled actions . The maximum number allowed is 50. * @ return Returns a reference to this object so that method calls can be chained together . */ public BatchPutScheduledUpdateGroupActionRequest withScheduledUpdateGroupActions ( ScheduledUpdateGroupActionRequest ... scheduledUpdateGroupActions ) { } }
if ( this . scheduledUpdateGroupActions == null ) { setScheduledUpdateGroupActions ( new com . amazonaws . internal . SdkInternalList < ScheduledUpdateGroupActionRequest > ( scheduledUpdateGroupActions . length ) ) ; } for ( ScheduledUpdateGroupActionRequest ele : scheduledUpdateGroupActions ) { this . scheduledUpdateGroupActions . add ( ele ) ; } return this ;
public class RoomInfoImpl { /** * / * ( non - Javadoc ) * @ see com . tvd12 . ezyfox . core . command . UpdateRoomInfo # removeAllVariables ( ) */ @ Override public void removeAllVariables ( ) { } }
List < RoomVariable > vars = room . getVariables ( ) ; for ( RoomVariable var : vars ) var . setNull ( ) ; api . setRoomVariables ( null , room , vars , true , true , false ) ;
public class Ui { /** * Get color array values form array resource * @ param resources Resources * @ param resId Resources id * @ return color array */ public static int [ ] getColorsFromArrayRes ( Resources resources , int resId ) { } }
try { @ SuppressLint ( "Recycle" ) TypedArray array = resources . obtainTypedArray ( resId ) ; if ( array . length ( ) > 0 ) { final int len = array . length ( ) ; final int [ ] colors = new int [ len ] ; for ( int i = 0 ; i < len ; i ++ ) { colors [ i ] = array . getColor ( i , 0 ) ; } return colors ; } } catch ( Resources . NotFoundException ignored ) { } return null ;
public class NextGenRTLogger { public void persistToFile ( ) throws IOException { } }
if ( logFile != null ) { FileOutputStream fos = new FileOutputStream ( logFile + ".rtbin" ) ; ObjectOutputStream out = new ObjectOutputStream ( fos ) ; out . writeObject ( cpuMap ) ; out . writeObject ( busMap ) ; out . writeObject ( objectMap ) ; out . writeObject ( classDefMap ) ; out . writeObject ( operationMap ) ; out . writeObject ( busMessage ) ; out . writeObject ( threadMap ) ; out . writeObject ( events ) ; out . flush ( ) ; out . close ( ) ; }
public class IntervalComparator {
  /**
   * Orders intervals primarily by their (normalized) start boundary, falling
   * back to {@code compareEnd} when the starts are equal. An infinite-past
   * start sorts before any finite start; open start boundaries are normalized
   * to their closed equivalent by stepping forward one unit on the time axis.
   */
  @Override
  public int compare(ChronoInterval<T> o1, ChronoInterval<T> o2) {
    Boundary<T> bs1 = o1.getStart();
    Boundary<T> bs2 = o2.getStart();
    // Infinite (unbounded-past) starts sort first; two infinite starts tie
    // and are distinguished by their ends.
    if (bs1.isInfinite()) {
      if (bs2.isInfinite()) {
        return this.compareEnd(o1, o2);
      } else {
        return -1;
      }
    } else if (bs2.isInfinite()) {
      return 1;
    }
    T start1 = bs1.getTemporal();
    T start2 = bs2.getTemporal();
    // Normalize open start boundaries to closed ones: an open start at t is
    // equivalent to a closed start at t+1 step.
    if (bs1.isOpen()) {
      start1 = this.axis.stepForward(start1);
    }
    if (bs2.isOpen()) {
      start2 = this.axis.stepForward(start2);
    }
    // open max condition (rare edge case): stepForward past the axis maximum
    // yields null, which must sort after every finite start.
    if (start1 == null) {
      if (start2 == null) {
        return 0;
      } else {
        return 1;
      }
    } else if (start2 == null) {
      return -1;
    }
    // Equal starts fall back to comparing the end boundaries.
    int delta = this.axis.compare(start1, start2);
    if (delta == 0) {
      delta = this.compareEnd(o1, o2);
    }
    return delta;
  }
}
public class DRL6Expressions {
  /**
   * Parses a parenthesized expression.
   *
   * NOTE: ANTLR-generated code (from DRL6Expressions.g:622:1
   * {@code parExpression returns [BaseDescr result] : LEFT_PAREN expr=expression RIGHT_PAREN ;})
   * - do not hand-edit; regenerate from the grammar instead.
   */
  public final BaseDescr parExpression() throws RecognitionException {
    BaseDescr result = null;
    ParserRuleReturnScope expr = null;
    try {
      // DRL6Expressions.g:623:5: ( LEFT_PAREN expr= expression RIGHT_PAREN )
      // DRL6Expressions.g:623:7: LEFT_PAREN expr= expression RIGHT_PAREN
      {
        // Match '(' then the inner expression then ')'; every step bails out
        // early (returning null) when the parser is in a failed/backtracking state.
        match(input, LEFT_PAREN, FOLLOW_LEFT_PAREN_in_parExpression3631);
        if (state.failed) return result;
        pushFollow(FOLLOW_expression_in_parExpression3635);
        expr = expression();
        state._fsp--;
        if (state.failed) return result;
        match(input, RIGHT_PAREN, FOLLOW_RIGHT_PAREN_in_parExpression3637);
        if (state.failed) return result;
        if (state.backtracking == 0) {
          if (buildDescr) {
            // Propagate the inner descriptor; for atomic expressions, record
            // the parentheses in the expression text.
            result = (expr != null ? ((DRL6Expressions.expression_return) expr).result : null);
            if (result instanceof AtomicExprDescr) {
              ((AtomicExprDescr) result).setExpression("(" + ((AtomicExprDescr) result).getExpression() + ")");
            }
          }
        }
      }
    } catch (RecognitionException re) {
      throw re;
    } finally {
      // do for sure before leaving
    }
    return result;
  }
}
public class rnat6 { /** * Use this API to clear rnat6 resources . */ public static base_responses clear ( nitro_service client , rnat6 resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { rnat6 clearresources [ ] = new rnat6 [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { clearresources [ i ] = new rnat6 ( ) ; clearresources [ i ] . name = resources [ i ] . name ; } result = perform_operation_bulk_request ( client , clearresources , "clear" ) ; } return result ;
public class Parser { /** * A { @ link Parser } that returns { @ code defaultValue } if { @ code this } fails with no partial match . */ public final Parser < T > optional ( T defaultValue ) { } }
return Parsers . or ( this , Parsers . constant ( defaultValue ) ) ;
public class VCardManager { /** * Returns true if the given entity understands the vCard - XML format and allows the exchange of such . * @ param jid * @ param connection * @ return true if the given entity understands the vCard - XML format and exchange . * @ throws XMPPErrorException * @ throws NoResponseException * @ throws NotConnectedException * @ throws InterruptedException * @ deprecated use { @ link # isSupported ( Jid ) } instead . */ @ Deprecated public static boolean isSupported ( Jid jid , XMPPConnection connection ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } }
return VCardManager . getInstanceFor ( connection ) . isSupported ( jid ) ;
public class ApiOvhRouter { /** * Add a VPN to your router * REST : POST / router / { serviceName } / vpn * @ param serverPrivNet [ required ] Server ' s private network * @ param psk [ required ] Your PSK key * @ param clientIp [ required ] IP you will be connecting from / NULL ( allow all ) * @ param clientPrivNet [ required ] Client ' s private network * @ param serviceName [ required ] The internal name of your Router offer */ public OvhVpn serviceName_vpn_POST ( String serviceName , String clientIp , String clientPrivNet , String psk , String serverPrivNet ) throws IOException { } }
String qPath = "/router/{serviceName}/vpn" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "clientIp" , clientIp ) ; addBody ( o , "clientPrivNet" , clientPrivNet ) ; addBody ( o , "psk" , psk ) ; addBody ( o , "serverPrivNet" , serverPrivNet ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhVpn . class ) ;
public class Retryer { /** * Sets the strategy that sleeps a fixed amount of time after the first * failed attempt and in incrementing amounts of time after each additional failed attempt . * @ param initialSleepTime * @ param initialSleepUnit * @ param increment * @ param incrementUnit * @ return */ public Retryer < R > incrementingWait ( long initialSleepTime , TimeUnit initialSleepUnit , long increment , TimeUnit incrementUnit ) { } }
return incrementingWait ( checkNotNull ( initialSleepUnit ) . toMillis ( initialSleepTime ) , checkNotNull ( incrementUnit ) . toMillis ( increment ) ) ;
public class GraphHopper { /** * Potentially wraps the specified weighting into a TurnWeighting instance . */ public Weighting createTurnWeighting ( Graph graph , Weighting weighting , TraversalMode tMode ) { } }
FlagEncoder encoder = weighting . getFlagEncoder ( ) ; if ( encoder . supports ( TurnWeighting . class ) && ! tMode . equals ( TraversalMode . NODE_BASED ) ) return new TurnWeighting ( weighting , ( TurnCostExtension ) graph . getExtension ( ) ) ; return weighting ;
public class WebDriverTool { /** * Finds the first element . Uses the internal { @ link WebElementFinder } , which tries to apply * the specified { @ code condition } until it times out . * @ param by * the { @ link By } used to locate the element * @ param condition * a condition the found element must meet * @ return the element * @ deprecated Use { @ link # findElement ( By , Predicate ) } instead */ @ Deprecated public WebElement find ( final By by , final Predicate < WebElement > condition ) { } }
return findElement ( by , condition ) ;
public class AmazonQuickSightClient { /** * Generates a server - side embeddable URL and authorization code . Before this can work properly , first you need to * configure the dashboards and user permissions . For more information , see < a * href = " https : / / docs . aws . amazon . com / en _ us / quicksight / latest / user / embedding . html " > Embedding Amazon QuickSight * Dashboards < / a > . * Currently , you can use < code > GetDashboardEmbedURL < / code > only from the server , not from the user ’ s browser . * < b > CLI Sample : < / b > * Assume the role with permissions enabled for actions : < code > quickSight : RegisterUser < / code > and * < code > quicksight : GetDashboardEmbedURL < / code > . You can use assume - role , assume - role - with - web - identity , or * assume - role - with - saml . * < code > aws sts assume - role - - role - arn " arn : aws : iam : : 111122223333 : role / embedding _ quicksight _ dashboard _ role " - - role - session - name embeddingsession < / code > * If the user does not exist in QuickSight , register the user : * < code > aws quicksight register - user - - aws - account - id 111122223333 - - namespace default - - identity - type IAM - - iam - arn " arn : aws : iam : : 111122223333 : role / embedding _ quicksight _ dashboard _ role " - - user - role READER - - session - name " embeddingsession " - - email user123 @ example . com - - region us - east - 1 < / code > * Get the URL for the embedded dashboard * < code > aws quicksight get - dashboard - embed - url - - aws - account - id 111122223333 - - dashboard - id 1a1ac2b2-3fc3-4b44-5e5d - c6db6778df89 - - identity - type IAM < / code > * @ param getDashboardEmbedUrlRequest * @ return Result of the GetDashboardEmbedUrl operation returned by the service . * @ throws AccessDeniedException * You don ' t have access to this . The provided credentials couldn ' t be validated . You might not be * authorized to carry out the request . 
Ensure that your account is authorized to use the Amazon QuickSight * service , that your policies have the correct permissions , and that you are using the correct access keys . * @ throws InvalidParameterValueException * One or more parameters don ' t have a valid value . * @ throws ResourceExistsException * The resource specified doesn ' t exist . * @ throws ResourceNotFoundException * One or more resources can ' t be found . * @ throws ThrottlingException * Access is throttled . * @ throws PreconditionNotMetException * One or more preconditions aren ' t met . * @ throws DomainNotWhitelistedException * The domain specified is not on the allowlist . All domains for embedded dashboards must be added to the * approved list by an Amazon QuickSight admin . * @ throws QuickSightUserNotFoundException * The user is not found . This error can happen in any operation that requires finding a user based on a * provided user name , such as < code > DeleteUser < / code > , < code > DescribeUser < / code > , and so on . * @ throws IdentityTypeNotSupportedException * The identity type specified is not supported . Supported identity types include IAM and QUICKSIGHT . * @ throws SessionLifetimeInMinutesInvalidException * The number of minutes specified for the lifetime of a session is not valid . The session lifetime must be * from 15 to 600 minutes . * @ throws UnsupportedUserEditionException * This error indicates that you are calling an operation on an Amazon QuickSight subscription where the * edition doesn ' t include support for that operation . Amazon QuickSight currently has Standard Edition and * Enterprise Edition . Not every operation and capability is available in every edition . * @ throws InternalFailureException * An internal failure occurred . * @ throws ResourceUnavailableException * This resource is currently unavailable . * @ sample AmazonQuickSight . GetDashboardEmbedUrl * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / quicksight - 2018-04-01 / GetDashboardEmbedUrl " * target = " _ top " > AWS API Documentation < / a > */ @ Override public GetDashboardEmbedUrlResult getDashboardEmbedUrl ( GetDashboardEmbedUrlRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetDashboardEmbedUrl ( request ) ;
public class SearchExpressionFacade { /** * Resolves a { @ link UIComponent } for the given expression . * @ param context The { @ link FacesContext } . * @ param source The source component . E . g . a button . * @ param expression The search expression . * @ return A resolved { @ link UIComponent } or < code > null < / code > . */ public static UIComponent resolveComponent ( FacesContext context , UIComponent source , String expression ) { } }
return resolveComponent ( context , source , expression , SearchExpressionHint . NONE ) ;
public class IoUtil {
  /**
   * Reads a serialized Java object from the given stream.
   *
   * NOTE(review): neither the wrapping {@code ObjectInputStream} nor {@code in}
   * is closed here - presumably the caller owns and closes {@code in} (closing
   * the wrapper would close it too); confirm against call sites.
   *
   * @param <T> expected type of the deserialized object
   * @param in input stream to read from; must not be {@code null}
   * @return the deserialized object, cast (unchecked) to {@code T}
   * @throws IORuntimeException wrapping any {@link IOException}
   * @throws UtilException wrapping {@link ClassNotFoundException}
   */
  public static <T> T readObj(InputStream in) throws IORuntimeException, UtilException {
    if (in == null) {
      throw new IllegalArgumentException("The InputStream must not be null");
    }
    ObjectInputStream ois = null;
    try {
      ois = new ObjectInputStream(in);
      @SuppressWarnings("unchecked")
      // may fail with CCE if serialised form is incorrect
      final T obj = (T) ois.readObject();
      return obj;
    } catch (IOException e) {
      throw new IORuntimeException(e);
    } catch (ClassNotFoundException e) {
      throw new UtilException(e);
    }
  }
}
public class WebDriverTool {
  /**
   * Opens a new window and switches to it. The window to switch to is determined
   * by diffing the window handles captured before {@code openCommand} runs with
   * the current ones; exactly one new handle is expected.
   *
   * @param openCommand
   *            logic for opening the new window
   * @param timeoutSeconds
   *            the timeout in seconds to wait for the new window to open
   * @return the handle of the window that opened the new window
   */
  public String openNewWindow(final Runnable openCommand, final long timeoutSeconds) {
    // Remember the current window and the handle set BEFORE opening, so the
    // new window can be identified as the set difference afterwards.
    String oldHandle = webDriver.getWindowHandle();
    final Set<String> existingWindowHandles = webDriver.getWindowHandles();
    openCommand.run();
    // Polled by waitFor() until a new handle appears or the timeout elapses.
    Function<WebDriver, String> function = new Function<WebDriver, String>() {
      @Override
      public String apply(final WebDriver input) {
        Set<String> newWindowHandles = webDriver.getWindowHandles();
        SetView<String> newWindows = difference(newWindowHandles, existingWindowHandles);
        if (newWindows.isEmpty()) {
          // Keeps the wait polling until the window shows up.
          throw new NotFoundException("No new window found.");
        }
        // getOnlyElement fails if more than one new window appeared.
        return getOnlyElement(newWindows);
      }

      @Override
      public String toString() {
        // Used in the wait's timeout message.
        return "new window to open";
      }
    };
    String newHandle = waitFor(function, timeoutSeconds);
    webDriver.switchTo().window(newHandle);
    // Return the ORIGINATING window's handle, not the new one.
    return oldHandle;
  }
}
public class Utils {
  /**
   * Replies the version of the SARL library on the classpath.
   *
   * @param typeReferences - the accessor to the types.
   * @param context the context that is providing the access to the classpath.
   * @param version output parameter receiving the SARL library version found,
   *        valid only when {@code SARL_FOUND} is returned.
   * @return an error code describing whether/why the SARL library was found.
   */
  @SuppressWarnings("checkstyle:npathcomplexity")
  public static SarlLibraryErrorCode getSARLLibraryVersionOnClasspath(TypeReferences typeReferences,
      Notifier context, OutParameter<String> version) {
    // One-time reflective sanity check that the SARLVersion bytecode actually
    // declares the expected version field.
    if (checkSarlVersionClass) {
      checkSarlVersionClass = false;
      try {
        final Object v = SARLVersion.class.getDeclaredField(SARL_VERSION_FIELD_NAME_STR);
        // NOTE(review): getDeclaredField never returns null (it throws
        // NoSuchFieldException instead), so this branch looks unreachable;
        // the catch below handles the real failure mode.
        if (v == null) {
          return SarlLibraryErrorCode.INVALID_SARL_VERSION_BYTECODE;
        }
      } catch (Throwable e) {
        return SarlLibraryErrorCode.INVALID_SARL_VERSION_BYTECODE;
      }
    }
    // Resolve the SARLVersion type on the classpath of the given context.
    final JvmType type;
    try {
      type = typeReferences.findDeclaredType(SARLVersion.class, context);
    } catch (Throwable exception) {
      return SarlLibraryErrorCode.NO_SARL_VERSION_CLASS;
    }
    if (type == null) {
      return SarlLibraryErrorCode.NO_SARL_VERSION_CLASS;
    }
    if (!(type instanceof JvmDeclaredType)) {
      return SarlLibraryErrorCode.NO_SARL_VERSION_DECLARED_TYPE;
    }
    // Locate the version constant among the declared fields.
    final JvmDeclaredType sarlVersionType = (JvmDeclaredType) type;
    JvmField versionField = null;
    final Iterator<JvmField> iterator = sarlVersionType.getDeclaredFields().iterator();
    while (versionField == null && iterator.hasNext()) {
      final JvmField field = iterator.next();
      if (SARL_VERSION_FIELD_NAME_STR.equals(field.getSimpleName())) {
        versionField = field;
      }
    }
    if (versionField == null) {
      return SarlLibraryErrorCode.NO_SARL_VERSION_FIELD;
    }
    // The field must carry a non-empty compile-time constant value.
    final String value = versionField.getConstantValueAsString();
    if (Strings.isNullOrEmpty(value)) {
      return SarlLibraryErrorCode.NO_SARL_VERSION_VALUE;
    }
    if (version != null) {
      version.set(value);
    }
    return SarlLibraryErrorCode.SARL_FOUND;
  }
}
public class TemplateFilterPageTreeProvider { /** * Searches for page content nodes under the { @ code pRootPath } with given * template It uses a XPATH query and return the node iterator of results . * @ param templates * @ param rootPath * @ return results node iterator * @ throws RepositoryException */ @ SuppressWarnings ( "null" ) private NodeIterator searchNodesByTemplate ( String [ ] templates , String rootPath , SlingHttpServletRequest request ) throws RepositoryException { } }
String queryString = "/jcr:root" + ISO9075 . encodePath ( rootPath ) + "//*" + "[@cq:template='" + StringUtils . join ( escapeXPathQueryExpressions ( templates ) , "' or @cq:template='" ) + "']" ; QueryManager queryManager = request . getResourceResolver ( ) . adaptTo ( Session . class ) . getWorkspace ( ) . getQueryManager ( ) ; @ SuppressWarnings ( "deprecation" ) Query query = queryManager . createQuery ( queryString , Query . XPATH ) ; QueryResult result = query . execute ( ) ; return result . getNodes ( ) ;
public class GosuStringUtil { /** * Replaces all occurrences of Strings within another String . * A < code > null < / code > reference passed to this method is a no - op , or if * any " search string " or " string to replace " is null , that replace will be * ignored . This will not repeat . For repeating replaces , call the * overloaded method . * < pre > * GosuStringUtil . replaceEach ( null , * , * ) = null * GosuStringUtil . replaceEach ( " " , * , * ) = " " * GosuStringUtil . replaceEach ( " aba " , null , null ) = " aba " * GosuStringUtil . replaceEach ( " aba " , new String [ 0 ] , null ) = " aba " * GosuStringUtil . replaceEach ( " aba " , null , new String [ 0 ] ) = " aba " * GosuStringUtil . replaceEach ( " aba " , new String [ ] { " a " } , null ) = " aba " * GosuStringUtil . replaceEach ( " aba " , new String [ ] { " a " } , new String [ ] { " " } ) = " b " * GosuStringUtil . replaceEach ( " aba " , new String [ ] { null } , new String [ ] { " a " } ) = " aba " * GosuStringUtil . replaceEach ( " abcde " , new String [ ] { " ab " , " d " } , new String [ ] { " w " , " t " } ) = " wcte " * ( example of how it does not repeat ) * GosuStringUtil . replaceEach ( " abcde " , new String [ ] { " ab " , " d " } , new String [ ] { " d " , " t " } ) = " dcte " * < / pre > * @ param text * text to search and replace in , no - op if null * @ param searchList * the Strings to search for , no - op if null * @ param replacementList * the Strings to replace them with , no - op if null * @ return the text with any replacements processed , < code > null < / code > if * null String input * @ throws IndexOutOfBoundsException * if the lengths of the arrays are not the same ( null is ok , * and / or size 0) * @ since 2.4 */ public static String replaceEach ( String text , String [ ] searchList , String [ ] replacementList ) { } }
return replaceEach ( text , searchList , replacementList , false , 0 ) ;
public class AbstractIoBuffer {

    /**
     * {@inheritDoc}
     *
     * Compacts this buffer. When auto-shrink is enabled and the remaining data
     * occupies at most a quarter of the capacity (and the capacity exceeds the
     * configured minimum), the backing NIO buffer is reallocated at a smaller,
     * repeatedly halved capacity before the data is copied; otherwise the
     * underlying buffer is compacted in place. The mark is discarded either way.
     */
    @Override
    public final IoBuffer compact() {
        int remaining = remaining();
        int capacity = capacity();
        if (capacity == 0) {
            return this;
        }
        // Shrink when auto-shrink is on, usage dropped to <= 25% of capacity,
        // and we are still above the configured minimum capacity.
        if (isAutoShrink() && remaining <= capacity >>> 2 && capacity > minimumCapacity) {
            int newCapacity = capacity;
            // Target capacity: at least the minimum, and at least twice the remaining data.
            int minCapacity = Math.max(minimumCapacity, remaining << 1);
            // Halve the capacity while it stays at or above the target.
            for (;;) {
                if (newCapacity >>> 1 < minCapacity) {
                    break;
                }
                newCapacity >>>= 1;
            }
            newCapacity = Math.max(minCapacity, newCapacity);
            if (newCapacity == capacity) {
                // Nothing to shrink after all.
                return this;
            }
            // Shrink and compact:
            // Save the state.
            ByteOrder bo = order();
            // Sanity check.
            if (remaining > newCapacity) {
                throw new IllegalStateException("The amount of the remaining bytes is greater than " + "the new capacity.");
            }
            // Reallocate.
            ByteBuffer oldBuf = buf();
            ByteBuffer newBuf = getAllocator().allocateNioBuffer(newCapacity, isDirect());
            newBuf.put(oldBuf);
            buf(newBuf);
            // Restore the state.
            buf().order(bo);
        } else {
            buf().compact();
        }
        // Compacting invalidates any mark.
        mark = -1;
        return this;
    }
}
public class Contexts { /** * Creates a new context with the given property mappings . * @ param mappings The property mappings for the context * @ return the context */ public static Context createContext ( Collection < PropertyMapping < ? > > mappings ) { } }
IdentityHashMap < ContextProperty < ? > , Object > properties = new IdentityHashMap < ContextProperty < ? > , Object > ( mappings . size ( ) ) ; for ( final PropertyMapping < ? > mapping : mappings ) { properties . put ( mapping . property , mapping . value ) ; } return ImmutableContext . createUnsafe ( properties ) ;
public class Predict {

    /**
     * Reads LIBSVM-formatted lines ("label idx:val idx:val ...") from {@code reader},
     * predicts each instance with {@code model}, writes one prediction per line to
     * {@code writer}, and logs accuracy (classification) or MSE and the squared
     * correlation coefficient (regression) over the whole input.
     *
     * <p><b>Note: The streams are NOT closed</b></p>
     *
     * @throws IOException if reading the input or writing the output fails
     * @throws IllegalArgumentException if probability output is requested for a
     *         model that does not support it
     * @throws RuntimeException if an input line is malformed
     */
    static void doPredict(BufferedReader reader, Writer writer, Model model) throws IOException {
        int correct = 0;
        int total = 0;
        double error = 0;
        // Running sums for the regression correlation statistics.
        double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;
        int nr_class = model.getNrClass();
        double[] prob_estimates = null;
        int n;
        int nr_feature = model.getNrFeature();
        // With a bias term, each instance gets one extra synthetic feature at index n.
        if (model.bias >= 0)
            n = nr_feature + 1;
        else
            n = nr_feature;
        if (flag_predict_probability && !model.isProbabilityModel()) {
            throw new IllegalArgumentException("probability output is only supported for logistic regression");
        }
        Formatter out = new Formatter(writer);
        if (flag_predict_probability) {
            // Header line: the class labels, in model order, matching the
            // probability columns printed for each instance below.
            int[] labels = model.getLabels();
            prob_estimates = new double[nr_class];
            printf(out, "labels");
            for (int j = 0; j < nr_class; j++)
                printf(out, " %d", labels[j]);
            printf(out, "\n");
        }
        String line = null;
        while ((line = reader.readLine()) != null) {
            List<Feature> x = new ArrayList<Feature>();
            StringTokenizer st = new StringTokenizer(line, " \t\n");
            double target_label;
            try {
                // First token is the true label.
                String label = st.nextToken();
                target_label = atof(label);
            } catch (NoSuchElementException e) {
                throw new RuntimeException("Wrong input format at line " + (total + 1), e);
            }
            // Remaining tokens are "index:value" feature pairs.
            while (st.hasMoreTokens()) {
                String[] split = COLON.split(st.nextToken(), 2);
                if (split == null || split.length < 2) {
                    throw new RuntimeException("Wrong input format at line " + (total + 1));
                }
                try {
                    int idx = atoi(split[0]);
                    double val = atof(split[1]);
                    // feature indices larger than those in training are not used
                    if (idx <= nr_feature) {
                        Feature node = new FeatureNode(idx, val);
                        x.add(node);
                    }
                } catch (NumberFormatException e) {
                    throw new RuntimeException("Wrong input format at line " + (total + 1), e);
                }
            }
            if (model.bias >= 0) {
                // Append the bias feature at the reserved index n.
                Feature node = new FeatureNode(n, model.bias);
                x.add(node);
            }
            Feature[] nodes = new Feature[x.size()];
            nodes = x.toArray(nodes);
            double predict_label;
            if (flag_predict_probability) {
                assert prob_estimates != null;
                predict_label = Linear.predictProbability(model, nodes, prob_estimates);
                // Print predicted label followed by per-class probabilities.
                printf(out, "%g", predict_label);
                for (int j = 0; j < model.nr_class; j++)
                    printf(out, " %g", prob_estimates[j]);
                printf(out, "\n");
            } else {
                predict_label = Linear.predict(model, nodes);
                printf(out, "%g\n", predict_label);
            }
            if (predict_label == target_label) {
                ++correct;
            }
            // Accumulate squared error and moments for the regression metrics.
            error += (predict_label - target_label) * (predict_label - target_label);
            sump += predict_label;
            sumt += target_label;
            sumpp += predict_label * predict_label;
            sumtt += target_label * target_label;
            sumpt += predict_label * target_label;
            ++total;
        }
        if (model.solverType.isSupportVectorRegression()) {
            info("Mean squared error = %g (regression)%n", error / total);
            info("Squared correlation coefficient = %g (regression)%n", ((total * sumpt - sump * sumt) * (total * sumpt - sump * sumt)) / ((total * sumpp - sump * sump) * (total * sumtt - sumt * sumt)));
        } else {
            info("Accuracy = %g%% (%d/%d)%n", (double) correct / total * 100, correct, total);
        }
    }
}
public class TransformersLogger {

    /**
     * Log a warning for the resource at the provided address and the given attributes. The detail message is a default
     * 'Attributes are not understood in the target model version and this resource will need to be ignored on the target host.'
     *
     * @param address where warning occurred
     * @param attributes attributes we are warning about
     */
    public void logAttributeWarning(PathAddress address, Set<String> attributes) {
        // Delegate to the full overload; null, null selects the default
        // message and no associated operation.
        logAttributeWarning(address, null, null, attributes);
    }
}
public class Download {

    /**
     * Starts downloading.
     *
     * @throws IOException if the file could not downloaded
     */
    @TaskAction
    public void download() throws IOException {
        action.execute();
        // handle 'upToDate': when nothing was actually downloaded, mark the
        // task state so Gradle reports it as UP-TO-DATE. The task-state API
        // changed across Gradle versions, hence the reflective calls below.
        try {
            if (action.isUpToDate()) {
                Method getState = this.getClass().getMethod("getState");
                Object state = getState.invoke(this);
                try {
                    // prior to Gradle 3.2 we needed to do this
                    Method upToDate = state.getClass().getMethod("upToDate");
                    upToDate.invoke(state);
                } catch (NoSuchMethodException e) {
                    // since Gradle 3.2 we need to do this
                    Method setDidWork = state.getClass().getMethod("setDidWork", boolean.class);
                    setDidWork.invoke(state, false);
                }
            }
        } catch (Exception e) {
            // just ignore: marking the task up-to-date is best effort; a
            // failure here must not fail a download that already succeeded
        }
    }
}
public class IonTextUtils {

    /**
     * Prints a single Unicode code point for use in an ASCII-safe JSON string.
     *
     * @param out the stream to receive the data.
     * @param codePoint a Unicode code point.
     * @throws IOException if writing to {@code out} fails.
     */
    public static void printJsonCodePoint(Appendable out, int codePoint) throws IOException {
        // JSON only allows double-quote strings, so escape using the JSON rules.
        printCodePoint(out, codePoint, EscapeMode.JSON);
    }
}
public class FunctionArgumentInjector {

    /**
     * Gets a mapping from function parameter names to call arguments.
     * A synthetic {@code THIS_MARKER} entry records the 'this' value; rest
     * parameters collect trailing arguments into an array literal; missing
     * arguments map to undefined; surplus arguments get unique temp names.
     *
     * @param fnNode the FUNCTION node being called
     * @param callNode the CALL node supplying the arguments
     * @param safeNameIdSupplier supplier of unique ids for surplus arguments
     * @return the parameter-name to argument-expression map
     */
    static ImmutableMap<String, Node> getFunctionCallParameterMap(final Node fnNode, Node callNode, Supplier<String> safeNameIdSupplier) {
        checkNotNull(fnNode);
        // Create an argName -> expression map
        ImmutableMap.Builder<String, Node> argMap = ImmutableMap.builder();
        // CALL NODE: [NAME, ARG1, ARG2, ...]
        Node cArg = callNode.getSecondChild();
        if (cArg != null && NodeUtil.isFunctionObjectCall(callNode)) {
            // For fn.call(thisObj, ...), the first argument is the 'this' value.
            argMap.put(THIS_MARKER, cArg);
            cArg = cArg.getNext();
        } else {
            // 'apply' isn't supported yet.
            checkState(!NodeUtil.isFunctionObjectApply(callNode), callNode);
            argMap.put(THIS_MARKER, NodeUtil.newUndefinedNode(callNode));
        }
        for (Node fnParam : NodeUtil.getFunctionParameters(fnNode).children()) {
            if (cArg != null) {
                if (fnParam.isRest()) {
                    checkState(fnParam.getOnlyChild().isName(), fnParam.getOnlyChild());
                    // A rest parameter swallows all remaining call arguments
                    // into an array literal; nothing follows it, so return.
                    Node array = IR.arraylit();
                    array.useSourceInfoIfMissingFromForTree(cArg);
                    while (cArg != null) {
                        array.addChildToBack(cArg.cloneTree());
                        cArg = cArg.getNext();
                    }
                    argMap.put(fnParam.getOnlyChild().getString(), array);
                    return argMap.build();
                } else {
                    checkState(fnParam.isName(), fnParam);
                    argMap.put(fnParam.getString(), cArg);
                }
                cArg = cArg.getNext();
            } else { // cArg == null: more parameters than supplied arguments
                if (fnParam.isRest()) {
                    checkState(fnParam.getOnlyChild().isName(), fnParam);
                    // No arguments for REST parameters
                    Node array = IR.arraylit();
                    argMap.put(fnParam.getOnlyChild().getString(), array);
                } else {
                    checkState(fnParam.isName(), fnParam);
                    // Missing arguments default to undefined.
                    Node srcLocation = callNode;
                    argMap.put(fnParam.getString(), NodeUtil.newUndefinedNode(srcLocation));
                }
            }
        }
        // Add temp names for arguments that don't have named parameters in the
        // called function.
        while (cArg != null) {
            String uniquePlaceholder = getUniqueAnonymousParameterName(safeNameIdSupplier);
            argMap.put(uniquePlaceholder, cArg);
            cArg = cArg.getNext();
        }
        return argMap.build();
    }
}
public class AbstractJaxbMojo { /** * Acquires the staleFile for this execution * @ return the staleFile ( used to define where ) for this execution */ protected final File getStaleFile ( ) { } }
final String staleFileName = "." + ( getExecution ( ) == null ? "nonExecutionJaxb" : getExecution ( ) . getExecutionId ( ) ) + "-" + getStaleFileName ( ) ; return new File ( staleFileDirectory , staleFileName ) ;
public class BaseFFDCService { /** * PM39875 - use dumpAlgorithm from legacy FFDC ( See com . ibm . ws . ffdc . FFDCFilter in WAS61 release ) to determine * when a summary table should be dumped . */ synchronized private boolean dumpAlgorithm ( long currentTime ) { } }
boolean dumpTable = false ; numberOfEntiesProcessed ++ ; // Increment the number of entries updated in the table if ( ( lastTimeOfDump == Long . MIN_VALUE ) // Note we check this since if currentTime is zero or more , the line below will be false due to arthimetic overflow || ( currentTime - lastTimeOfDump > highWaterTime ) ) { // Dump the content of the hash regardless of the // number of entries which have been seen . dumpTable = true ; } else { if ( ( numberOfEntiesProcessed > normalDumpThreshold ) && ( currentTime - lastTimeOfDump > lowWaterTime ) ) { dumpTable = true ; } } if ( dumpTable == true ) { lastTimeOfDump = currentTime ; numberOfEntiesProcessed = 0 ; } return dumpTable ;
public class CmsClientSitemapEntry { /** * Sets the site path . < p > * @ param sitepath the site path to set */ public void setSitePath ( String sitepath ) { } }
if ( ! isLeafType ( ) && ! sitepath . endsWith ( "/" ) ) { sitepath = sitepath + "/" ; } m_sitePath = sitepath ;
public class JBBPDslBuilder { /** * Add named signed short field . * @ param name name of the field , can be null for anonymous one * @ return the builder instance , must not be null */ public JBBPDslBuilder Short ( final String name ) { } }
final Item item = new Item ( BinType . SHORT , name , this . byteOrder ) ; this . addItem ( item ) ; return this ;
public class VerificationCodeImage { /** * 生成一张背景为白色的空白图片 * @ return 返回此图片 */ private BufferedImage generateBlankImg ( ) { } }
BufferedImage image = new BufferedImage ( w , h , BufferedImage . TYPE_INT_RGB ) ; Graphics2D g2 = ( Graphics2D ) image . getGraphics ( ) ; // 设置图片背景是白色 g2 . setColor ( Color . WHITE ) ; g2 . fillRect ( 0 , 0 , w , h ) ; g2 . dispose ( ) ; return image ;
public class ClassUtils {

    /**
     * Determine the common ancestor of the given classes, if any.
     *
     * @param clazz1 the class to introspect
     * @param clazz2 the other class to introspect
     * @return the common ancestor (i.e. common superclass, one interface
     * extending the other), or {@code null} if none found. If any of the
     * given classes is {@code null}, the other class will be returned.
     * @since 2.0
     */
    public static Class<?> determineCommonAncestor(Class<?> clazz1, Class<?> clazz2) {
        if (clazz1 == null) {
            return clazz2;
        }
        if (clazz2 == null) {
            return clazz1;
        }
        // Direct relationship: one type already subsumes the other.
        if (clazz1.isAssignableFrom(clazz2)) {
            return clazz1;
        }
        if (clazz2.isAssignableFrom(clazz1)) {
            return clazz2;
        }
        // Walk up clazz1's superclass chain, stopping before Object,
        // until a type that also subsumes clazz2 is found.
        Class<?> candidate = clazz1.getSuperclass();
        while (candidate != null && !Object.class.equals(candidate)) {
            if (candidate.isAssignableFrom(clazz2)) {
                return candidate;
            }
            candidate = candidate.getSuperclass();
        }
        // Only Object (or nothing) in common: treat as no ancestor.
        return null;
    }
}
public class Conditions { /** * 获得条件的绑定参数映射 * @ param conditions */ public static Map < String , Object > getParamMap ( final Collection < Condition > conditions ) { } }
final Map < String , Object > params = new HashMap < String , Object > ( ) ; for ( final Condition con : conditions ) { params . putAll ( getParamMap ( con ) ) ; } return params ;
public class Base64 { /** * Encodes an input stream to base64 , returning bytes . * The stream is guaranteed to be closed when this method returns , whether * successful or not . * @ param in stream to encode * @ return encoded bytes , or null if there ' s an error reading the stream */ public static byte [ ] encode ( InputStream in ) { } }
Base64OutputStream out = null ; try { ByteArrayOutputStream bytes = new ByteArrayOutputStream ( ) ; out = new Base64OutputStream ( bytes ) ; byte [ ] buf = new byte [ 4096 ] ; int len ; while ( ( len = in . read ( buf ) ) > 0 ) { out . write ( buf , 0 , len ) ; } out . flush ( ) ; return bytes . toByteArray ( ) ; } catch ( IOException e ) { return null ; } finally { try { in . close ( ) ; if ( out != null ) out . close ( ) ; } catch ( IOException e ) { throw new FaultException ( e ) ; } }
public class LineItemSummary {

    /**
     * Gets the creationDateTime value for this LineItemSummary.
     *
     * @return creationDateTime
     * This attribute may be {@code null} for line items created before
     * this feature was introduced.
     */
    public com.google.api.ads.admanager.axis.v201808.DateTime getCreationDateTime() {
        // Plain accessor for the generated field.
        return creationDateTime;
    }
}
public class Transloadit { /** * Returns a list of all templates under the user account * @ param options { @ link Map } extra options to send along with the request . * @ return { @ link ListResponse } * @ throws RequestException if request to transloadit server fails . * @ throws LocalOperationException if something goes wrong while running non - http operations . */ public ListResponse listTemplates ( Map < String , Object > options ) throws RequestException , LocalOperationException { } }
Request request = new Request ( this ) ; return new ListResponse ( request . get ( "/templates" , options ) ) ;
public class LibGdxCliOutputBuffer { /** * Add a single line to this buffer . * The line may contain a ' \ n ' character , and it will be honored , but this is discouraged . * @ param text Line text . * @ param color Line color . */ public void println ( String text , Color color ) { } }
final Label label = new Label ( text , skin , "outputEntry" ) ; label . setColor ( color ) ; label . setWrap ( true ) ; addLabel ( label ) ;
public class UtilImpl_IdentityStringSet { /** * { @ inheritDoc } */ @ SuppressWarnings ( "unchecked" ) @ Override public < S > S [ ] toArray ( S [ ] targetArray ) { } }
if ( this . size > targetArray . length ) { Class < ? > componentType = this . storage . getClass ( ) . getComponentType ( ) ; targetArray = ( S [ ] ) Array . newInstance ( componentType , this . size ) ; } System . arraycopy ( this . storage , 0 , targetArray , 0 , this . size ) ; if ( this . size < targetArray . length ) { for ( int offset = this . size ; offset < targetArray . length ; offset ++ ) { targetArray [ offset ] = null ; } } return targetArray ;
public class BoneCPDataSource { /** * 获取数据库连接 * @ return 数据库连接 */ public Connection getConnection ( ) throws IOException { } }
Connection conn = null ; if ( this . datasource != null ) { try { conn = this . datasource . getConnection ( ) ; } catch ( SQLException e ) { throw new IOException ( e ) ; } } return conn ;
public class VirtualNetworkPeeringsInner {

    /**
     * Gets all virtual network peerings in a virtual network.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;VirtualNetworkPeeringInner&gt; object
     */
    public Observable<Page<VirtualNetworkPeeringInner>> listNextAsync(final String nextPageLink) {
        // Unwrap the ServiceResponse envelope and emit just the page body.
        return listNextWithServiceResponseAsync(nextPageLink).map(new Func1<ServiceResponse<Page<VirtualNetworkPeeringInner>>, Page<VirtualNetworkPeeringInner>>() {
            @Override
            public Page<VirtualNetworkPeeringInner> call(ServiceResponse<Page<VirtualNetworkPeeringInner>> response) {
                return response.body();
            }
        });
    }
}
public class ZPoller { /** * gets all the items of this poller regarding the given input */ protected Iterable < ItemHolder > items ( final Object socketOrChannel ) { } }
final CompositePollItem aggregate = items . get ( socketOrChannel ) ; if ( aggregate == null ) { return Collections . emptySet ( ) ; } return aggregate . holders ;
public class Query { /** * < pre > * { $ and : [ expressions ] } * { $ or : [ expressions ] } * < / pre > */ public static Query logical ( LogOp op , Collection < Query > expressions ) { } }
Query q = new Query ( true ) ; for ( Query x : expressions ) { ( ( ArrayNode ) q . node ) . add ( x . toJson ( ) ) ; } Query a = new Query ( false ) ; a . add ( op . toString ( ) , q . toJson ( ) ) ; return a ;
public class RequestFactory {

    /**
     * Create a request builder for an update operation over multiple items.
     * (The previous javadoc said "create operation"; this builds an update
     * collection request, matching the method name.)
     *
     * @param items The items to update
     * @return The request builder
     */
    public UpdateCollectionRequestBuilder<T, ID> update(List<T> items) {
        return new UpdateCollectionRequestBuilder<T, ID>(version, type, entityName, items);
    }
}
public class HopkinsStatisticClusteringTendency {

    /**
     * Runs the algorithm in the timed evaluation part.
     *
     * Estimates the Hopkins statistic h = u / (u + w), where w aggregates
     * nearest-neighbor distances of objects sampled from the data and u
     * aggregates nearest-neighbor distances of uniformly generated objects
     * within the data extends. All output goes to the statistics logger.
     *
     * @param database Database context
     * @param relation Relation to analyze
     * @return always {@code null}; results are emitted via the statistics log
     */
    public Result run(Database database, Relation<NumberVector> relation) {
        final int dim = RelationUtil.dimensionality(relation);
        final DistanceQuery<NumberVector> distanceQuery = database.getDistanceQuery(relation, getDistanceFunction());
        // k + 1 neighbors, because a database object is its own nearest neighbor.
        final KNNQuery<NumberVector> knnQuery = database.getKNNQuery(distanceQuery, k + 1);
        // Bounding box (per-dimension minimum and extend) for the uniform sample.
        final double[] min = new double[dim], extend = new double[dim];
        initializeDataExtends(relation, dim, min, extend);
        if (!LOG.isStatistics()) {
            LOG.warning("This algorithm must be used with at least logging level " + Level.STATISTICS);
        }
        MeanVariance hmean = new MeanVariance(), umean = new MeanVariance(), wmean = new MeanVariance();
        // compute the hopkins value several times and use the average value for a
        // more stable result
        for (int j = 0; j < this.rep; j++) {
            // Compute NN distances for random objects from within the database
            double w = computeNNForRealData(knnQuery, relation, dim);
            // Compute NN distances for randomly created new uniform objects
            double u = computeNNForUniformData(knnQuery, min, extend);
            // compute hopkins statistik
            double h = u / (u + w); // = a / (1+a)
            hmean.put(h);
            umean.put(u);
            wmean.put(w);
        }
        final String prefix = this.getClass().getName();
        LOG.statistics(new LongStatistic(prefix + ".samplesize", sampleSize));
        LOG.statistics(new LongStatistic(prefix + ".dim", dim));
        LOG.statistics(new LongStatistic(prefix + ".hopkins.nearest-neighbor", k));
        LOG.statistics(new DoubleStatistic(prefix + ".hopkins.h.mean", hmean.getMean()));
        LOG.statistics(new DoubleStatistic(prefix + ".hopkins.u.mean", umean.getMean()));
        LOG.statistics(new DoubleStatistic(prefix + ".hopkins.w.mean", wmean.getMean()));
        if (rep > 1) {
            // Standard deviations are only meaningful with multiple repetitions.
            LOG.statistics(new DoubleStatistic(prefix + ".hopkins.h.std", hmean.getSampleStddev()));
            LOG.statistics(new DoubleStatistic(prefix + ".hopkins.u.std", umean.getSampleStddev()));
            LOG.statistics(new DoubleStatistic(prefix + ".hopkins.w.std", wmean.getSampleStddev()));
        }
        // Evaluate:
        double x = hmean.getMean();
        // See Hopkins for a proof that x is supposedly Beta distributed.
        double ix = BetaDistribution.regularizedIncBeta(x, sampleSize, sampleSize);
        double p = (x > .5) ? (1. - ix) : ix;
        LOG.statistics(new DoubleStatistic(prefix + ".hopkins.p", p));
        return null;
    }
}
public class JPAApplInfo {

    /**
     * Close all EntityManagerFactories in all scopes in this application.
     * Closes every registered scope/module and then clears the registry.
     * Synchronized on the registry so the iteration and clear are atomic
     * with respect to concurrent scope updates.
     */
    void closeAllScopeModules() {
        synchronized (puScopes) {
            for (String module : puScopes.keySet()) {
                close(module, false); // PK59717
            }
            puScopes.clear();
        }
    }
}
public class ExclusionRandomizerRegistry { /** * { @ inheritDoc } */ @ Override public Randomizer < ? > getRandomizer ( Field field ) { } }
for ( Predicate < Field > fieldPredicate : fieldPredicates ) { if ( fieldPredicate . test ( field ) ) { return new SkipRandomizer ( ) ; } } return null ;
public class ReloadableResourceBundleMessageSource {

    /**
     * Calculate all filenames for the given bundle basename and Locale.
     * Will calculate filenames for the given Locale, the system Locale
     * (if applicable), and the default file. Results are computed lazily
     * and cached per (basename, locale) for {@code cacheMillis}.
     *
     * @param basename the basename of the bundle
     * @param locale the locale
     * @return the List of filenames to check, each paired with its located resource
     * @see #setFallbackToSystemLocale
     * @see #calculateFilenamesForLocale
     */
    protected List<Pair<String, Resource>> calculateAllFilenames(final String basename, final Locale locale) {
        Pair<String, Locale> cacheKey = new Pair<String, Locale>(basename, locale);
        return CacheEntry.getValue(cachedFilenames, cacheKey, cacheMillis, new Callable<List<Pair<String, Resource>>>() {
            @Override
            public List<Pair<String, Resource>> call() throws Exception {
                List<String> filenames = new ArrayList<String>(7);
                // Most specific first: filenames for the requested locale.
                filenames.addAll(calculateFilenamesForLocale(basename, locale));
                if (fallbackToSystemLocale && !locale.equals(Locale.getDefault())) {
                    List<String> fallbackFilenames = calculateFilenamesForLocale(basename, Locale.getDefault());
                    for (String fallbackFilename : fallbackFilenames) {
                        if (!filenames.contains(fallbackFilename)) {
                            // Entry for fallback locale that isn't already in filenames list.
                            filenames.add(fallbackFilename);
                        }
                    }
                }
                // Finally the plain basename (the default bundle file).
                filenames.add(basename);
                // Resolve each filename to its resource once, up front.
                List<Pair<String, Resource>> filenamesAndResources = new ArrayList<Pair<String, Resource>>(filenames.size());
                for (String filename : filenames) {
                    filenamesAndResources.add(new Pair<String, Resource>(filename, locateResource(filename)));
                }
                return filenamesAndResources;
            }
        });
    }
}
public class SwaptionAnalyticApproximationRebonato {

    /**
     * This function calculates the partial derivative <i>d log(S) / d log(L<sub>k</sub>)</i> for
     * a given swap rate with respect to a vector of forward rates (on a given forward rate tenor).
     * It also returns some useful other quantities like the corresponding discount factors and swap annuities.
     *
     * @param liborPeriodDiscretization The libor period discretization.
     * @param discountCurveInterface The discount curve. If this parameter is null, the discount curve will be calculated from the forward curve.
     * @param forwardCurveInterface The forward curve.
     * @param swapTenor The swap tenor.
     * @return A map containing the partial derivatives (key "values"), the discount factors (key "discountFactors") and the annuities (key "swapAnnuities") as vectors of double[] (indexed by forward rate tenor index starting at swap start)
     */
    public static Map<String, double[]> getLogSwaprateDerivative(TimeDiscretizationInterface liborPeriodDiscretization, DiscountCurveInterface discountCurveInterface, ForwardCurveInterface forwardCurveInterface, double[] swapTenor) {
        /*
         * Small workaround for the case that the discount curve is not set. This part will be removed later.
         */
        AnalyticModel model = null;
        if (discountCurveInterface == null) {
            discountCurveInterface = new DiscountCurveFromForwardCurve(forwardCurveInterface.getName());
            model = new AnalyticModel(new CurveInterface[] { forwardCurveInterface, discountCurveInterface });
        }
        double swapStart = swapTenor[0];
        double swapEnd = swapTenor[swapTenor.length - 1];
        // Get the indices of the swap start and end on the forward rate tenor
        int swapStartIndex = liborPeriodDiscretization.getTimeIndex(swapStart);
        int swapEndIndex = liborPeriodDiscretization.getTimeIndex(swapEnd);
        // Precalculate forward rates and discount factors. Note: the swap contains swapEndIndex-swapStartIndex forward rates
        double[] forwardRates = new double[swapEndIndex - swapStartIndex + 1];
        double[] discountFactors = new double[swapEndIndex - swapStartIndex + 1];
        // Calculate discount factor at swap start
        discountFactors[0] = discountCurveInterface.getDiscountFactor(model, swapStart);
        // Calculate discount factors for swap period ends (used for swap annuity)
        for (int liborPeriodIndex = swapStartIndex; liborPeriodIndex < swapEndIndex; liborPeriodIndex++) {
            double libor = forwardCurveInterface.getForward(null, liborPeriodDiscretization.getTime(liborPeriodIndex));
            forwardRates[liborPeriodIndex - swapStartIndex] = libor;
            discountFactors[liborPeriodIndex - swapStartIndex + 1] = discountCurveInterface.getDiscountFactor(model, liborPeriodDiscretization.getTime(liborPeriodIndex + 1));
        }
        // Precalculate swap annuities (accumulated backwards from the swap end)
        double[] swapAnnuities = new double[swapTenor.length - 1];
        double swapAnnuity = 0.0;
        for (int swapPeriodIndex = swapTenor.length - 2; swapPeriodIndex >= 0; swapPeriodIndex--) {
            int periodEndIndex = liborPeriodDiscretization.getTimeIndex(swapTenor[swapPeriodIndex + 1]);
            swapAnnuity += discountFactors[periodEndIndex - swapStartIndex] * (swapTenor[swapPeriodIndex + 1] - swapTenor[swapPeriodIndex]);
            swapAnnuities[swapPeriodIndex] = swapAnnuity;
        }
        // Count how many libor periods fall into each swap period.
        int[] liborPeriodsInSwapPeriods = new int[swapTenor.length - 1];
        Arrays.fill(liborPeriodsInSwapPeriods, 0);
        for (int liborPeriodIndex = swapStartIndex, swapPeriodIndex = 0; liborPeriodIndex < swapEndIndex; liborPeriodIndex++) {
            if (liborPeriodDiscretization.getTime(liborPeriodIndex) >= swapTenor[swapPeriodIndex + 1]) {
                swapPeriodIndex++;
            }
            liborPeriodsInSwapPeriods[swapPeriodIndex]++;
        }
        // Precalculate weights: The formula is the one by Rebonato (note: this formula can be improved easily, we provide it rather for testing / illustration.
        double[] swapCovarianceWeights = new double[swapEndIndex - swapStartIndex];
        for (int liborPeriodIndex = swapStartIndex, swapPeriodIndex = 0; liborPeriodIndex < swapEndIndex; liborPeriodIndex++) {
            if (liborPeriodDiscretization.getTime(liborPeriodIndex) >= swapTenor[swapPeriodIndex + 1]) {
                swapPeriodIndex++;
            }
            double swapAnnuityCurrent = swapAnnuities[swapPeriodIndex];
            double swapAnnuityNext = swapPeriodIndex < swapAnnuities.length - 1 ? swapAnnuities[swapPeriodIndex + 1] : 0;
            // Weight: annuity difference over total annuity, spread equally over the libor periods of the swap period.
            swapCovarianceWeights[liborPeriodIndex - swapStartIndex] = (swapAnnuityCurrent - swapAnnuityNext) / swapAnnuity / liborPeriodsInSwapPeriods[swapPeriodIndex];
        }
        // Return results
        Map<String, double[]> results = new HashMap<String, double[]>();
        results.put("values", swapCovarianceWeights);
        results.put("discountFactors", discountFactors);
        results.put("swapAnnuities", swapAnnuities);
        return results;
    }
}
public class XFunctionTypeRefImplCustom {

    /**
     * Returns the JVM type for this function type reference, lazily installing
     * and resolving a proxy when no type has been set yet.
     *
     * TODO should we update the type as soon as the number of argument types changes?
     */
    @Override
    public JvmType getType() {
        if (this.type == null) {
            // make sure scoping has taken place and installed an IJvmTypeProvider
            // if (returnType != null)
            //     returnType.getType();
            // Install a proxy whose URI encodes whether this reference denotes a
            // procedure or a function, then try to resolve it immediately.
            JvmType newType = TypesFactory.eINSTANCE.createJvmVoid();
            ((InternalEObject) newType).eSetProxyURI(computeTypeUri(isProcedure()));
            type = (JvmType) eResolveProxy((InternalEObject) newType);
        }
        return super.getType();
    }
}
public class HttpRequestUtils { /** * parse a string as number and throws exception if parsed value is not a valid integer * @ throws ExecutorManagerException if paramName is not a valid integer */ public static boolean validateIntegerParam ( final Map < String , String > params , final String paramName ) throws ExecutorManagerException { } }
if ( params != null && params . containsKey ( paramName ) && ! StringUtils . isNumeric ( params . get ( paramName ) ) ) { throw new ExecutorManagerException ( paramName + " should be an integer" ) ; } return true ;
public class CommonOps_DDRM { /** * < p > Performs matrix scalar subtraction : < br > * < br > * c = val - a < br > * c < sub > ij < / sub > = val - a < sub > ij < / sub > < br > * @ param val ( input ) The value that ' s subtracted to each element . * @ param a ( input ) A matrix . Not modified . * @ param c ( Output ) A matrix . Modified . */ public static void subtract ( double val , DMatrixD1 a , DMatrixD1 c ) { } }
c . reshape ( a . numRows , a . numCols ) ; final int length = a . getNumElements ( ) ; for ( int i = 0 ; i < length ; i ++ ) { c . data [ i ] = val - a . data [ i ] ; }
public class BsfUtils { /** * Selects the Date Pattern to use based on the given Locale if the input * format is null * @ param locale * Locale ( may be the result of a call to selectLocale ) * @ param momentJSFormat * Input format String * @ return moment . js Date Pattern eg . DD / MM / YYYY */ public static String selectMomentJSDateTimeFormat ( Locale locale , String momentJSFormat , boolean withDate , boolean withTime ) { } }
if ( momentJSFormat == null ) { String dateFormat = "" ; if ( withDate ) { dateFormat = ( ( SimpleDateFormat ) DateFormat . getDateInstance ( DateFormat . SHORT , locale ) ) . toPattern ( ) ; } String timeFormat = "" ; if ( withTime ) { timeFormat = ( ( SimpleDateFormat ) DateFormat . getTimeInstance ( DateFormat . MEDIUM , locale ) ) . toPattern ( ) ; } // Since DateFormat . SHORT is silly , return a smart format if ( dateFormat . equals ( "M/d/yy" ) ) { dateFormat = "MM/dd/yyyy" ; } else if ( dateFormat . equals ( "d/M/yy" ) ) { dateFormat = "dd/MM/yyyy" ; } String result = LocaleUtils . javaToMomentFormat ( ( dateFormat + " " + timeFormat ) . trim ( ) ) ; // System . out . println ( result ) ; return result ; } else { return momentJSFormat ; }
public class PrincipalUserDto { /** * Converts list of alert entity objects to list of alertDto objects . * @ param users alerts List of alert entities . Cannot be null . * @ return List of alertDto objects . * @ throws WebApplicationException If an error occurs . */ public static List < PrincipalUserDto > transformToDto ( List < PrincipalUser > users ) { } }
if ( users == null ) { throw new WebApplicationException ( "Null entity object cannot be converted to Dto object." , Status . INTERNAL_SERVER_ERROR ) ; } List < PrincipalUserDto > result = new ArrayList < > ( ) ; for ( PrincipalUser user : users ) { result . add ( transformToDto ( user ) ) ; } return result ;
public class ByteArray {
    /**
     * Converts a byte array to a long, big-endian. Only the first eight bytes
     * are consumed; shorter arrays occupy the low-order bytes of the result.
     *
     * @param value the array to convert; {@code null} yields {@code 0}
     * @return the long value
     */
    public static long fromBytes(byte[] value) {
        if (value == null) {
            return 0L;
        }
        long result = 0L;
        final int count = Math.min(value.length, 8);
        for (int index = 0; index < count; index++) {
            // Shift makes room for the next byte; mask widens it unsigned.
            result = (result << 8) | (value[index] & 0xffL);
        }
        return result;
    }
}
public class URIUtils { /** * Construct a new uri by replacing query parameters in initialUri with the query parameters provided . * @ param initialUri the initial / template URI * @ param queryParams the new query parameters . */ public static URI setQueryParams ( final URI initialUri , final Multimap < String , String > queryParams ) { } }
StringBuilder queryString = new StringBuilder ( ) ; for ( Map . Entry < String , String > entry : queryParams . entries ( ) ) { if ( queryString . length ( ) > 0 ) { queryString . append ( "&" ) ; } queryString . append ( entry . getKey ( ) ) . append ( "=" ) . append ( entry . getValue ( ) ) ; } try { if ( initialUri . getHost ( ) == null && initialUri . getAuthority ( ) != null ) { return new URI ( initialUri . getScheme ( ) , initialUri . getAuthority ( ) , initialUri . getPath ( ) , queryString . toString ( ) , initialUri . getFragment ( ) ) ; } else { return new URI ( initialUri . getScheme ( ) , initialUri . getUserInfo ( ) , initialUri . getHost ( ) , initialUri . getPort ( ) , initialUri . getPath ( ) , queryString . toString ( ) , initialUri . getFragment ( ) ) ; } } catch ( URISyntaxException e ) { throw ExceptionUtils . getRuntimeException ( e ) ; }
public class ThinTableModel { /** * Change the tableheader to display this sort column and order . */ public void setSortedByColumn ( JTableHeader tableHeader , int iViewColumn , boolean bOrder ) { } }
if ( ! ( tableHeader . getDefaultRenderer ( ) instanceof SortableHeaderRenderer ) ) tableHeader . setDefaultRenderer ( new SortableHeaderRenderer ( tableHeader . getDefaultRenderer ( ) ) ) ; // Set up header renderer the first time ( ( SortableHeaderRenderer ) tableHeader . getDefaultRenderer ( ) ) . setSortedByColumn ( tableHeader , iViewColumn , bOrder ) ;
public class CssEscape { /** * Perform a CSS String level 1 ( only basic set ) < strong > escape < / strong > operation * on a < tt > String < / tt > input . * < em > Level 1 < / em > means this method will only escape the CSS String basic escape set : * < ul > * < li > The < em > Backslash Escapes < / em > : * < tt > & # 92 ; & quot ; < / tt > ( < tt > U + 0022 < / tt > ) and * < tt > & # 92 ; & # 39 ; < / tt > ( < tt > U + 0027 < / tt > ) . * < / li > * < li > * Two ranges of non - displayable , control characters : < tt > U + 0000 < / tt > to < tt > U + 001F < / tt > * and < tt > U + 007F < / tt > to < tt > U + 009F < / tt > . * < / li > * < / ul > * This escape will be performed by using Backslash escapes whenever possible . For escaped * characters that do not have an associated Backslash , default to < tt > & # 92 ; FF < / tt > * Hexadecimal Escapes . * This method calls { @ link # escapeCssString ( String , CssStringEscapeType , CssStringEscapeLevel ) } * with the following preconfigured values : * < ul > * < li > < tt > type < / tt > : * { @ link CssStringEscapeType # BACKSLASH _ ESCAPES _ DEFAULT _ TO _ COMPACT _ HEXA } < / li > * < li > < tt > level < / tt > : * { @ link CssStringEscapeLevel # LEVEL _ 1 _ BASIC _ ESCAPE _ SET } < / li > * < / ul > * This method is < strong > thread - safe < / strong > . * @ param text the < tt > String < / tt > to be escaped . * @ return The escaped result < tt > String < / tt > . As a memory - performance improvement , will return the exact * same object as the < tt > text < / tt > input argument if no escaping modifications were required ( and * no additional < tt > String < / tt > objects will be created during processing ) . Will * return < tt > null < / tt > if input is < tt > null < / tt > . */ public static String escapeCssStringMinimal ( final String text ) { } }
return escapeCssString ( text , CssStringEscapeType . BACKSLASH_ESCAPES_DEFAULT_TO_COMPACT_HEXA , CssStringEscapeLevel . LEVEL_1_BASIC_ESCAPE_SET ) ;
public class FileLastModifiedFilter { /** * Factory method to create an instance of the { @ link FileLastModifiedFilter } that evaluates and filters { @ link File } s * based on whether they were last modified before the given date / time . * @ param dateTime { @ link LocalDateTime } used to filter { @ link File } ' s that were last modified before this date / time . * @ return an instance of the { @ link FileLastModifiedFilter } . * @ see java . time . LocalDateTime * @ see # before ( long ) */ public static FileLastModifiedFilter before ( LocalDateTime dateTime ) { } }
return before ( dateTime . atZone ( ZoneId . systemDefault ( ) ) . toInstant ( ) . toEpochMilli ( ) ) ;
public class ClassWriter {
    /**
     * Given a symbol, return its name-and-type pair (for constant-pool emission).
     *
     * @param sym the symbol whose name and type descriptor are needed
     * @return a {@code NameAndType} built from the symbol's field name and its
     *         erased or external type, depending on {@code retrofit}
     */
    NameAndType nameType(Symbol sym) {
        // If we retrofit, then the NameAndType has been read in as is
        // and no change is necessary (use the plain erasure). If we compile
        // normally, the NameAndType is generated from a symbol reference, and
        // the adjustment of adding an additional this$n parameter needs to be
        // made — externalType() performs that adjustment.
        return new NameAndType(fieldName(sym),
                retrofit ? sym.erasure(types) : sym.externalType(types), types);
    }
}