signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class RestSignUtil {
    /**
     * Generates the API request signature for the given request parameters.
     *
     * @param param     the request wrapper carrying the common fields (accesskey, timestamp)
     * @param secretkey the caller's secret key, mixed into the signed parameter map
     * @param <T>       the type of the request payload
     * @return the signature computed by {@code generateSignByRule}
     */
    public static <T> String generateSign(RestRequestParam<T> param, String secretkey) {
        // TreeMap keeps the parameters sorted by key for a deterministic signature.
        TreeMap<String, String> paramsMap = new TreeMap<String, String>();
        paramsMap.put("accesskey", param.getCommon().getAccesskey());
        paramsMap.put("timestamp", Long.toString(param.getCommon().getTimestamp()));
        // NOTE(review): the data entries are excluded from the signature (the loop
        // below was commented out upstream) — confirm this matches the server side.
        // for (Map.Entry<String, String> entry : param.getData())
        //     paramsMap.put(entry.getKey(), entry.getValue().toString());
        paramsMap.put("secretkey", secretkey);
        return generateSignByRule(paramsMap);
    }
}
public class X509Factory {
    /**
     * Parses the data in the given input stream as a sequence of DER encoded
     * X.509 CRLs (in binary or base64 encoded format) OR as a single PKCS#7
     * encoded blob (in binary or base64 encoded format).
     *
     * @param is the stream to read CRL data from
     * @return the parsed CRLs; an empty collection when the stream is exhausted
     *         or the PKCS#7 blob carries no CRLs
     * @throws CRLException if a CRL cannot be parsed
     * @throws IOException  if the stream cannot be read
     */
    private Collection<? extends java.security.cert.CRL> parseX509orPKCS7CRL(InputStream is)
            throws CRLException, IOException {
        Collection<X509CRLImpl> coll = new ArrayList<>();
        byte[] data = readOneBlock(is);
        if (data == null) {
            return new ArrayList<>(0);
        }
        try {
            // First try to interpret the whole block as a single PKCS#7 blob.
            PKCS7 pkcs7 = new PKCS7(data);
            X509CRL[] crls = pkcs7.getCRLs();
            // CRLs are optional in PKCS#7
            if (crls != null) {
                return Arrays.asList(crls);
            } else {
                // no crls provided
                return new ArrayList<>(0);
            }
        } catch (ParsingException e) {
            // Not PKCS#7: fall back to reading a sequence of individual DER CRLs.
            while (data != null) {
                coll.add(new X509CRLImpl(data));
                data = readOneBlock(is);
            }
        }
        return coll;
    }
}
public class FileSystemAdminShellUtils { /** * Checks if the master client service is available . * Throws an exception if fails to determine that the master client service is running . * @ param alluxioConf Alluxio configuration */ public static void checkMasterClientService ( AlluxioConfiguration alluxioConf ) throws IOException { } }
try ( CloseableResource < FileSystemMasterClient > client = FileSystemContext . create ( ClientContext . create ( alluxioConf ) ) . acquireMasterClientResource ( ) ) { InetSocketAddress address = client . get ( ) . getAddress ( ) ; List < InetSocketAddress > addresses = Arrays . asList ( address ) ; MasterInquireClient inquireClient = new PollingMasterInquireClient ( addresses , ( ) -> new ExponentialBackoffRetry ( 50 , 100 , 2 ) , alluxioConf ) ; inquireClient . getPrimaryRpcAddress ( ) ; } catch ( UnavailableException e ) { throw new IOException ( "Cannot connect to Alluxio leader master." ) ; }
public class InetAddresses {
    /**
     * Returns the Teredo information embedded in a Teredo address.
     *
     * @param ip {@link Inet6Address} to be examined for embedded Teredo information
     * @return extracted {@code TeredoInfo}
     * @throws IllegalArgumentException if the argument is not a valid IPv6 Teredo address
     */
    public static TeredoInfo getTeredoInfo(Inet6Address ip) {
        Preconditions.checkArgument(isTeredoAddress(ip),
            "Address '%s' is not a Teredo address.", toAddrString(ip));
        byte[] bytes = ip.getAddress();
        // Bytes 4-7 carry the Teredo server's IPv4 address.
        Inet4Address server = getInet4Address(Arrays.copyOfRange(bytes, 4, 8));
        // Bytes 8-9 carry the 16-bit flags field (masked to an unsigned value).
        int flags = ByteStreams.newDataInput(bytes, 8).readShort() & 0xffff;
        // Teredo obfuscates the mapped client port, per section 4 of the RFC.
        int port = ~ByteStreams.newDataInput(bytes, 10).readShort() & 0xffff;
        byte[] clientBytes = Arrays.copyOfRange(bytes, 12, 16);
        for (int i = 0; i < clientBytes.length; i++) {
            // Teredo obfuscates the mapped client IP, per section 4 of the RFC.
            clientBytes[i] = (byte) ~clientBytes[i];
        }
        Inet4Address client = getInet4Address(clientBytes);
        return new TeredoInfo(server, client, port, flags);
    }
}
public class CmsCopyMoveDialog {
    /**
     * Preselects the target folder.<p>
     *
     * @param structureId the target structure id
     * @throws CmsException in case the target can not be read or is not a folder
     */
    public void setTargetFolder(CmsUUID structureId) throws CmsException {
        CmsObject cms = A_CmsUI.getCmsObject();
        CmsResource res = cms.readResource(structureId);
        // NOTE(review): "setTargetForlder" is the (misspelled) name of the existing
        // helper; renaming it here would break other callers of that method.
        setTargetForlder(res);
    }
}
public class RandomUtil { /** * 获得指定范围内的随机数 * @ param min 最小数 ( 包含 ) * @ param max 最大数 ( 不包含 ) * @ param scale 保留小数位数 * @ param roundingMode 保留小数的模式 { @ link RoundingMode } * @ return 随机数 * @ since 4.0.8 */ public static double randomDouble ( double min , double max , int scale , RoundingMode roundingMode ) { } }
return NumberUtil . round ( randomDouble ( min , max ) , scale , roundingMode ) . doubleValue ( ) ;
public class Nfs3 {
    /**
     * Issues a wrapped PATHCONF RPC call and returns its response.
     *
     * @param request the PATHCONF request to send
     * @return the populated response
     * @throws IOException if the RPC call fails
     * @see com.emc.ecs.nfsclient.nfs.Nfs#wrapped_getPathconf(com.emc.ecs.nfsclient.nfs.NfsPathconfRequest)
     */
    public Nfs3PathconfResponse wrapped_getPathconf(NfsPathconfRequest request) throws IOException {
        // The handler supplies a fresh response object for the RPC layer to fill.
        NfsResponseHandler<Nfs3PathconfResponse> responseHandler =
                new NfsResponseHandler<Nfs3PathconfResponse>() {
            /* (non-Javadoc)
             * @see com.emc.ecs.nfsclient.rpc.RpcResponseHandler#makeNewResponse()
             */
            protected Nfs3PathconfResponse makeNewResponse() {
                return new Nfs3PathconfResponse();
            }
        };
        _rpcWrapper.callRpcWrapped(request, responseHandler);
        return responseHandler.getResponse();
    }
}
public class UnregisterWebAppVisitorWC {
    /**
     * Unregisters listeners from web container.
     *
     * @throws NullArgumentException if listener is null
     * @see WebAppVisitor#visit(WebAppListener)
     */
    public void visit(final WebAppListener webAppListener) {
        NullArgumentException.validateNotNull(webAppListener, "Web app listener");
        final EventListener listener = webAppListener.getListener();
        if (listener != null) {
            // CHECKSTYLE:OFF
            // Best-effort unregistration: a failure must not abort the overall
            // cleanup, so the exception is logged and deliberately swallowed.
            try {
                webContainer.unregisterEventListener(listener);
            } catch (Exception ignore) {
                LOG.warn("Unregistration exception. Skipping.", ignore);
            }
            // CHECKSTYLE:ON
        }
    }
}
public class NumberStyleHelper {
    /**
     * Append the attributes of the number.
     *
     * @param util an util
     * @param appendable the destination
     * @throws IOException if an I/O error occurs
     */
    public void appendNumberAttribute(final XMLUtil util, final Appendable appendable) throws IOException {
        // Appends the min-integer-digits attribute followed by the grouping attribute.
        this.appendMinIntegerDigitsAttribute(util, appendable);
        this.appendGroupingAttribute(util, appendable);
    }
}
public class PrepareCoordinator {
    /**
     * Starts a transaction.
     *
     * @return {@code true} if the transaction can be started, {@code false} otherwise.
     */
    public boolean startTransaction() {
        EmbeddedTransaction tx = new EmbeddedTransaction(EmbeddedTransactionManager.getInstance());
        tx.setXid(xid);
        LocalTransaction localTransaction =
            transactionTable.getOrCreateLocalTransaction(tx, false, this::newGlobalTransaction);
        if (createGlobalState(localTransaction.getGlobalTransaction()) != Status.OK) {
            // no need to rollback. nothing is enlisted in the transaction.
            transactionTable.removeLocalTransaction(localTransaction);
            return false;
        } else {
            // Global state created successfully: record the transaction and wire
            // it into the per-cache table and invocation context before enlisting.
            this.tx = tx;
            this.localTxInvocationContext = new LocalTxInvocationContext(localTransaction);
            perCacheTxTable.createLocalTx(xid, tx);
            transactionTable.enlistClientTransaction(tx, localTransaction);
            return true;
        }
    }
}
public class Alterable { /** * Set maximum reachable value . The maximum value can not be lower than 0 . Current value clamp between { @ value # MIN } * and max if over max set . * @ param max The maximum reachable value . */ public void setMax ( int max ) { } }
this . max = max ; if ( this . max < Alterable . MIN ) { this . max = Alterable . MIN ; } set ( cur ) ;
public class HessianFromGradient {
    /**
     * Computes the hessian given an image's gradient using a three derivative operator.
     *
     * @param inputDerivX Already computed image x-derivative.
     * @param inputDerivY Already computed image y-derivative.
     * @param derivXX Output second XX partial derivative.
     * @param derivYY Output second YY partial derivative.
     * @param derivXY Output second XY partial derivative.
     * @param border Specifies how the image border is handled. If null the border is not processed.
     */
    public static void hessianThree(GrayF32 inputDerivX, GrayF32 inputDerivY,
                                    GrayF32 derivXX, GrayF32 derivYY, GrayF32 derivXY,
                                    ImageBorder_F32 border) {
        InputSanityCheck.reshapeOneIn(inputDerivX, inputDerivY, derivXX, derivYY, derivXY);
        // XX and XY are produced by differentiating the x-derivative again.
        GradientThree.process(inputDerivX, derivXX, derivXY, border);
        // YY is a vertical convolution of the y-derivative; pick the bordered
        // or border-free variant depending on the caller's choice.
        if (border != null)
            ConvolveImage.vertical(GradientThree.kernelDeriv_F32, inputDerivY, derivYY,
                new ImageBorder1D_F32(BorderIndex1D_Extend.class));
        else
            ConvolveImageNoBorder.vertical(GradientThree.kernelDeriv_F32, inputDerivY, derivYY);
    }
}
public class TzdbZoneRulesCompiler {
    /**
     * Reads a set of TZDB files and builds a single combined data file.
     *
     * @param args the arguments
     */
    public static void main(String[] args) {
        if (args.length < 2) {
            outputHelp();
            return;
        }
        // parse args
        String version = null;
        File baseSrcDir = null;
        File dstDir = null;
        boolean unpacked = false;
        boolean verbose = false;
        // parse options
        int i;
        for (i = 0; i < args.length; i++) {
            String arg = args[i];
            if (arg.startsWith("-") == false) {
                break;
            }
            if ("-srcdir".equals(arg)) {
                if (baseSrcDir == null && ++i < args.length) {
                    baseSrcDir = new File(args[i]);
                    continue;
                }
            } else if ("-dstdir".equals(arg)) {
                if (dstDir == null && ++i < args.length) {
                    dstDir = new File(args[i]);
                    continue;
                }
            } else if ("-version".equals(arg)) {
                if (version == null && ++i < args.length) {
                    version = args[i];
                    continue;
                }
            } else if ("-unpacked".equals(arg)) {
                if (unpacked == false) {
                    unpacked = true;
                    continue;
                }
            } else if ("-verbose".equals(arg)) {
                if (verbose == false) {
                    verbose = true;
                    continue;
                }
            } else if ("-help".equals(arg) == false) {
                System.out.println("Unrecognised option: " + arg);
            }
            // Fall-through (duplicate/incomplete option or -help) prints usage and exits.
            outputHelp();
            return;
        }
        // check source directory
        if (baseSrcDir == null) {
            System.out.println("Source directory must be specified using -srcdir: " + baseSrcDir);
            return;
        }
        if (baseSrcDir.isDirectory() == false) {
            System.out.println("Source does not exist or is not a directory: " + baseSrcDir);
            return;
        }
        dstDir = (dstDir != null ? dstDir : baseSrcDir);
        // parse source file names (everything after the options)
        List<String> srcFileNames = Arrays.asList(Arrays.copyOfRange(args, i, args.length));
        if (srcFileNames.isEmpty()) {
            System.out.println("Source filenames not specified, using default set");
            System.out.println("(africa antarctica asia australasia backward etcetera europe northamerica southamerica)");
            srcFileNames = Arrays.asList("africa", "antarctica", "asia", "australasia",
                "backward", "etcetera", "europe", "northamerica", "southamerica");
        }
        // find source directories to process
        List<File> srcDirs = new ArrayList<File>();
        if (version != null) {
            File srcDir = new File(baseSrcDir, version);
            if (srcDir.isDirectory() == false) {
                System.out.println("Version does not represent a valid source directory : " + srcDir);
                return;
            }
            srcDirs.add(srcDir);
        } else {
            // No version given: process every directory whose name starts with a
            // plausible year (matches the regex below).
            File[] dirs = baseSrcDir.listFiles();
            for (File dir : dirs) {
                if (dir.isDirectory() && dir.getName().matches("[12][0-9][0-9][0-9][A-Za-z0-9._-]+")) {
                    srcDirs.add(dir);
                }
            }
        }
        if (srcDirs.isEmpty()) {
            System.out.println("Source directory contains no valid source folders: " + baseSrcDir);
            return;
        }
        // check destination directory
        if (dstDir.exists() == false && dstDir.mkdirs() == false) {
            System.out.println("Destination directory could not be created: " + dstDir);
            return;
        }
        if (dstDir.isDirectory() == false) {
            System.out.println("Destination is not a directory: " + dstDir);
            return;
        }
        process(srcDirs, srcFileNames, dstDir, unpacked, verbose);
    }
}
public class MESubscription {
    /**
     * Updates the ControllableProxySubscription.
     * We go through a full blown MatchSpace add and remove operation
     * to ensure that MatchSpace caches get re-synched.
     *
     * @param transaction the {@link Transaction} under which the event has occurred
     * @throws SevereMessageStoreException if the superclass post-commit processing fails
     */
    public void eventPostCommitUpdate(Transaction transaction) throws SevereMessageStoreException {
        // NOTE(review): delegates to the superclass ADD handler rather than an
        // update handler — presumably intentional, since the update is modelled
        // as remove-then-add below; confirm against the superclass contract.
        super.eventPostCommitAdd(transaction);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "eventPostCommitUpdate", transaction);
        // Remove the current CPS from the MatchSpace
        if (_proxyHandler != null) {
            _destination.getSubscriptionIndex().remove(_controllableProxySubscription);
            _proxyHandler.getMessageProcessor().getMessageProcessorMatching()
                .removePubSubOutputHandlerMatchTarget(_controllableProxySubscription);
        }
        // Add the CPS to the MatchSpace.
        try {
            if (_proxyHandler != null) {
                _controllableProxySubscription =
                    _proxyHandler.getMessageProcessor().getMessageProcessorMatching()
                        .addPubSubOutputHandlerMatchTarget(_handler, _topicSpaceUuid, _topic,
                            _foreignSecuredProxy, _meSubUserId);
            }
        } catch (SIException e) {
            // FFDC
            FFDCFilter.processException(e,
                "com.ibm.ws.sib.processor.proxyhandler.MESubscription.eventPostCommitUpdate",
                "1:738:1.55", this);
            SibTr.exception(tc, e);
            SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                new Object[] { "com.ibm.ws.sib.processor.proxyhandler.MESubscription", "1:745:1.55", e });
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "eventPostCommitUpdate", "SIErrorException");
            // An error at this point is very bad!
            throw new SIErrorException(nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0002",
                new Object[] { "com.ibm.ws.sib.processor.proxyhandler.MESubscription", "1:755:1.55", e },
                null), e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "eventPostCommitUpdate");
    }
}
public class Choice8 {
    /**
     * {@inheritDoc}
     *
     * <p>The first seven cases are passed through unchanged (wrapped lazily);
     * only the eighth case applies the function carried by {@code lazyAppFn}.
     *
     * @param lazyAppFn the lazily-supplied applicative carrying the function to apply
     */
    @Override
    public <I> Lazy<Choice8<A, B, C, D, E, F, G, I>> lazyZip(
            Lazy<? extends Applicative<Function<? super H, ? extends I>, Choice8<A, B, C, D, E, F, G, ?>>> lazyAppFn) {
        return match(a -> lazy(a(a)),
                     b -> lazy(b(b)),
                     c -> lazy(c(c)),
                     d -> lazy(d(d)),
                     e -> lazy(e(e)),
                     f -> lazy(f(f)),
                     g -> lazy(g(g)),
                     h -> lazyAppFn.fmap(choiceF -> choiceF.<I>fmap(f -> f.apply(h)).coerce()));
    }
}
public class JsonParser { /** * 获取JsonObject * @ param key 例如 : country * @ return { @ link JSONObject } */ public JSONObject getObjectUseEval ( String key ) { } }
Object object = eval ( key ) ; return Checker . isNull ( object ) ? null : ( JSONObject ) object ;
public class CmsObject {
    /**
     * Returns the newest URL names for the given structure id for each locale.<p>
     *
     * @param id the structure id
     * @return the list of URL names for each locale
     * @throws CmsException if something goes wrong
     */
    public List<String> getUrlNamesForAllLocales(CmsUUID id) throws CmsException {
        // Delegates to the security manager using the current request context.
        return m_securityManager.readUrlNamesForAllLocales(m_context, id);
    }
}
public class YamlReader { /** * see http : / / yaml . org / type / merge . html */ @ SuppressWarnings ( "unchecked" ) private void mergeMap ( Map < String , Object > dest , Object source ) throws YamlReaderException { } }
if ( source instanceof Collection ) { for ( Object item : ( ( Collection < Object > ) source ) ) mergeMap ( dest , item ) ; } else if ( source instanceof Map ) { Map < String , Object > map = ( Map < String , Object > ) source ; for ( Map . Entry < String , Object > entry : map . entrySet ( ) ) { if ( ! dest . containsKey ( entry . getKey ( ) ) ) dest . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } else throw new YamlReaderException ( "Expected a mapping or a sequence of mappings for a '<<' merge field but found: " + source . getClass ( ) . getSimpleName ( ) ) ;
public class TransformerColl {
    /**
     * Define the transform process.
     *
     * @param method a function takes in each entry from the map
     *        and returns its transformed state.
     * @return collection with objects after the transform
     */
    public Coll via(RFunc2<R, K, V> method) {
        // Adapt the function through Style.$ and delegate to the main overload.
        return via(Style.$(method));
    }
}
public class GoogleChartImageGenerator {
    /**
     * Generates Google bar chart URL for the provided samples.
     *
     * @param title chart title
     * @param unit unit requested for displaying results
     * @param showMaxMin true if additional datasets for max and min values should be shown
     * @param samples stopwatch samples
     * @return URL generating the bar chart
     */
    public static String barChart(String title, SimonUnit unit, boolean showMaxMin, StopwatchSample... samples) {
        // Build a one-shot generator instance and render the chart URL.
        return new GoogleChartImageGenerator(samples, title, unit, showMaxMin).process();
    }
}
public class KearnsVaziraniDFA {
    /**
     * Re-sifts the given encoded transitions against the discrimination tree
     * starting from {@code oldDtTarget} and updates the hypothesis transitions
     * to their new target states.
     *
     * @param transList   transitions encoded as
     *                    {@code (sourceState << StateInfo.INTEGER_WORD_WIDTH) | transIdx}
     * @param oldDtTarget the discrimination tree node the transitions previously targeted
     */
    private void updateTransitions(List<Long> transList,
            AbstractWordBasedDTNode<I, Boolean, StateInfo<I, Boolean>> oldDtTarget) {
        // TODO: replace with primitive specialization
        int numTrans = transList.size();
        final List<Word<I>> transAs = new ArrayList<>(numTrans);
        for (int i = 0; i < numTrans; i++) {
            long encodedTrans = transList.get(i);
            // Decode source state (high bits) and transition index (low bits).
            int sourceState = (int) (encodedTrans >> StateInfo.INTEGER_WORD_WIDTH);
            int transIdx = (int) (encodedTrans);
            StateInfo<I, Boolean> sourceInfo = stateInfos.get(sourceState);
            I symbol = alphabet.getSymbol(transIdx);
            transAs.add(sourceInfo.accessSequence.append(symbol));
        }
        // Sift all access sequences as one batch from the old node.
        final List<StateInfo<I, Boolean>> succs = sift(Collections.nCopies(numTrans, oldDtTarget), transAs);
        for (int i = 0; i < numTrans; i++) {
            long encodedTrans = transList.get(i);
            int sourceState = (int) (encodedTrans >> StateInfo.INTEGER_WORD_WIDTH);
            int transIdx = (int) (encodedTrans);
            setTransition(sourceState, transIdx, succs.get(i));
        }
    }
}
public class DbxClientV1 {
    /**
     * A more generic version of {@link #getDelta}. You provide a <em>collector</em>,
     * which lets you process the delta entries as they arrive over the network.
     */
    public <C> DbxDeltaC<C> getDeltaC(Collector<DbxDeltaC.Entry<DbxEntry>, C> collector,
            /*@Nullable*/ String cursor, boolean includeMediaInfo) throws DbxException {
        // Delegates with a null third argument — presumably "no path filter";
        // see _getDeltaC for the full parameter list.
        return _getDeltaC(collector, cursor, null, includeMediaInfo);
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcDocumentConfidentialityEnum() {
        // Lazily resolve the EEnum from the registered package; classifier
        // index 813 is fixed by the generated metamodel.
        if (ifcDocumentConfidentialityEnumEEnum == null) {
            ifcDocumentConfidentialityEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
                .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(813);
        }
        return ifcDocumentConfidentialityEnumEEnum;
    }
}
public class ApiOvhService {
    /**
     * Suspend the service. The service won't be accessible, but you will still be charged for it.
     *
     * REST: POST /service/{serviceId}/suspend
     *
     * @param serviceId [required] The internal ID of your service
     * @throws IOException if the HTTP call fails
     *
     * API beta
     */
    public void serviceId_suspend_POST(Long serviceId) throws IOException {
        String qPath = "/service/{serviceId}/suspend";
        // Substitute the path parameter, then issue the POST with no request body.
        StringBuilder sb = path(qPath, serviceId);
        exec(qPath, "POST", sb.toString(), null);
    }
}
public class CensoredDescriptives {
    /**
     * Calculates the median survival time: finds where the survival probability
     * "Sti" crosses 0.5 and linearly interpolates between the bracketing points.
     *
     * @param survivalFunction 2D map: time point -> row containing the "Sti"
     *        survival probability (rows with null "Sti" are skipped as censored)
     * @return the (interpolated) median time
     * @throws IllegalArgumentException if the collection is empty or no valid
     *         bracketing points can be found
     */
    public static double median(AssociativeArray2D survivalFunction) {
        Double ApointTi = null;
        Double BpointTi = null;
        int n = survivalFunction.size();
        if (n == 0) {
            throw new IllegalArgumentException("The provided collection can't be empty.");
        }
        for (Map.Entry<Object, AssociativeArray> entry : survivalFunction.entrySet()) {
            Object ti = entry.getKey();
            AssociativeArray row = entry.getValue();
            Double Sti = row.getDouble("Sti");
            if (Sti == null) {
                continue; // skip censored
            }
            Double point = Double.valueOf(ti.toString());
            if (Math.abs(Sti - 0.5) < 0.0000001) {
                return point; // we found exactly the point
            } else if (Sti > 0.5) {
                ApointTi = point; // keep the point just before the 0.5 probability
            } else {
                BpointTi = point; // keep the first point after the 0.5 probability and exit loop
                break;
            }
        }
        if (n == 1) {
            return (ApointTi != null) ? ApointTi : BpointTi;
        } else if (ApointTi == null || BpointTi == null) {
            throw new IllegalArgumentException("Invalid A and B points."); // we should never get here
        }
        // NOTE(review): these lookups key on ApointTi.toString()/BpointTi.toString(),
        // which assumes the map keys stringify identically to the parsed Double
        // values (e.g. "1.0" rather than "1") — confirm against how the map is built.
        double ApointTiValue = TypeInference.toDouble(survivalFunction.get2d(ApointTi.toString(), "Sti"));
        double BpointTiValue = TypeInference.toDouble(survivalFunction.get2d(BpointTi.toString(), "Sti"));
        // Linear interpolation between A (Sti > 0.5) and B (Sti < 0.5) at Sti = 0.5.
        double median = BpointTi - (BpointTiValue - 0.5) * (BpointTi - ApointTi) / (BpointTiValue - ApointTiValue);
        return median;
    }
}
public class AbstractAlipay { /** * Mobile SDK join the fields with quote , which is not documented at all . * @ param p * @ return */ protected String signRSAWithQuote ( final List < StringPair > p ) { } }
String param = join ( p , false , true ) ; String sign = rsaSign ( param ) ; return sign ;
public class AnimaQuery { /** * generate " in " statement , simultaneous setting value * @ param column column name * @ param args in param values * @ param < S > * @ return AnimaQuery */ public < S > AnimaQuery < T > in ( String column , List < S > args ) { } }
return this . in ( column , args . toArray ( ) ) ;
public class LocalTIDTable {
    /**
     * Remove the given local tid from the map.
     * This method should be called once a transaction has completed.
     *
     * @param localTID The local TID to remove from the table
     */
    public static void removeLocalTID(int localTID) {
        if (tc.isEntryEnabled()) Tr.entry(tc, "removeLocalTID", localTID);
        localTIDMap.remove(localTID);
        if (tc.isEntryEnabled()) Tr.exit(tc, "removeLocalTID");
    }
}
public class DenseMatrix {
    /**
     * Creates a {@link DenseMatrix} of the given 1D {@code array} w/o
     * copying the underlying array.
     *
     * @param rows    number of rows
     * @param columns number of columns
     * @param array   backing array, used directly (not copied)
     * @return the matrix backed by {@code array}
     */
    public static DenseMatrix from1DArray(int rows, int columns, double[] array) {
        return Basic1DMatrix.from1DArray(rows, columns, array);
    }
}
public class ScalableBloomFilter { /** * Adds the given element to the set managed by this { @ link BloomFilter } . * @ param element { @ link Object element } to add to this { @ link BloomFilter } . * @ see # accept ( Object ) */ @ Override @ NullSafe public synchronized void add ( T element ) { } }
Optional . ofNullable ( element ) . ifPresent ( it -> { int hashCode = it . hashCode ( ) ; int index = ( hashCode % getScale ( ) ) ; BloomFilter < T > bloomFilter = resolveBloomFilter ( index ) ; bloomFilter . add ( it ) ; } ) ;
public class W3CDateFormat {
    /**
     * This is what you override when you extend DateFormat; use {@link DateFormat#format(Date)} instead.
     */
    @Override
    public StringBuffer format(Date date, StringBuffer toAppendTo, FieldPosition pos) {
        boolean includeTimeZone = pattern.includeTimeZone;
        // AUTO decides per-date whether the time zone suffix is needed.
        if (pattern == Pattern.AUTO) {
            includeTimeZone = autoFormat(date);
        }
        super.format(date, toAppendTo, pos);
        // Rewrite the RFC 822 style zone in the buffer to W3C style in place.
        if (includeTimeZone) convertRfc822TimeZoneToW3c(toAppendTo);
        return toAppendTo;
    }
}
public class PostgreSQLLiaison {
    /**
     * From DatabaseLiaison: drops the PostgreSQL sequence backing the given
     * table/column generator, if it exists.
     *
     * @param conn   open connection to execute against
     * @param table  table whose generator sequence should be dropped
     * @param column column whose generator sequence should be dropped
     * @throws SQLException if the statement fails
     */
    public void deleteGenerator(Connection conn, String table, String column) throws SQLException {
        // NOTE(review): table/column are concatenated into a quoted SQL identifier;
        // they are assumed to come from trusted schema metadata, not user input.
        executeQuery(conn, "drop sequence if exists \"" + table + "_" + column + "_seq\"");
    }
}
public class AbstractHistogram {
    /**
     * Get the value at a given percentile.
     * When the given percentile is &gt; 0.0, the value returned is the value that the given
     * percentage of the overall recorded value entries in the histogram are either smaller than
     * or equivalent to. When the given percentile is 0.0, the value returned is the value that all value
     * entries in the histogram are either larger than or equivalent to.
     * Note that two values are "equivalent" in this statement if
     * {@link org.HdrHistogram_voltpatches.AbstractHistogram#valuesAreEquivalent} would return true.
     *
     * @param percentile The percentile for which to return the associated value
     * @return The value that the given percentage of the overall recorded value entries in the
     * histogram are either smaller than or equivalent to. When the percentile is 0.0, returns the
     * value that all value entries in the histogram are either larger than or equivalent to.
     */
    public long getValueAtPercentile(final double percentile) {
        final double requestedPercentile = Math.min(percentile, 100.0); // Truncate down to 100%
        long countAtPercentile = (long) (((requestedPercentile / 100.0) * getTotalCount()) + 0.5); // round to nearest
        countAtPercentile = Math.max(countAtPercentile, 1); // Make sure we at least reach the first recorded entry
        long totalToCurrentIndex = 0;
        for (int i = 0; i < countsArrayLength; i++) {
            totalToCurrentIndex += getCountAtIndex(i);
            if (totalToCurrentIndex >= countAtPercentile) {
                long valueAtIndex = valueFromIndex(i);
                // percentile 0.0 reports the lowest equivalent value at the first
                // recorded entry; any other percentile reports the highest
                // equivalent value at the crossing index.
                return (percentile == 0.0) ? lowestEquivalentValue(valueAtIndex)
                        : highestEquivalentValue(valueAtIndex);
            }
        }
        return 0;
    }
}
public class ServiceImpl {
    /**
     * {@inheritDoc}
     *
     * <p>Unmarshals the XML-RPC parameter vectors, executes the specification
     * against the given system under test, and returns the marshalled result.
     */
    public Vector<Object> execute(Vector<Object> runnerParams, Vector<Object> sutParams,
            Vector<Object> specificationParams, boolean implemented, String sections, String locale) {
        Runner runner = XmlRpcDataMarshaller.toRunner(runnerParams);
        // To prevent call forwarding
        runner.setServerName(null);
        runner.setServerPort(null);
        SystemUnderTest systemUnderTest = XmlRpcDataMarshaller.toSystemUnderTest(sutParams);
        Specification specification = XmlRpcDataMarshaller.toSpecification(specificationParams);
        Execution exe = runner.execute(specification, systemUnderTest, implemented, sections, locale);
        return exe.marshallize();
    }
}
public class PDIndexerManager {
    /**
     * Queue a single work item of any type. If the item is already in the queue,
     * it is ignored.
     *
     * @param aWorkItem Work item to be queued. May not be <code>null</code>.
     * @return {@link EChange#CHANGED} if it was queued
     */
    @Nonnull
    private EChange _queueUniqueWorkItem(@Nonnull final IIndexerWorkItem aWorkItem) {
        ValueEnforcer.notNull(aWorkItem, "WorkItem");
        // Check for duplicate under the write lock; Set.add doubles as the test.
        m_aRWLock.writeLock().lock();
        try {
            if (!m_aUniqueItems.add(aWorkItem)) {
                LOGGER.info("Ignoring work item " + aWorkItem.getLogText() + " because it is already in the queue/re-index list!");
                return EChange.UNCHANGED;
            }
        } finally {
            m_aRWLock.writeLock().unlock();
        }
        // Queue it
        m_aIndexerWorkQueue.queueObject(aWorkItem);
        LOGGER.info("Queued work item " + aWorkItem.getLogText());
        // Remove the entry from the dead list to avoid spamming the dead list
        if (m_aDeadList.getAndRemoveEntry(x -> x.getWorkItem().equals(aWorkItem)) != null)
            LOGGER.info("Removed the new work item " + aWorkItem.getLogText() + " from the dead list");
        return EChange.CHANGED;
    }
}
public class ScriptController {
    /**
     * Returns all scripts.
     *
     * @param model Spring MVC model (unused here)
     * @param type  optional to specify type of script to return
     * @return jqGrid-shaped JSON map keyed by "scripts"
     * @throws Exception if the script service fails
     */
    @RequestMapping(value = "/api/scripts", method = RequestMethod.GET)
    public @ResponseBody HashMap<String, Object> getScripts(Model model,
            @RequestParam(required = false) Integer type) throws Exception {
        Script[] scripts = ScriptService.getInstance().getScripts(type);
        return Utils.getJQGridJSON(scripts, "scripts");
    }
}
public class AstNode { /** * Returns the string name for this operator . * @ param op the token type , e . g . { @ link Token # ADD } or { @ link Token # TYPEOF } * @ return the source operator string , such as " + " or " typeof " */ public static String operatorToString ( int op ) { } }
String result = operatorNames . get ( op ) ; if ( result == null ) throw new IllegalArgumentException ( "Invalid operator: " + op ) ; return result ;
public class ContextHandlerBuilder { /** * < p > Adds a request handler relative to the context of this builder . < / p > * < p > Note that handlers are executed in the order added to the builder , but all async * handlers are executed before synchronous handlers . < / p > * @ param handler The handler to add . * @ return The current Mu - Server Handler . * @ see # addHandler ( Method , String , RouteHandler ) */ public ContextHandlerBuilder addHandler ( MuHandler handler ) { } }
if ( handler != null ) { handler = getContextualHandlerForResourceHandler ( handler ) ; handlers . add ( handler ) ; } return this ;
public class PcapAddr { /** * Getting interface address . * @ return returns interface address . */ public SockAddr getAddr ( ) { } }
SockAddr sockAddr = null ; if ( this . addr != null ) { sockAddr = new SockAddr ( this . addr . getSaFamily ( ) . getValue ( ) , this . addr . getData ( ) ) ; } return sockAddr ;
public class CexIOAdapters { /** * From CEX API < a href = " https : / / cex . io / rest - api # / definitions / OrderStatus " > documentation < / a > < br > * Order status can assume follow values ( ' d ' = done , fully executed OR ' c ' = canceled , not * executed OR ' cd ' = cancel - done , partially executed OR ' a ' = active , created ) * @ param cexIOOrder cex raw order * @ return OrderStatus */ private static Order . OrderStatus adaptOrderStatus ( CexIOOpenOrder cexIOOrder ) { } }
if ( "c" . equalsIgnoreCase ( cexIOOrder . status ) ) return Order . OrderStatus . CANCELED ; if ( "d" . equalsIgnoreCase ( cexIOOrder . status ) ) return Order . OrderStatus . FILLED ; if ( "a" . equalsIgnoreCase ( cexIOOrder . status ) ) { try { BigDecimal remains = new BigDecimal ( cexIOOrder . remains ) ; BigDecimal amount = new BigDecimal ( cexIOOrder . amount ) ; if ( remains . compareTo ( BigDecimal . ZERO ) > 0 && remains . compareTo ( amount ) < 0 ) return Order . OrderStatus . PARTIALLY_FILLED ; else return Order . OrderStatus . PENDING_NEW ; } catch ( NumberFormatException ex ) { return Order . OrderStatus . PENDING_NEW ; } } if ( "cd" . equalsIgnoreCase ( cexIOOrder . status ) ) { try { BigDecimal remains = new BigDecimal ( cexIOOrder . remains ) ; BigDecimal amount = new BigDecimal ( cexIOOrder . amount ) ; if ( remains . compareTo ( BigDecimal . ZERO ) > 0 && remains . compareTo ( amount ) < 0 ) return Order . OrderStatus . PARTIALLY_CANCELED ; else return Order . OrderStatus . CANCELED ; } catch ( NumberFormatException ex ) { return Order . OrderStatus . CANCELED ; } } return Order . OrderStatus . UNKNOWN ;
public class Launcher {
    /**
     * Adds an environment variable to the process being created.
     *
     * @param key the key for the variable
     * @param value the value for the variable
     * @return the launcher
     */
    public Launcher addEnvironmentVariable(final String key, final String value) {
        env.put(key, value);
        // Fluent API: return this for chaining.
        return this;
    }
}
public class TerminalLineSettings {
    /**
     * Get the value of a stty property, including the management of a cache.
     *
     * @param name the stty property.
     * @return the stty property value, or -1 if it cannot be determined.
     */
    public int getProperty(String name) {
        checkNotNull(name);
        try {
            // tty properties are cached so we don't have to worry too much about
            // getting term width/height; refresh at most once per second.
            if (config == null || System.currentTimeMillis() - configLastFetched > 1000) {
                config = get("-a");
                configLastFetched = System.currentTimeMillis();
            }
            return this.getProperty(name, config);
        } catch (Exception e) {
            // Best effort: any failure (process invocation, parsing) maps to -1.
            return -1;
        }
    }
}
public class ServerDeserializer { /** * Gson invokes this call - back method during deserialization when it encounters a field of the specified type . * @ param element The Json data being deserialized * @ param type The type of the Object to deserialize to * @ param context The JSON deserialization context * @ return The server */ @ Override public Server deserialize ( JsonElement element , Type type , JsonDeserializationContext context ) throws JsonParseException { } }
JsonObject obj = element . getAsJsonObject ( ) ; JsonElement server = obj . get ( "server" ) ; if ( server != null && server . isJsonObject ( ) ) return gson . fromJson ( server , Server . class ) ; return gson . fromJson ( element , Server . class ) ;
public class FSDirectory { /** * updates quota without verification * callers responsibility is to make sure quota is not exceeded * @ param inodes * @ param numOfINodes * @ param nsDelta * @ param dsDelta */ private void unprotectedUpdateCount ( INode [ ] inodes , int numOfINodes , long nsDelta , long dsDelta ) { } }
for ( int i = 0 ; i < numOfINodes ; i ++ ) { if ( inodes [ i ] . isQuotaSet ( ) ) { // a directory with quota INodeDirectoryWithQuota node = ( INodeDirectoryWithQuota ) inodes [ i ] ; node . updateNumItemsInTree ( nsDelta , dsDelta ) ; } }
public class StaticTypeCheckingSupport { /** * Returns true for expressions of the form x [ . . . ] * @ param expression an expression * @ return true for array access expressions */ protected static boolean isArrayAccessExpression ( Expression expression ) { } }
return expression instanceof BinaryExpression && isArrayOp ( ( ( BinaryExpression ) expression ) . getOperation ( ) . getType ( ) ) ;
public class SeLionSelendroidDriver { /** * Scroll the screen up . The underlying application should have atleast one scroll view belonging to the class * ' android . widget . ScrollView ' . */ public void scrollUp ( ) { } }
logger . entering ( ) ; WebElement webElement = this . findElement ( By . className ( SCROLLVIEW_CLASS ) ) ; swipeUp ( webElement ) ; logger . exiting ( ) ;
public class MwsAQCall {
    /**
     * Create a HttpPost request for this call and add required headers and parameters to it.
     *
     * @return The Post Request.
     */
    private HttpPost createRequest() {
        HttpPost request = new HttpPost(serviceEndpoint.uri);
        try {
            request.addHeader("Content-Type", "application/x-www-form-urlencoded; charset=utf-8");
            request.addHeader("X-Amazon-User-Agent", connection.getUserAgent());
            // Propagate any caller-configured headers from the connection.
            for (Map.Entry<String, String> header : connection.getRequestHeaders().entrySet()) {
                request.addHeader(header.getKey(), header.getValue());
            }
            addRequiredParametersToRequest(request);
        } catch (Exception e) {
            // Release the underlying connection before rethrowing so a failed
            // setup does not leak a pooled HTTP connection.
            request.releaseConnection();
            throw MwsUtl.wrap(e);
        }
        return request;
    }
}
public class Descriptor { /** * Given the class , list up its { @ link PropertyType } s from its public fields / getters . */ private Map < String , PropertyType > buildPropertyTypes ( Class < ? > clazz ) { } }
Map < String , PropertyType > r = new HashMap < > ( ) ; for ( Field f : clazz . getFields ( ) ) r . put ( f . getName ( ) , new PropertyType ( f ) ) ; for ( Method m : clazz . getMethods ( ) ) if ( m . getName ( ) . startsWith ( "get" ) ) r . put ( Introspector . decapitalize ( m . getName ( ) . substring ( 3 ) ) , new PropertyType ( m ) ) ; return r ;
public class Ifc4PackageImpl {
    /**
     * Returns the {@link EClass} for IfcActionRequest, resolving it lazily from the
     * registered Ifc4 package on first access and caching it thereafter.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcActionRequest() {
        if (ifcActionRequestEClass == null) {
            // Classifier index 1 is fixed by the generated package layout; do not reorder.
            ifcActionRequestEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(1);
        }
        return ifcActionRequestEClass;
    }
}
public class JSONObject {
    /**
     * Produce a string from a double. The string "null" will be returned if the
     * number is not finite.
     *
     * @param d A double.
     * @return A String.
     */
    public static String doubleToString(double d) {
        if (Double.isNaN(d) || Double.isInfinite(d)) {
            return "null";
        }
        String text = Double.toString(d);
        // Only plain decimal notation can be trimmed; exponent forms are left untouched.
        boolean plainDecimal = text.indexOf('.') > 0
                && text.indexOf('e') < 0
                && text.indexOf('E') < 0;
        if (plainDecimal) {
            // Shave off trailing zeros and a trailing decimal point, if possible.
            int end = text.length();
            while (end > 0 && text.charAt(end - 1) == '0') {
                end--;
            }
            if (end > 0 && text.charAt(end - 1) == '.') {
                end--;
            }
            text = text.substring(0, end);
        }
        return text;
    }
}
public class TupleGenerator { /** * Returns a set of failure { @ link TestCaseDef test case definitions } for the given function input definition . */ private List < TestCaseDef > getFailureCases ( FunctionInputDef inputDef , VarTupleSet failureTuples , VarTupleSet validTuples ) { } }
logger_ . debug ( "{}: Creating failure test cases" , inputDef ) ; List < TestCaseDef > failureCases = new ArrayList < TestCaseDef > ( ) ; // For each failure input tuple not yet used in a test case . . . Tuple nextUnused ; while ( ( nextUnused = failureTuples . getNextUnused ( ) ) != null ) { // Completed bindings for all variables ? TestCaseDef failureCase = createTestCase ( nextUnused , inputDef , validTuples ) ; if ( failureCase != null ) { // Yes , add new failure test case . failureTuples . used ( nextUnused ) ; failureCases . add ( failureCase ) ; } } logger_ . info ( "{}: Created {} failure test cases" , inputDef , failureCases . size ( ) ) ; return failureCases ;
public class CoordinatorProxyService { /** * Initializes all the Fat clients ( 1 per store ) for the cluster that this * Coordinator talks to . This is invoked once during startup and then every * time the Metadata manager detects changes to the cluster and stores * metadata . * This method is synchronized because we do not want Coordinator Admin * changes to interfere with Async metadata version manager */ private synchronized void initializeAllFatClients ( ) { } }
updateCoordinatorMetadataWithLatestState ( ) ; // get All stores defined in the config file Map < String , Properties > storeClientConfigsMap = storeClientConfigs . getAllConfigsMap ( ) ; for ( StoreDefinition storeDef : this . coordinatorMetadata . getStoresDefs ( ) ) { String storeName = storeDef . getName ( ) ; // Initialize only those stores defined in the client configs file if ( storeClientConfigsMap . get ( storeName ) != null ) { initializeFatClient ( storeName , storeClientConfigsMap . get ( storeName ) ) ; } }
public class FileUtils { /** * Copya los directorios y ficheros de la carpeta origen hasta la carpeta * destino . En el path de destino crea los directorios que sean necesarios . * @ param origDir * directorio origen * @ param destDir * directorio final * @ return true si va bien , false si falla */ public static boolean copyFilesFromDirectoryToDirectory ( File origDir , File destDir ) { } }
if ( ! origDir . exists ( ) ) { return false ; } if ( ! destDir . exists ( ) ) { destDir . mkdirs ( ) ; } File array [ ] = origDir . listFiles ( ) ; if ( array != null ) { for ( File file : array ) { File destFile = new File ( destDir , file . getName ( ) ) ; if ( file . isDirectory ( ) ) { copyFilesFromDirectoryToDirectory ( file , destFile ) ; } else { if ( ! copyFile ( file , destFile ) ) { return false ; } } } } else { return false ; } return true ;
public class RequestHttpBase { /** * Sets a header , replacing an already - existing header . * @ param key the header key to set . * @ param value the header value to set . */ public void headerOut ( String key , String value ) { } }
Objects . requireNonNull ( value ) ; if ( isOutCommitted ( ) ) { return ; } if ( headerOutSpecial ( key , value ) ) { return ; } setHeaderOutImpl ( key , value ) ;
public class PathUtil {
    /**
     * Returns true if "file" is a subfile or subdirectory of "dir" (or is "dir" itself).
     *
     * For example with the directory /path/to/a, the following return values would occur:
     * /path/to/a/foo.txt - true, /path/to/a/bar/zoo/boo/team.txt - true, /path/to/b/foo.txt - false
     */
    public static boolean isInSubDirectory(File dir, File file) {
        // Walk up the parent chain iteratively instead of recursing.
        for (File current = file; current != null; current = current.getParentFile()) {
            if (current.equals(dir)) {
                return true;
            }
        }
        return false;
    }
}
public class FullDTDReader {
    /**
     * Method used to 'intern()' qualified names; main benefit is reduced
     * memory usage as the name objects are shared. May also slightly
     * speed up Map access, as more often identity comparisons catch
     * matches.
     *
     * Note: it is assumed at this point that access is only from a single
     * thread, and non-recursive -- generally valid assumption as readers are
     * not shared. Restriction is needed since the method is not re-entrant:
     * it uses mAccessKey during the method call.
     */
    private PrefixedName findSharedName(String prefix, String localName) {
        HashMap<PrefixedName, PrefixedName> m = mSharedNames;
        if (mSharedNames == null) {
            // First use: lazily allocate the shared-name cache.
            mSharedNames = m = new HashMap<PrefixedName, PrefixedName>();
        } else {
            // Maybe we already have a shared instance...?
            // mAccessKey is a reusable probe object, mutated in place to avoid
            // allocating a key for every lookup -- this is what makes the method
            // non-re-entrant.
            PrefixedName key = mAccessKey;
            key.reset(prefix, localName);
            key = m.get(key);
            if (key != null) { // gotcha
                return key;
            }
        }
        // Not found; let's create, cache and return it:
        PrefixedName result = new PrefixedName(prefix, localName);
        m.put(result, result);
        return result;
    }
}
public class PeepholeMinimizeConditions {
    /**
     * Try to remove duplicate statements from IF blocks. For example:
     * if (a) {
     *   x = 1;
     *   return true;
     * } else {
     *   x = 2;
     *   return true;
     * }
     * becomes:
     * if (a) {
     *   x = 1;
     * } else {
     *   x = 2;
     * }
     * return true;
     *
     * @param n The IF node to examine.
     */
    private void tryRemoveRepeatedStatements(Node n) {
        // Only run this if variable names are guaranteed to be unique. Otherwise bad things can happen:
        // see PeepholeMinimizeConditionsTest#testDontRemoveDuplicateStatementsWithoutNormalization
        if (!isASTNormalized()) {
            return;
        }
        checkState(n.isIf(), n);
        Node parent = n.getParent();
        if (!NodeUtil.isStatementBlock(parent)) {
            // If the immediate parent is something like a label, we
            // can't move the statement, so bail.
            return;
        }
        Node cond = n.getFirstChild();
        Node trueBranch = cond.getNext();
        Node falseBranch = trueBranch.getNext();
        checkNotNull(trueBranch);
        checkNotNull(falseBranch);
        // Repeatedly hoist the (identical) last statements of both branches.
        // Inserting each hoisted node immediately after n preserves their
        // original relative order: later-hoisted statements land before
        // earlier-hoisted ones.
        while (true) {
            Node lastTrue = trueBranch.getLastChild();
            Node lastFalse = falseBranch.getLastChild();
            if (lastTrue == null || lastFalse == null || !areNodesEqualForInlining(lastTrue, lastFalse)) {
                break;
            }
            lastTrue.detach();
            lastFalse.detach();
            parent.addChildAfter(lastTrue, n);
            reportChangeToEnclosingScope(parent);
        }
    }
}
public class FunctionMultiArgs { /** * Set an argument expression for a function . This method is called by the * XPath compiler . * @ param arg non - null expression that represents the argument . * @ param argNum The argument number index . * @ throws WrongNumberArgsException If a derived class determines that the * number of arguments is incorrect . */ public void setArg ( Expression arg , int argNum ) throws WrongNumberArgsException { } }
if ( argNum < 3 ) super . setArg ( arg , argNum ) ; else { if ( null == m_args ) { m_args = new Expression [ 1 ] ; m_args [ 0 ] = arg ; } else { // Slow but space conservative . Expression [ ] args = new Expression [ m_args . length + 1 ] ; System . arraycopy ( m_args , 0 , args , 0 , m_args . length ) ; args [ m_args . length ] = arg ; m_args = args ; } arg . exprSetParent ( this ) ; }
public class PatternBox {
    /**
     * Pattern for a EntityReference has a member PhysicalEntity that is controlling a state change
     * reaction of another EntityReference. This pattern is different from the original
     * controls-state-change. The controller in this case is not modeled as a controller, but as a
     * participant of the conversion, and it is at both sides.
     *
     * @return the pattern
     */
    public static Pattern controlsStateChangeButIsParticipant() {
        Pattern p = new Pattern(SequenceEntityReference.class, "controller ER");
        // Expand the controller ER through generics down to a concrete (possibly complex) PE.
        p.add(linkedER(true), "controller ER", "controller generic ER");
        p.add(erToPE(), "controller generic ER", "controller simple PE");
        p.add(linkToComplex(), "controller simple PE", "controller PE");
        // The controller PE must appear on BOTH sides of the Conversion.
        p.add(participatesInConv(), "controller PE", "Conversion");
        p.add(left(), "Conversion", "controller PE");
        p.add(right(), "Conversion", "controller PE");
        // The controller ER is not associated with the Conversion in another way.
        p.add(new NOT(new InterToPartER(1)), "Conversion", "controller PE", "controller ER");
        // Add the standard state-change constraints (input PE -> output PE of "changed ER").
        stateChange(p, null);
        // The controller must be distinct from the changed entity and its participants.
        p.add(equal(false), "controller ER", "changed ER");
        p.add(equal(false), "controller PE", "input PE");
        p.add(equal(false), "controller PE", "output PE");
        return p;
    }
}
public class ConfigUtil { /** * Returns an input stream referencing a file that exists somewhere in the classpath . * < p > The supplied classloader is searched first , followed by the system classloader . * @ param path The path to the file , relative to the root of the classpath directory from which * it will be loaded ( e . g . < code > com / foo / bar / foo . gif < / code > ) . */ public static InputStream getStream ( String path , ClassLoader loader ) { } }
// first try the supplied class loader InputStream in = getResourceAsStream ( path , loader ) ; if ( in != null ) { return in ; } // if that didn ' t work , try the system class loader ( but only if it ' s different from the // class loader we just tried ) try { ClassLoader sysloader = ClassLoader . getSystemClassLoader ( ) ; if ( sysloader != loader ) { return getResourceAsStream ( path , loader ) ; } } catch ( AccessControlException ace ) { // can ' t get the system loader , no problem ! } return null ;
public class FindingFilter { /** * For a record to match a filter , one of the values that is specified for this data type property must be the exact * match of the value of the < b > severity < / b > property of the < a > Finding < / a > data type . * @ param severities * For a record to match a filter , one of the values that is specified for this data type property must be * the exact match of the value of the < b > severity < / b > property of the < a > Finding < / a > data type . * @ see Severity */ public void setSeverities ( java . util . Collection < String > severities ) { } }
if ( severities == null ) { this . severities = null ; return ; } this . severities = new java . util . ArrayList < String > ( severities ) ;
public class SegmentsUtil { /** * set double from segments . * @ param segments target segments . * @ param offset value offset . */ public static void setDouble ( MemorySegment [ ] segments , int offset , double value ) { } }
if ( inFirstSegment ( segments , offset , 8 ) ) { segments [ 0 ] . putDouble ( offset , value ) ; } else { setDoubleMultiSegments ( segments , offset , value ) ; }
public class GoogleHadoopFSInputStream { /** * Gets the current position within the file being read . * @ return The current position within the file being read . * @ throws IOException if an IO error occurs . */ @ Override public synchronized long getPos ( ) throws IOException { } }
long pos = channel . position ( ) ; logger . atFine ( ) . log ( "getPos: %d" , pos ) ; return pos ;
public class CompletableFutures { /** * Asynchronously accumulate the results of a batch of Futures which using the supplied mapping function to * convert the data from each Future before reducing them using the supplied supplied Monoid ( a combining BiFunction / BinaryOperator and identity element that takes two * input values of the same type and returns the combined result ) { @ see cyclops2 . Monoids } . * A single Failure results in a Failed Future . * < pre > * { @ code * CompletableFuture < String > future = Future . accumulate ( Seq . of ( CompletableFuture . completedFuture ( 10 ) , CompletableFuture . completedFuture ( 1 ) ) , i - > " " + i , Monoids . stringConcat ) ; * CompletableFuture [ " 101 " ] * < / pre > * @ param fts Collection of Futures to accumulate successes * @ param mapper Mapping function to be applied to the result of each Future * @ param reducer Monoid to combine values from each Future * @ return CompletableFuture asynchronously populated with the accumulate operation */ public static < T , R > CompletableFuture < R > accumulate ( final IterableX < CompletableFuture < T > > fts , final Function < ? super T , R > mapper , final Monoid < R > reducer ) { } }
return sequence ( fts ) . thenApply ( s -> s . map ( mapper ) . reduce ( reducer ) ) ;
public class TaskInfoFetcher {
    /**
     * Add a listener for the final task info. This notification is guaranteed to be fired only once.
     * Listener is always notified asynchronously using a dedicated notification thread pool so, care should
     * be taken to avoid leaking {@code this} when adding a listener in a constructor. Additionally, it is
     * possible notifications are observed out of order due to the asynchronous execution.
     */
    public void addFinalTaskInfoListener(StateChangeListener<TaskInfo> stateChangeListener) {
        // compareAndSet guards the "exactly once" contract: the listener below may be
        // invoked both by the state-change subscription and by the manual call at the
        // end of this method.
        AtomicBoolean done = new AtomicBoolean();
        StateChangeListener<Optional<TaskInfo>> fireOnceStateChangeListener = finalTaskInfo -> {
            if (finalTaskInfo.isPresent() && done.compareAndSet(false, true)) {
                stateChangeListener.stateChanged(finalTaskInfo.get());
            }
        };
        finalTaskInfo.addStateChangeListener(fireOnceStateChangeListener);
        // Fire immediately in case the final task info was already set before the
        // listener was registered (otherwise the notification could be missed).
        fireOnceStateChangeListener.stateChanged(finalTaskInfo.get());
    }
}
public class RouteGuideUtil { /** * Parses the JSON input file containing the list of features . */ public static List < Feature > parseFeatures ( URL file ) throws IOException { } }
InputStream input = file . openStream ( ) ; try { Reader reader = new InputStreamReader ( input , Charset . forName ( "UTF-8" ) ) ; try { FeatureDatabase . Builder database = FeatureDatabase . newBuilder ( ) ; JsonFormat . parser ( ) . merge ( reader , database ) ; return database . getFeatureList ( ) ; } finally { reader . close ( ) ; } } finally { input . close ( ) ; }
public class CloneableRuntimeException {
    /**
     * Convenience method, intended for use when defining static (pre-allocated)
     * exception constants: rewrites this exception's stack trace to point at the
     * given class and method.
     * (Translated from the original Chinese comment.)
     *
     * @param throwClazz the class to report as the throw site
     * @param throwMethod the method name to report as the throw site
     * @return this exception, for call chaining
     */
    public CloneableRuntimeException setStackTrace(Class<?> throwClazz, String throwMethod) {
        ExceptionUtil.setStackTrace(this, throwClazz, throwMethod);
        return this;
    }
}
public class Cli { /** * Build the GlobalOptions information from the raw parsed options * @ param parsedOpts Options parsed from the cmd line * @ return */ private GlobalOptions createGlobalOptions ( CommandLine parsedOpts ) { } }
String host = parsedOpts . hasOption ( HOST_OPT ) ? parsedOpts . getOptionValue ( HOST_OPT ) : DEFAULT_REST_SERVER_HOST ; int port = DEFAULT_REST_SERVER_PORT ; try { if ( parsedOpts . hasOption ( PORT_OPT ) ) { port = Integer . parseInt ( parsedOpts . getOptionValue ( PORT_OPT ) ) ; } } catch ( NumberFormatException e ) { printHelpAndExit ( "The port must be a valid integer." ) ; } return new GlobalOptions ( host , port ) ;
public class VisDialog { /** * Adds the given button to the button table . * @ param object The object that will be passed to { @ link # result ( Object ) } if this button is clicked . May be null . */ public VisDialog button ( Button button , Object object ) { } }
buttonTable . add ( button ) ; setObject ( button , object ) ; return this ;
public class WSManagedConnectionFactoryImpl { /** * The spec interface is defined with raw types , so we have no choice but to declare it that way , too */ @ Override public Set < ManagedConnection > getInvalidConnections ( @ SuppressWarnings ( "rawtypes" ) Set connectionSet ) throws ResourceException { } }
final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . entry ( this , tc , "getInvalidConnections" , connectionSet ) ; Set < ManagedConnection > badSet = new HashSet < ManagedConnection > ( ) ; WSRdbManagedConnectionImpl mc = null ; // Loop through each ManagedConnection in the list , using each connection ' s // preTestSQLString to check validity . Different connections can // have different preTestSQLStrings if they were created by different // ManagedConnectionFactories . for ( Iterator < ? > it = connectionSet . iterator ( ) ; it . hasNext ( ) ; ) { mc = ( WSRdbManagedConnectionImpl ) it . next ( ) ; if ( ! mc . validate ( ) ) badSet . add ( mc ) ; } if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( this , tc , "getInvalidConnections" , badSet ) ; return badSet ;
public class OauthHelper {
    /**
     * Populate/renew jwt info for the given jwt object.
     * Based on the expire time of the jwt, determines whether the jwt needs to be renewed.
     * To avoid modifying class members (which would cause thread-safety problems), this method
     * was moved from Http2Client to this helper class.
     *
     * @param jwt the given jwt to renew or populate
     * @return on success, the (possibly renewed) Jwt; on failure, a Status result.
     */
    public static Result<Jwt> populateCCToken(final Jwt jwt) {
        boolean isInRenewWindow = jwt.getExpire() - System.currentTimeMillis() < jwt.getTokenRenewBeforeExpired();
        logger.trace("isInRenewWindow = " + isInRenewWindow);
        // if not in renew window, return the current jwt.
        if (!isInRenewWindow) {
            return Success.of(jwt);
        }
        // The same jwt shouldn't be renewed concurrently; locking on the jwt instance
        // ensures different jwts don't block each other's renewal.
        synchronized (jwt) {
            // if token already expired, renew synchronously -- callers must not proceed
            // with an expired token.
            if (jwt.getExpire() <= System.currentTimeMillis()) {
                Result<Jwt> result = renewCCTokenSync(jwt);
                if (logger.isTraceEnabled()) logger.trace("Check secondary token is done!");
                return result;
            } else {
                // Still valid but inside the renew window: kick off renewal in the
                // background and return the current (still usable) token.
                renewCCTokenAsync(jwt);
                if (logger.isTraceEnabled()) logger.trace("Check secondary token is done!");
                return Success.of(jwt);
            }
        }
    }
}
public class DefaultGitHubClient {
    /**
     * Get comments from the given comment url, following pagination until the last page.
     *
     * @param commentsUrl the first page URL to fetch
     * @param repo the repository whose credentials are used for the REST calls
     * @return all comments across all pages
     * @throws RestClientException if a REST call fails
     */
    public List<Comment> getComments(String commentsUrl, GitHubRepo repo) throws RestClientException {
        List<Comment> comments = new ArrayList<>();
        // decrypt password and personal access token stored in the repo options
        String decryptedPassword = decryptString(repo.getPassword(), settings.getKey());
        String personalAccessToken = (String) repo.getOptions().get("personalAccessToken");
        String decryptedPersonalAccessToken = decryptString(personalAccessToken, settings.getKey());
        boolean lastPage = false;
        String queryUrlPage = commentsUrl;
        // Walk the paginated result set; the next-page URL comes from the response headers.
        while (!lastPage) {
            ResponseEntity<String> response = makeRestCall(queryUrlPage, repo.getUserId(), decryptedPassword, decryptedPersonalAccessToken);
            JSONArray jsonArray = parseAsArray(response);
            for (Object item : jsonArray) {
                JSONObject jsonObject = (JSONObject) item;
                Comment comment = new Comment();
                JSONObject userJsonObj = (JSONObject) jsonObject.get("user");
                comment.setUser((String) userJsonObj.get("login"));
                long crt = new DateTime(str(jsonObject, "created_at")).getMillis();
                comment.setCreatedAt(crt);
                long upd = new DateTime(str(jsonObject, "updated_at")).getMillis();
                comment.setUpdatedAt(upd);
                comment.setBody(str(jsonObject, "body"));
                comments.add(comment);
            }
            // An empty page or the API's "last page" marker terminates the loop.
            if (CollectionUtils.isEmpty(jsonArray)) {
                lastPage = true;
            } else {
                if (isThisLastPage(response)) {
                    lastPage = true;
                } else {
                    lastPage = false;
                    queryUrlPage = getNextPageUrl(response);
                }
            }
        }
        return comments;
    }
}
public class GitFunction {
    /**
     * Add the names of the tags as children of the current node.
     *
     * @param git the Git object; may not be null
     * @param spec the call specification; may not be null
     * @param writer the document writer for the current node; may not be null
     * @throws GitAPIException if there is a problem accessing the Git repository
     */
    protected void addTagsAsChildren(Git git, CallSpecification spec, DocumentWriter writer) throws GitAPIException {
        // The command returns tags sorted by name; reverse that order since tag
        // names are often version numbers.
        List<Ref> tags = git.tagList().call();
        Collections.sort(tags, REVERSE_REF_COMPARATOR);
        for (Ref ref : tags) {
            String name = ref.getName().replaceFirst(TAG_PREFIX, "");
            writer.addChild(spec.childId(name), name);
        }
    }
}
public class DefaultGroovyMethods {
    /**
     * Support the subscript operator with a range for a long array.
     *
     * @param array a long array
     * @param range a range indicating the indices for the items to retrieve
     * @return list of the retrieved longs
     * @since 1.0
     */
    @SuppressWarnings("unchecked")
    public static List<Long> getAt(long[] array, Range range) {
        // Delegates to the shared primitive-array range extraction helper.
        return primitiveArrayGet(array, range);
    }
}
public class AttributeMapper { /** * Return the variable identifier from the attribute - id */ public int getVariableId ( String attrId ) throws MIDDParsingException { } }
if ( ! attributeMapper . containsKey ( attrId ) ) { throw new MIDDParsingException ( "Attribute '" + attrId + "' not found" ) ; } return attributeMapper . get ( attrId ) . intValue ( ) ;
public class AuditorFactory { /** * Get an auditor instance for the specified auditor class , * auditor configuration , and auditor context . * @ param clazzClass to instantiate * @ param configToUseAuditor configuration to use * @ param contextToUseAuditor context to use * @ return Instance of an IHE Auditor */ public static IHEAuditor getAuditor ( Class < ? extends IHEAuditor > clazz , AuditorModuleConfig configToUse , AuditorModuleContext contextToUse ) { } }
IHEAuditor auditor = AuditorFactory . getAuditorForClass ( clazz ) ; if ( auditor != null ) { auditor . setConfig ( configToUse ) ; auditor . setContext ( contextToUse ) ; } return auditor ;
public class RuleQueryFactory {
    /**
     * Create a {@link RuleQuery} from a {@link Request}.
     * When a profile key is set, the language of the profile is automatically set in the query.
     */
    public RuleQuery createRuleQuery(DbSession dbSession, Request request) {
        RuleQuery query = new RuleQuery();
        query.setQueryText(request.param(WebService.Param.TEXT_QUERY));
        query.setSeverities(request.paramAsStrings(PARAM_SEVERITIES));
        query.setRepositories(request.paramAsStrings(PARAM_REPOSITORIES));
        Date availableSince = request.paramAsDate(PARAM_AVAILABLE_SINCE);
        query.setAvailableSince(availableSince != null ? availableSince.getTime() : null);
        query.setStatuses(toEnums(request.paramAsStrings(PARAM_STATUSES), RuleStatus.class));
        // Order is important: 1. Load profile, 2. Load organization either from parameter or from profile, 3. Load compare-to profile
        setProfile(dbSession, query, request);
        setOrganization(dbSession, query, request);
        setCompareToProfile(dbSession, query, request);
        // A selected profile pins the language; otherwise languages come from the request.
        QProfileDto profile = query.getQProfile();
        query.setLanguages(profile == null ? request.paramAsStrings(PARAM_LANGUAGES) : ImmutableList.of(profile.getLanguage()));
        // Activation filtering is only honored when active rules are visible to the caller.
        if (wsSupport.areActiveRulesVisible(query.getOrganization())) {
            query.setActivation(request.paramAsBoolean(PARAM_ACTIVATION));
        }
        query.setTags(request.paramAsStrings(PARAM_TAGS));
        query.setInheritance(request.paramAsStrings(PARAM_INHERITANCE));
        query.setActiveSeverities(request.paramAsStrings(PARAM_ACTIVE_SEVERITIES));
        query.setIsTemplate(request.paramAsBoolean(PARAM_IS_TEMPLATE));
        query.setTemplateKey(request.param(PARAM_TEMPLATE_KEY));
        query.setTypes(toEnums(request.paramAsStrings(PARAM_TYPES), RuleType.class));
        query.setKey(request.param(PARAM_RULE_KEY));
        // Sorting is optional; ascending flag is only read when a sort field is present.
        String sortParam = request.param(WebService.Param.SORT);
        if (sortParam != null) {
            query.setSortField(sortParam);
            query.setAscendingSort(request.mandatoryParamAsBoolean(WebService.Param.ASCENDING));
        }
        return query;
    }
}
public class Mutator {
    /**
     * Creates a joint schema between two Schemas. All Fields from both schemas are deduplicated
     * and combined into a single Schema. The left Schema has priority, so if both Schemas have
     * the same Field with the same name but different Types, the Type from the left Schema will be
     * taken.
     *
     * The name of the schema is auto-generated with a static counter.
     * NOTE(review): COUNTER++ is not atomic, so concurrent callers could produce
     * duplicate names -- confirm single-threaded use or make the counter atomic.
     */
    public static Schema jointSchema(Schema leftSchema, Schema rightSchema) {
        return jointSchema("jointSchema" + (COUNTER++), leftSchema, rightSchema);
    }
}
public class HalLinker { /** * Resolves a relation . Locates a HalLinkResolver for resolving the set of all linked resources in the relation . * @ param relation the relation to resolve * @ param processEngine the process engine to use * @ return the list of resolved resources * @ throws RuntimeException if no HalLinkResolver can be found for the linked resource type . */ public List < HalResource < ? > > resolve ( HalRelation relation , ProcessEngine processEngine ) { } }
HalLinkResolver linkResolver = hal . getLinkResolver ( relation . resourceType ) ; if ( linkResolver != null ) { Set < String > linkedIds = getLinkedResourceIdsByRelation ( relation ) ; if ( ! linkedIds . isEmpty ( ) ) { return linkResolver . resolveLinks ( linkedIds . toArray ( new String [ linkedIds . size ( ) ] ) , processEngine ) ; } else { return Collections . emptyList ( ) ; } } else { throw new RuntimeException ( "Cannot find HAL link resolver for resource type '" + relation . resourceType + "'." ) ; }
public class BaseMojo { /** * Append the version piece to the version # under construction . * @ param sb String builder receiving the version under construction . * @ param pc The version piece to add . If null , it is ignored . If non - numeric , a value of " 0 " is * used . */ private void appendVersionPiece ( StringBuilder sb , String pc ) { } }
if ( ( pc != null ) && ! pc . isEmpty ( ) ) { pc = pc . trim ( ) ; if ( sb . length ( ) > 0 ) { sb . append ( "." ) ; } sb . append ( StringUtils . isNumeric ( pc ) ? pc : "0" ) ; }
public class IO { /** * Copy Reader to Writer for byteCount bytes or until EOF or exception . */ public static void copy ( Reader in , Writer out , long byteCount ) throws IOException { } }
char buffer [ ] = new char [ bufferSize ] ; int len = bufferSize ; if ( byteCount >= 0 ) { while ( byteCount > 0 ) { if ( byteCount < bufferSize ) len = in . read ( buffer , 0 , ( int ) byteCount ) ; else len = in . read ( buffer , 0 , bufferSize ) ; if ( len == - 1 ) break ; byteCount -= len ; out . write ( buffer , 0 , len ) ; } } else { while ( true ) { len = in . read ( buffer , 0 , bufferSize ) ; if ( len == - 1 ) break ; out . write ( buffer , 0 , len ) ; } }
public class CmsListItem { /** * Returns the value of the column for this item . < p > * @ param columnId the column id * @ return the content , may be < code > null < / code > * @ throws CmsIllegalArgumentException if the given < code > columnId < / code > is invalid */ public Object get ( String columnId ) throws CmsIllegalArgumentException { } }
if ( ( getMetadata ( ) . getColumnDefinition ( columnId ) == null ) && ( getMetadata ( ) . getItemDetailDefinition ( columnId ) == null ) ) { throw new CmsIllegalArgumentException ( Messages . get ( ) . container ( Messages . ERR_LIST_INVALID_COLUMN_1 , columnId ) ) ; } return m_values . get ( columnId ) ;
public class Get { /** * Executes a getter ( < tt > getX ( ) < / tt > ) on the target object which returns BigInteger . * If the specified attribute is , for example , " < tt > name < / tt > " , the called method * will be " < tt > getName ( ) < / tt > " . * @ param attributeName the name of the attribute * @ return the result of the method execution */ public static Function < Object , BigInteger > attrOfBigInteger ( final String attributeName ) { } }
return new Get < Object , BigInteger > ( Types . BIG_INTEGER , attributeName ) ;
public class MsgManager { /** * Removes all { @ link AppMsg } from the queue . */ void clearMsg ( AppMsg appMsg ) { } }
if ( msgQueue . contains ( appMsg ) || stickyQueue . contains ( appMsg ) ) { // Avoid the message from being removed twice . removeMessages ( MESSAGE_DISPLAY , appMsg ) ; removeMessages ( MESSAGE_ADD_VIEW , appMsg ) ; removeMessages ( MESSAGE_REMOVE , appMsg ) ; msgQueue . remove ( appMsg ) ; stickyQueue . remove ( appMsg ) ; removeMsg ( appMsg ) ; }
public class FireFoxCapabilitiesBuilder { /** * Returns the default { @ link FirefoxOptions } used by this capabilities builder . */ private FirefoxOptions getDefaultFirefoxOptions ( ) { } }
FirefoxOptions options = new FirefoxOptions ( ) ; options . setLogLevel ( FirefoxDriverLogLevel . INFO ) ; options . setHeadless ( Boolean . parseBoolean ( getLocalConfigProperty ( ConfigProperty . BROWSER_RUN_HEADLESS ) ) ) ; return options ;
public class OpDef {

    /**
     * <pre>
     * Description of the input(s).
     * </pre>
     *
     * <code>repeated .tensorflow.OpDef.ArgDef input_arg = 2;</code>
     *
     * @param index position within the repeated input_arg field
     * @return the ArgDef at the given index
     */
    public org.tensorflow.framework.OpDef.ArgDef getInputArg(int index) {
        // Direct delegation to the backing list; an out-of-range index
        // propagates the list's IndexOutOfBoundsException.
        return inputArg_.get(index);
    }
}
public class CommandServiceJdoRepository { /** * region > findRecentByUser */ @ Programmatic public List < CommandJdo > findRecentByUser ( final String user ) { } }
return repositoryService . allMatches ( new QueryDefault < > ( CommandJdo . class , "findRecentByUser" , "user" , user ) ) ;
public class CmsStringUtil {

    /**
     * Returns a string representation for the given map using the given separators.<p>
     *
     * @param <K> type of map keys
     * @param <V> type of map values
     * @param map the map to write
     * @param sepItem the item separator string
     * @param sepKeyval the key-value pair separator string
     *
     * @return the string representation for the given map
     */
    public static <K, V> String mapAsString(Map<K, V> map, String sepItem, String sepKeyval) {
        // StringBuilder instead of StringBuffer: the buffer is purely local,
        // so the synchronization of StringBuffer is wasted.
        StringBuilder string = new StringBuilder(128);
        Iterator<Map.Entry<K, V>> it = map.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<K, V> entry = it.next();
            string.append(entry.getKey());
            string.append(sepKeyval);
            string.append(entry.getValue());
            // Item separator only between entries, never trailing.
            if (it.hasNext()) {
                string.append(sepItem);
            }
        }
        return string.toString();
    }
}
public class WebServiceTemplateBuilder {

    /**
     * Set the {@link ClientInterceptor ClientInterceptors} that should be used with the
     * {@link WebServiceTemplate}. Setting this value will replace any previously defined
     * interceptors.
     *
     * @param interceptors the interceptors to set
     * @return a new builder instance
     * @see #additionalInterceptors(ClientInterceptor...)
     */
    public WebServiceTemplateBuilder interceptors(ClientInterceptor... interceptors) {
        // Reject a null varargs array up front.
        Assert.notNull(interceptors, "Interceptors must not be null");
        // Delegate to the Collection-based overload, which performs the replacement.
        return interceptors(Arrays.asList(interceptors));
    }
}
public class RootFinder { /** * { @ inheritDoc } */ @ Override public String getFilename ( final FinderObject owner ) { } }
final Root root = ( Root ) owner ; return root . getTheFilename ( ) ;
public class AlpineResource { /** * Provides a facility to retrieve a param by more than one name . Different libraries * and frameworks , expect ( in some cases ) different names for the same param . * @ param queryParams the parameters from the querystring * @ param params an array of one or more param names * @ return the value of the param , or null if not found */ private String multiParam ( final MultivaluedMap < String , String > queryParams , final String ... params ) { } }
for ( final String param : params ) { final String value = queryParams . getFirst ( param ) ; if ( StringUtils . isNotBlank ( value ) ) { return value ; } } return null ;
public class AdminController { /** * Perform a several cleanups on the given surt : * * Convert a URL to a SURT * * Add a trailing slash to SURTs of the form : http : / / ( . . . ) * @ param surt * @ return */ protected String cleanSurt ( String surt ) { } }
if ( ! isSurt ( surt ) ) { surt = ArchiveUtils . addImpliedHttpIfNecessary ( surt ) ; surt = SURT . fromURI ( surt ) ; } if ( surt . endsWith ( ",)" ) && surt . indexOf ( ")" ) == surt . length ( ) - 1 ) { surt = surt + "/" ; } return surt ;
public class PipelinedBinaryConsumer { /** * Performs every composed consumer . * @ param former the former value * @ param latter the latter value */ @ Override public void accept ( E1 former , E2 latter ) { } }
for ( BiConsumer < E1 , E2 > consumer : consumers ) { consumer . accept ( former , latter ) ; }
public class DRL6Parser {

    /**
     * entryPointDeclaration := ENTRY-POINT stringId annotation* END
     *
     * @param ddb the enclosing declare builder
     * @return the parsed entry point descriptor, or null when parsing failed
     * @throws org.antlr.runtime.RecognitionException
     */
    public EntryPointDeclarationDescr entryPointDeclaration(DeclareDescrBuilder ddb) throws RecognitionException {
        EntryPointDeclarationDescrBuilder declare = null;
        try {
            declare = helper.start(ddb, EntryPointDeclarationDescrBuilder.class, null);
            // The soft keyword "entry-point" is lexed as three tokens:
            // ID("entry"), MINUS, ID("point") — matched one by one.
            match(input, DRL6Lexer.ID, DroolsSoftKeywords.ENTRY, null, DroolsEditorType.KEYWORD);
            if (state.failed) return null;
            match(input, DRL6Lexer.MINUS, null, null, DroolsEditorType.KEYWORD);
            if (state.failed) return null;
            match(input, DRL6Lexer.ID, DroolsSoftKeywords.POINT, null, DroolsEditorType.KEYWORD);
            if (state.failed) return null;
            // Entry point name.
            String ep = stringId();
            if (state.failed) return null;
            // Only record the name when actually parsing, not while backtracking.
            if (state.backtracking == 0) {
                declare.entryPointId(ep);
            }
            // annotation*
            while (input.LA(1) == DRL6Lexer.AT) {
                annotation(declare);
                if (state.failed) return null;
            }
            match(input, DRL6Lexer.ID, DroolsSoftKeywords.END, null, DroolsEditorType.KEYWORD);
            if (state.failed) return null;
        } catch (RecognitionException re) {
            reportError(re);
        } finally {
            // Always close the builder scope, even after an error or early return.
            helper.end(EntryPointDeclarationDescrBuilder.class, declare);
        }
        return (declare != null) ? declare.getDescr() : null;
    }
}
public class AbstractHttpCommandProcessor {

    /**
     * Resolves an HttpCommand to an ExecutionContext, which provides contextual
     * information to the ExecutionVenue that the command will be executed in.
     *
     * @param http contains the HttpServletRequest from which the contextual
     *             information is derived
     * @param cc   credentials extracted from the request
     * @return the ExecutionContext, populated with information from the HttpCommand
     */
    protected DehydratedExecutionContext resolveExecutionContext(HttpCommand http, CredentialsContainer cc) {
        // Pure delegation to the configured context resolution strategy,
        // passing along this processor's protocol.
        return contextResolution.resolveExecutionContext(protocol, http, cc);
    }
}
public class DefaultShardManagerBuilder { /** * Sets the { @ link ExecutorService ExecutorService } that should be used in * the JDA callback handler which mostly consists of { @ link net . dv8tion . jda . core . requests . RestAction RestAction } callbacks . * By default JDA will use { @ link ForkJoinPool # commonPool ( ) } * < br > < b > Only change this pool if you know what you ' re doing . < / b > * @ param executor * The thread - pool to use for callback handling * @ param automaticShutdown * Whether { @ link net . dv8tion . jda . core . JDA # shutdown ( ) } should automatically shutdown this pool * @ return The DefaultShardManagerBuilder instance . Useful for chaining . */ public DefaultShardManagerBuilder setCallbackPool ( ExecutorService executor , boolean automaticShutdown ) { } }
return setCallbackPoolProvider ( executor == null ? null : new ThreadPoolProviderImpl < > ( executor , automaticShutdown ) ) ;
public class CsvBeanReader { /** * Invokes the setter on the bean with the supplied value . * @ param bean * the bean * @ param setMethod * the setter method for the field * @ param fieldValue * the field value to set * @ throws SuperCsvException * if there was an exception invoking the setter */ private static void invokeSetter ( final Object bean , final Method setMethod , final Object fieldValue ) { } }
try { setMethod . setAccessible ( true ) ; setMethod . invoke ( bean , fieldValue ) ; } catch ( final Exception e ) { throw new SuperCsvReflectionException ( String . format ( "error invoking method %s()" , setMethod . getName ( ) ) , e ) ; }
public class CmsCloneModuleThread { /** * Clones the export points of the module and adjusts its paths . < p > * @ param sourceModule the source module * @ param targetModule the target module * @ param sourcePathPart the source path part * @ param targetPathPart the target path part */ private void cloneExportPoints ( CmsModule sourceModule , CmsModule targetModule , String sourcePathPart , String targetPathPart ) { } }
for ( CmsExportPoint exp : targetModule . getExportPoints ( ) ) { if ( exp . getUri ( ) . contains ( sourceModule . getName ( ) ) ) { exp . setUri ( exp . getUri ( ) . replaceAll ( sourceModule . getName ( ) , targetModule . getName ( ) ) ) ; } if ( exp . getUri ( ) . contains ( sourcePathPart ) ) { exp . setUri ( exp . getUri ( ) . replaceAll ( sourcePathPart , targetPathPart ) ) ; } }
public class XMLUnit { /** * Obtains an XpathEngine to use in XPath tests . */ public static XpathEngine newXpathEngine ( ) { } }
XpathEngine eng = new org . custommonkey . xmlunit . jaxp13 . Jaxp13XpathEngine ( ) ; if ( namespaceContext != null ) { eng . setNamespaceContext ( namespaceContext ) ; } return eng ;
public class PerspectiveOps { /** * Creates a set of intrinsic parameters , without distortion , for a camera with the specified characteristics . * The focal length is assumed to be the same for x and y . * @ param width Image width * @ param height Image height * @ param hfov Horizontal FOV in degrees * @ return guess camera parameters */ public static CameraPinholeBrown createIntrinsic ( int width , int height , double hfov ) { } }
CameraPinholeBrown intrinsic = new CameraPinholeBrown ( ) ; intrinsic . width = width ; intrinsic . height = height ; intrinsic . cx = width / 2 ; intrinsic . cy = height / 2 ; intrinsic . fx = intrinsic . cx / Math . tan ( UtilAngle . degreeToRadian ( hfov / 2.0 ) ) ; intrinsic . fy = intrinsic . fx ; return intrinsic ;