signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AnalyzerComponentBuilder { /** * Notification method invoked when transformer is removed . */ @ Override protected void onRemovedInternal ( ) { } }
final List < AnalyzerChangeListener > listeners = getAllListeners ( ) ; for ( final AnalyzerChangeListener listener : listeners ) { listener . onRemove ( this ) ; }
public class SQLiteDatabase { /** * Runs the provided SQL and returns a cursor over the result set . * @ param cursorFactory the cursor factory to use , or null for the default factory * @ param sql the SQL query . The SQL string must not be ; terminated * @ param selectionArgs You may include ? s in where clause in the query , * which will be replaced by the values from selectionArgs . The * values will be bound as Strings . * @ param editTable the name of the first table , which is editable * @ param cancellationSignal A signal to cancel the operation in progress , or null if none . * If the operation is canceled , then { @ link OperationCanceledException } will be thrown * when the query is executed . * @ return A { @ link Cursor } object , which is positioned before the first entry . Note that * { @ link Cursor } s are not synchronized , see the documentation for more details . */ public Cursor rawQueryWithFactory ( CursorFactory cursorFactory , String sql , String [ ] selectionArgs , String editTable , CancellationSignal cancellationSignal ) { } }
acquireReference ( ) ; try { com . couchbase . lite . internal . database . sqlite . SQLiteCursorDriver driver = new com . couchbase . lite . internal . database . sqlite . SQLiteDirectCursorDriver ( this , sql , editTable , cancellationSignal ) ; return driver . query ( cursorFactory != null ? cursorFactory : mCursorFactory , selectionArgs ) ; } finally { releaseReference ( ) ; }
public class IO { /** * characters back to their quoted form . */ protected static String stringOf ( Value val ) { } }
StringBuilder s = new StringBuilder ( ) ; val = val . deref ( ) ; if ( val instanceof SeqValue ) { SeqValue sv = ( SeqValue ) val ; for ( Value v : sv . values ) { v = v . deref ( ) ; if ( v instanceof CharacterValue ) { CharacterValue cv = ( CharacterValue ) v ; s . append ( cv . unicode ) ; } else { s . append ( "?" ) ; } } return s . toString ( ) ; } else { return val . toString ( ) ; }
public class SubsystemAdd { /** * { @ inheritDoc } */ @ Override protected void populateModel ( ModelNode operation , ModelNode model ) throws OperationFailedException { } }
log . info ( "Populating the model: " + model ) ; // model . setEmptyObject ( ) ;
public class FulfillmentInfoUrl { /** * Get Resource Url for SetFulFillmentInfo * @ param orderId Unique identifier of the order . * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ param updateMode Specifies whether to update the original order , update the order in draft mode , or update the order in draft mode and then commit the changes to the original . Draft mode enables users to make incremental order changes before committing the changes to the original order . Valid values are " ApplyToOriginal , " " ApplyToDraft , " or " ApplyAndCommit . " * @ param version Determines whether or not to check versioning of items for concurrency purposes . * @ return String Resource Url */ public static MozuUrl setFulFillmentInfoUrl ( String orderId , String responseFields , String updateMode , String version ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/orders/{orderId}/fulfillmentinfo?updatemode={updateMode}&version={version}&responseFields={responseFields}" ) ; formatter . formatUrl ( "orderId" , orderId ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; formatter . formatUrl ( "updateMode" , updateMode ) ; formatter . formatUrl ( "version" , version ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class EstimateSceneCalibrated { /** * Compares the angle that different observations form when their lines intersect . Returns * the median angle . Used to determine if this edge is good for triangulation * @ param edge edge * @ return median angle between observations in radians */ double medianTriangulationAngle ( Motion edge ) { } }
GrowQueue_F64 angles = new GrowQueue_F64 ( edge . associated . size ( ) ) ; angles . size = edge . associated . size ( ) ; for ( int i = 0 ; i < edge . associated . size ( ) ; i ++ ) { AssociatedIndex a = edge . associated . get ( i ) ; Point2D_F64 normA = edge . viewSrc . observationNorm . get ( a . src ) ; Point2D_F64 normB = edge . viewDst . observationNorm . get ( a . dst ) ; double acute = triangulationAngle ( normA , normB , edge . a_to_b ) ; angles . data [ i ] = acute ; } angles . sort ( ) ; return angles . getFraction ( 0.5 ) ;
public class Log4jConfigurationHelper { /** * Update the log4j configuration . * @ param targetClass the target class used to get the original log4j configuration file as a resource * @ param log4jPath the custom log4j configuration properties file path * @ param log4jFileName the custom log4j configuration properties file name * @ throws IOException if there ' s something wrong with updating the log4j configuration */ public static void updateLog4jConfiguration ( Class < ? > targetClass , String log4jPath , String log4jFileName ) throws IOException { } }
Closer closer = Closer . create ( ) ; try { InputStream fileInputStream = closer . register ( new FileInputStream ( log4jPath ) ) ; InputStream inputStream = closer . register ( targetClass . getResourceAsStream ( "/" + log4jFileName ) ) ; Properties customProperties = new Properties ( ) ; customProperties . load ( fileInputStream ) ; Properties originalProperties = new Properties ( ) ; originalProperties . load ( inputStream ) ; for ( Entry < Object , Object > entry : customProperties . entrySet ( ) ) { originalProperties . setProperty ( entry . getKey ( ) . toString ( ) , entry . getValue ( ) . toString ( ) ) ; } LogManager . resetConfiguration ( ) ; PropertyConfigurator . configure ( originalProperties ) ; } catch ( Throwable t ) { throw closer . rethrow ( t ) ; } finally { closer . close ( ) ; }
public class WeightController { /** * 通知下一个weight任务可以被执行 * @ throws InterruptedException */ public synchronized void single ( long weight ) throws InterruptedException { } }
this . weights . remove ( weight ) ; // 触发下一个可运行的weight Long nextWeight = this . weights . peek ( ) ; if ( nextWeight != null ) { barrier . single ( nextWeight ) ; }
public class FileSystem { /** * Create a zip file from the given input file . * @ param input the name of the file to compress . * @ param output the name of the ZIP file to create . * @ throws IOException when ziiping is failing . * @ since 6.2 */ public static void zipFile ( File input , File output ) throws IOException { } }
try ( FileOutputStream fos = new FileOutputStream ( output ) ) { zipFile ( input , fos ) ; }
public class Lists { /** * null to empty list * @ param list list * @ param < E > element type * @ return list */ public static < E > List < E > empty ( List < E > list ) { } }
return Optional . ofNullable ( list ) . orElse ( newArrayList ( ) ) ;
public class nslimitidentifier { /** * Use this API to unset the properties of nslimitidentifier resource . * Properties that need to be unset are specified in args array . */ public static base_response unset ( nitro_service client , nslimitidentifier resource , String [ ] args ) throws Exception { } }
nslimitidentifier unsetresource = new nslimitidentifier ( ) ; unsetresource . limitidentifier = resource . limitidentifier ; return unsetresource . unset_resource ( client , args ) ;
public class MarkerManager { /** * Retrieves or creates a Marker with the specified parent . * @ param name The name of the Marker . * @ param parent The parent Marker . * @ return The Marker with the specified name . * @ throws IllegalArgumentException if any argument is { @ code null } * @ deprecated Use the Marker add or set methods to add parent Markers . Will be removed by final GA release . */ @ Deprecated public static Marker getMarker ( final String name , final Marker parent ) { } }
return getMarker ( name ) . addParents ( parent ) ;
public class Ginv { /** * Same as { @ link inverse ( Matrix ) } but optimized for 2D double arrays * @ param matrix * the matrix to invert * @ return generalized matrix inverse */ public static DenseDoubleMatrix2D inverse ( final double [ ] [ ] matrix ) { } }
double epsilon = UJMPSettings . getInstance ( ) . getTolerance ( ) ; int rows = matrix . length ; int cols = matrix [ 0 ] . length ; double [ ] [ ] s = new double [ cols ] [ cols ] ; for ( int c = 0 ; c < cols ; c ++ ) { s [ c ] [ c ] = 1.0 ; } final double [ ] [ ] t = new double [ rows ] [ rows ] ; for ( int r = 0 ; r < rows ; r ++ ) { t [ r ] [ r ] = 1.0 ; } int maxDiag = Math . min ( rows , cols ) ; int diag = 0 ; for ( ; diag < maxDiag ; diag ++ ) { // get the largest value for the pivot swapPivot ( matrix , diag , s , t ) ; if ( matrix [ diag ] [ diag ] == 0.0 ) { break ; } // divide through to make pivot identity double divisor = matrix [ diag ] [ diag ] ; if ( Math . abs ( divisor ) < epsilon ) { matrix [ diag ] [ diag ] = 0.0 ; break ; } divideRowBy ( matrix , diag , diag , divisor ) ; divideRowBy ( t , diag , 0 , divisor ) ; matrix [ diag ] [ diag ] = 1.0 ; // remove values down remaining rows for ( int row = diag + 1 ; row < rows ; row ++ ) { double factor = matrix [ row ] [ diag ] ; if ( factor != 0.0 ) { addRowTimes ( matrix , diag , diag , row , factor ) ; addRowTimes ( t , diag , 0 , row , factor ) ; matrix [ row ] [ diag ] = 0.0 ; } } // remove values across remaining cols - some optimization could // be done here because the changes to the original matrix at this // point only touch the current diag column for ( int col = diag + 1 ; col < cols ; col ++ ) { double factor = matrix [ diag ] [ col ] ; if ( factor != 0.0 ) { addColTimes ( matrix , diag , diag , col , factor ) ; addColTimes ( s , diag , 0 , col , factor ) ; matrix [ diag ] [ col ] = 0.0 ; } } } double [ ] [ ] result = times ( s , t , diag ) ; return new ArrayDenseDoubleMatrix2D ( result ) ;
public class CQLTranslator { /** * Build where clause with given clause . * @ param builder * the builder * @ param fieldClazz * the field clazz * @ param field * the field * @ param value * the value * @ param clause * the clause * @ param useToken * the use token * @ return the string builder */ public StringBuilder onWhereClause ( StringBuilder builder , Class fieldClazz , String field , Object value , String clause , boolean useToken ) { } }
if ( clause . trim ( ) . equals ( IN_CLAUSE ) ) { useToken = false ; } builder = ensureCase ( builder , field , useToken ) ; builder . append ( SPACE_STRING ) ; if ( fieldClazz . isAssignableFrom ( List . class ) || fieldClazz . isAssignableFrom ( Map . class ) || fieldClazz . isAssignableFrom ( Set . class ) ) { builder . append ( "CONTAINS" ) ; } else { builder . append ( clause ) ; } builder . append ( SPACE_STRING ) ; if ( clause . trim ( ) . equals ( IN_CLAUSE ) ) { builder . append ( OPEN_BRACKET ) ; String itemValues = String . valueOf ( value ) ; itemValues = itemValues . startsWith ( OPEN_BRACKET ) && itemValues . endsWith ( CLOSE_BRACKET ) ? itemValues . substring ( 1 , itemValues . length ( ) - 1 ) : itemValues ; List < String > items = Arrays . asList ( ( ( String ) itemValues ) . split ( "\\s*,\\s*" ) ) ; int counter = 0 ; for ( String str : items ) { str = str . trim ( ) ; str = ( str . startsWith ( Constants . ESCAPE_QUOTE ) && str . endsWith ( Constants . ESCAPE_QUOTE ) ) || ( str . startsWith ( "'" ) && str . endsWith ( "'" ) ) ? str . substring ( 1 , str . length ( ) - 1 ) : str ; appendValue ( builder , fieldClazz , str , false , false ) ; counter ++ ; if ( counter < items . size ( ) ) { builder . append ( COMMA_STR ) ; } } builder . append ( CLOSE_BRACKET ) ; } else { appendValue ( builder , fieldClazz , value , false , useToken ) ; } return builder ;
public class HttpContainerBase { /** * Start the server . */ public boolean start ( ) { } }
Thread thread = Thread . currentThread ( ) ; ClassLoader oldLoader = thread . getContextClassLoader ( ) ; try { thread . setContextClassLoader ( classLoader ( ) ) ; if ( ! _lifecycle . toStarting ( ) ) { return false ; } _startTime = CurrentTime . currentTime ( ) ; _lifecycle . toStarting ( ) ; startImpl ( ) ; _lifecycle . toActive ( ) ; return true ; } catch ( RuntimeException e ) { log . log ( Level . WARNING , e . toString ( ) , e ) ; _lifecycle . toError ( ) ; throw e ; } catch ( Throwable e ) { log . log ( Level . WARNING , e . toString ( ) , e ) ; _lifecycle . toError ( ) ; // if the server can ' t start , it needs to completely fail , especially // for the watchdog throw new RuntimeException ( e ) ; // log . log ( Level . WARNING , e . toString ( ) , e ) ; // _ configException = e ; } finally { thread . setContextClassLoader ( oldLoader ) ; }
public class BackedHashMap { /** * superPut - Put into live cache */ public Object superPut ( Object key , Object value ) { } }
if ( com . ibm . websphere . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_WAS . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_WAS . logp ( Level . FINE , methodClassName , methodNames [ SUPER_PUT ] , "" + key ) ; } BackedSession sess = ( BackedSession ) super . put ( key , value ) ; // The LRU HashMap handles the PMI counters for CacheDiscards and MemoryCount return sess ;
public class ClassParser { /** * Check that a constant has the expected tag . * @ param constant * the constant to check * @ param expectedTag * the expected constant tag * @ throws InvalidClassFileFormatException * if the constant ' s tag does not match the expected tag */ private void checkConstantTag ( Constant constant , int expectedTag ) throws InvalidClassFileFormatException { } }
if ( constant . tag != expectedTag ) { throw new InvalidClassFileFormatException ( expectedClassDescriptor , codeBaseEntry ) ; }
public class SpoonDeviceRunner { /** * Execute instrumentation on the target device and return a result summary . */ public DeviceResult run ( AndroidDebugBridge adb ) { } }
String testPackage = instrumentationInfo . getInstrumentationPackage ( ) ; String testRunner = instrumentationInfo . getTestRunnerClass ( ) ; logDebug ( debug , "InstrumentationInfo: [%s]" , instrumentationInfo ) ; if ( debug ) { SpoonUtils . setDdmlibInternalLoggingLevel ( ) ; } DeviceResult . Builder result = new DeviceResult . Builder ( ) ; IDevice device = obtainRealDevice ( adb , serial ) ; logDebug ( debug , "Got realDevice for [%s]" , serial ) ; // Get relevant device information . final DeviceDetails deviceDetails = DeviceDetails . createForDevice ( device ) ; result . setDeviceDetails ( deviceDetails ) ; logDebug ( debug , "[%s] setDeviceDetails %s" , serial , deviceDetails ) ; DdmPreferences . setTimeOut ( ( int ) adbTimeout . toMillis ( ) ) ; // Now install the main application and the instrumentation application . for ( File otherApk : otherApks ) { try { String extraArgument = getGrantAllExtraArgument ( deviceDetails ) ; device . installPackage ( otherApk . getAbsolutePath ( ) , true , extraArgument ) ; } catch ( InstallException e ) { logInfo ( "InstallException while install other apk on device [%s]" , serial ) ; e . printStackTrace ( System . out ) ; return result . markInstallAsFailed ( "Unable to install other APK." ) . addException ( e ) . build ( ) ; } } try { String extraArgument = getGrantAllExtraArgument ( deviceDetails ) ; device . installPackage ( testApk . getAbsolutePath ( ) , true , extraArgument ) ; } catch ( InstallException e ) { logInfo ( "InstallException while install test apk on device [%s]" , serial ) ; e . printStackTrace ( System . out ) ; return result . markInstallAsFailed ( "Unable to install instrumentation APK." ) . addException ( e ) . build ( ) ; } try { cleanScreenshotsDirectoriesOnDevice ( device ) ; cleanFilesDirectoriesOnDevice ( device ) ; } catch ( Exception e ) { logInfo ( "Exception while cleaning storage directories on device [%s]" , serial ) ; e . printStackTrace ( System . out ) ; return result . 
markInstallAsFailed ( "Unable to delete storage directories" ) . addException ( e ) . build ( ) ; } try { grantReadWriteExternalStorage ( deviceDetails , device ) ; } catch ( Exception e ) { logInfo ( "Exception while granting external storage access to application apk" + "on device [%s]" , serial ) ; e . printStackTrace ( System . out ) ; return result . markInstallAsFailed ( "Unable to grant external storage access to application APK." ) . addException ( e ) . build ( ) ; } // Create the output directory , if it does not already exist . work . mkdirs ( ) ; // Determine the test set that is applicable for this device . LogRecordingTestRunListener recorder ; List < TestIdentifier > activeTests ; List < TestIdentifier > ignoredTests ; try { recorder = queryTestSet ( testPackage , testRunner , device ) ; activeTests = recorder . activeTests ( ) ; ignoredTests = recorder . ignoredTests ( ) ; logDebug ( debug , "Active tests: %s" , activeTests ) ; logDebug ( debug , "Ignored tests: %s" , ignoredTests ) ; } catch ( Exception e ) { return result . addException ( e ) . build ( ) ; } // Initiate device logging . SpoonDeviceLogger deviceLogger = new SpoonDeviceLogger ( device ) ; List < ITestRunListener > listeners = new ArrayList < > ( ) ; listeners . add ( new SpoonTestRunListener ( result , debug ) ) ; listeners . add ( new XmlTestRunListener ( junitReport ) ) ; if ( testRunListeners != null ) { listeners . addAll ( testRunListeners ) ; } result . startTests ( ) ; if ( singleInstrumentationCall ) { try { logDebug ( debug , "Running all tests in a single instrumentation call on [%s]" , serial ) ; RemoteAndroidTestRunner runner = createConfiguredRunner ( testPackage , testRunner , device ) ; runner . run ( listeners ) ; } catch ( Exception e ) { result . addException ( e ) ; } } else { MultiRunITestListener multiRunListener = new MultiRunITestListener ( listeners ) ; multiRunListener . multiRunStarted ( recorder . runName ( ) , recorder . 
testCount ( ) ) ; for ( TestIdentifier test : activeTests ) { try { logDebug ( debug , "Running %s on [%s]" , test , serial ) ; RemoteAndroidTestRunner runner = createConfiguredRunner ( testPackage , testRunner , device ) ; runner . removeInstrumentationArg ( "package" ) ; runner . removeInstrumentationArg ( "class" ) ; runner . setMethodName ( test . getClassName ( ) , test . getTestName ( ) ) ; runner . run ( listeners ) ; } catch ( Exception e ) { result . addException ( e ) ; } } for ( TestIdentifier ignoredTest : ignoredTests ) { multiRunListener . testStarted ( ignoredTest ) ; multiRunListener . testIgnored ( ignoredTest ) ; multiRunListener . testEnded ( ignoredTest , emptyMap ( ) ) ; } multiRunListener . multiRunEnded ( ) ; } result . endTests ( ) ; mapLogsToTests ( deviceLogger , result ) ; try { logDebug ( debug , "About to grab screenshots and prepare output for [%s]" , serial ) ; pullDeviceFiles ( device ) ; if ( codeCoverage ) { pullCoverageFile ( device ) ; } cleanScreenshotsDirectory ( result ) ; cleanFilesDirectory ( result ) ; } catch ( Exception e ) { result . addException ( e ) ; } logDebug ( debug , "Done running for [%s]" , serial ) ; return result . build ( ) ;
public class PeerGroup { /** * Returns a future that is triggered when there are at least the requested number of connected peers that support * the given protocol version or higher . To block immediately , just call get ( ) on the result . * @ param numPeers How many peers to wait for . * @ param protocolVersion The protocol version the awaited peers must implement ( or better ) . * @ return a future that will be triggered when the number of connected peers implementing protocolVersion or higher is greater than or equals numPeers */ public ListenableFuture < List < Peer > > waitForPeersOfVersion ( final int numPeers , final long protocolVersion ) { } }
List < Peer > foundPeers = findPeersOfAtLeastVersion ( protocolVersion ) ; if ( foundPeers . size ( ) >= numPeers ) { return Futures . immediateFuture ( foundPeers ) ; } final SettableFuture < List < Peer > > future = SettableFuture . create ( ) ; addConnectedEventListener ( new PeerConnectedEventListener ( ) { @ Override public void onPeerConnected ( Peer peer , int peerCount ) { final List < Peer > peers = findPeersOfAtLeastVersion ( protocolVersion ) ; if ( peers . size ( ) >= numPeers ) { future . set ( peers ) ; removeConnectedEventListener ( this ) ; } } } ) ; return future ;
public class Reaction { /** * Removes a mapping between the reactant and product side to this * Reaction . * @ param pos Position of the Mapping to remove . * @ see # mappings */ @ Override public void removeMapping ( int pos ) { } }
for ( int i = pos ; i < mappingCount - 1 ; i ++ ) { map [ i ] = map [ i + 1 ] ; } map [ mappingCount - 1 ] = null ; mappingCount -- ; notifyChanged ( ) ;
public class ZipFileEntryAsset { /** * ( non - Javadoc ) * @ see org . jboss . declarchive . api . Asset # getStream ( ) */ @ Override // TODO : create AssetStreamException ? public InputStream openStream ( ) { } }
try { final ZipFile file = new ZipFile ( this . file ) ; return new InputStreamWrapper ( file , file . getInputStream ( entry ) ) ; } catch ( final Exception e ) { throw new RuntimeException ( "Could not open zip file stream" , e ) ; }
public class JawnServletContext { /** * Returns value of routing user segment , or route wild card value , or request parameter . * If this name represents multiple values , this call will result in { @ link IllegalArgumentException } . * @ param name name of parameter . * @ return value of routing user segment , or route wild card value , or request parameter . */ public String param ( String name ) { } }
/* if ( name . equals ( " id " ) ) { return getId ( ) ; } else */ if ( request . getParameter ( name ) != null ) { return request . getParameter ( name ) ; // } else if ( requestContext . getUserSegments ( ) . get ( name ) ! = null ) { // return requestContext . getUserSegments ( ) . get ( name ) ; // } else if ( requestContext . getWildCardName ( ) ! = null // & & name . equals ( requestContext . getWildCardName ( ) ) ) { // return requestContext . getWildCardValue ( ) ; } else { return getRouteParam ( name ) ; }
public class CmsCmisRelationHelper { /** * Creates a user - readable name from the given relation object . < p > * @ param relation the relation object * @ return the readable name */ protected String createReadableName ( CmsRelation relation ) { } }
return relation . getType ( ) . getName ( ) + "[ " + relation . getSourcePath ( ) + " -> " + relation . getTargetPath ( ) + " ]" ;
public class Criteria { /** * 添加逻辑运算表达式 */ private ICriteria addLogicalExpression ( Restrictor criterion ) { } }
LogicRestrictor e = ( LogicRestrictor ) criterion ; Restrictor ic1 = e . getLeft ( ) ; Restrictor ic2 = e . getRight ( ) ; boolean isOr = RestrictType . or . toString ( ) . equals ( e . getRestriction ( ) ) ; if ( isOr ) // 如果是 or 运算 , 用小括号扩上 queryString . append ( "(" ) ; // 添加左侧表达式 if ( ic1 instanceof SimpleRestrictor ) addSimpleExpression ( ic1 ) ; else if ( ic1 instanceof LogicRestrictor ) addLogicalExpression ( ic1 ) ; // 添加逻辑运算符 queryString . append ( " " ) . append ( e . getRestriction ( ) ) . append ( " " ) ; // 添加右侧表达式 if ( ic2 instanceof SimpleRestrictor ) addSimpleExpression ( ic2 ) ; else if ( ic2 instanceof LogicRestrictor ) addLogicalExpression ( ic2 ) ; if ( isOr ) // 如果是 or 运算 , 用小括号扩上 queryString . append ( ")" ) ; return this ;
public class SQLSharedServerLeaseLog { /** * Creates the database table that is being used for the recovery * log . * @ exception SQLException thrown if a SQLException is * encountered when accessing the * Database . */ private void createLeaseTable ( Connection conn ) throws SQLException { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "createLeaseTable" , new java . lang . Object [ ] { conn , this } ) ; Statement createTableStmt = null ; PreparedStatement specStatement = null ; try { createTableStmt = conn . createStatement ( ) ; if ( _isOracle ) { String oracleTableString = oracleTablePreString + _leaseTableName + oracleTablePostString ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Create Oracle Table using: " + oracleTableString ) ; createTableStmt . executeUpdate ( oracleTableString ) ; } else { String db2TableString = db2TablePreString + _leaseTableName + db2TablePostString ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Create DB2 Table using: " + db2TableString ) ; createTableStmt . executeUpdate ( db2TableString ) ; } } finally { if ( createTableStmt != null && ! createTableStmt . isClosed ( ) ) { createTableStmt . close ( ) ; } if ( specStatement != null && ! specStatement . isClosed ( ) ) { specStatement . close ( ) ; } } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "createLeaseTable" ) ;
public class PlatformDependent { /** * Create a new { @ link Queue } which is safe to use for multiple producers ( different threads ) and a single * consumer ( one thread ! ) with the given fixes { @ code capacity } . */ public static < T > Queue < T > newFixedMpscQueue ( int capacity ) { } }
return hasUnsafe ( ) ? new MpscArrayQueue < T > ( capacity ) : new MpscAtomicArrayQueue < T > ( capacity ) ;
public class PGDImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setXpgUnits ( Integer newXpgUnits ) { } }
Integer oldXpgUnits = xpgUnits ; xpgUnits = newXpgUnits ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . PGD__XPG_UNITS , oldXpgUnits , xpgUnits ) ) ;
public class MessageBuilder { /** * メッセージ変数として列挙型を追加する 。 * @ param key 変数名 * @ param enums 列挙型の要素 * @ return 自身のタイプ */ public MessageBuilder varWithEnum ( final String key , final Enum < ? > enums ) { } }
vars . put ( key , enums . getClass ( ) . getSimpleName ( ) + "#" + enums . name ( ) ) ; return this ;
public class CounterImpl { /** * must be called from synchronized block ( CHECKED ) */ private void updateIncrementalSimonsIncrease ( long inc , long now ) { } }
for ( Simon simon : incrementalSimons . values ( ) ) { ( ( CounterImpl ) simon ) . increasePrivate ( inc , now ) ; }
public class CPDAvailabilityEstimatePersistenceImpl { /** * Returns the cpd availability estimate where uuid = & # 63 ; and groupId = & # 63 ; or returns < code > null < / code > if it could not be found . Uses the finder cache . * @ param uuid the uuid * @ param groupId the group ID * @ return the matching cpd availability estimate , or < code > null < / code > if a matching cpd availability estimate could not be found */ @ Override public CPDAvailabilityEstimate fetchByUUID_G ( String uuid , long groupId ) { } }
return fetchByUUID_G ( uuid , groupId , true ) ;
public class BundleMatcher { /** * Given a MetaLocale object ( internal to CLDR ) , return the best available * CLDR bundle identifier . */ public CLDR . Locale match ( MetaLocale locale ) { } }
MetaLocale result = availableBundlesMap . get ( locale ) ; MetaLocale maxBundleId = null ; if ( result == null ) { // If the locale isn ' t expanded , do that here by adding likely subtags . // For the vast majority of cases this code shouldn ' t be executed , since // we ' ve indexed permutations of locales to create a static mapping to // bundle identifiers . maxBundleId = languageResolver . addLikelySubtags ( locale ) ; MetaLocale minBundleId = languageResolver . removeLikelySubtags ( maxBundleId ) ; result = availableBundlesMap . get ( minBundleId ) ; } if ( result == null ) { // Fallback necessary to find best match for language tags that have // invalid fields , e . g . those with an unknown region , etc . MetaLocale key = maxBundleId . copy ( ) ; for ( int flags : LanguageResolver . MATCH_ORDER ) { LanguageResolver . set ( maxBundleId , key , flags ) ; result = availableBundlesMap . get ( key ) ; if ( result != null && result . hasLanguage ( ) ) { return result ; } } } return result != null ? result : availableBundlesMap . get ( ( MetaLocale ) CLDR . Locale . en ) ;
public class GeometryTools { /** * Calculates the center of the given atoms and returns it as a Point2d . * See comment for center ( IAtomContainer atomCon , Dimension areaDim , HashMap renderingCoordinates ) for details on coordinate sets * @ param atoms The Iterator of the given atoms * @ return The center of the given atoms as Point2d */ public static Point2d get2DCenter ( Iterator < IAtom > atoms ) { } }
IAtom atom ; double xsum = 0 ; double ysum = 0 ; int length = 0 ; while ( atoms . hasNext ( ) ) { atom = ( IAtom ) atoms . next ( ) ; if ( atom . getPoint2d ( ) != null ) { xsum += atom . getPoint2d ( ) . x ; ysum += atom . getPoint2d ( ) . y ; } ++ length ; } return new Point2d ( xsum / ( double ) length , ysum / ( double ) length ) ;
public class Context { /** * Set the Company currently valid for this context . * @ param _ company Company to set * @ throws CacheReloadException on error */ public void setCompany ( final Company _company ) throws CacheReloadException { } }
if ( _company == null ) { this . companyId = null ; } else { this . companyId = _company . getId ( ) ; }
public class DataService { /** * Method to retrieve all records for the given entity * Note , without pagination this will return only 100 records * Use query API to add pagintion and obtain additional records * @ param entity * the entity * @ return returns the queryResult * @ throws FMSException * throws FMSException */ @ SuppressWarnings ( "unchecked" ) public < T extends IEntity > List < T > findAll ( T entity ) throws FMSException { } }
String intuitQuery = "SELECT * FROM " + entity . getClass ( ) . getSimpleName ( ) ; QueryResult result = executeQuery ( intuitQuery ) ; return ( List < T > ) result . getEntities ( ) ;
public class RoaringBitmap { /** * Generate a new bitmap that has the same cardinality as x , but with * all its values incremented by offset . * @ param x source bitmap * @ param offset increment * @ return a new bitmap */ public static RoaringBitmap addOffset ( final RoaringBitmap x , int offset ) { } }
int container_offset = offset >>> 16 ; int in_container_offset = offset % ( 1 << 16 ) ; if ( in_container_offset == 0 ) { RoaringBitmap answer = x . clone ( ) ; for ( int pos = 0 ; pos < answer . highLowContainer . size ( ) ; pos ++ ) { int key = Util . toIntUnsigned ( answer . highLowContainer . getKeyAtIndex ( pos ) ) ; key += container_offset ; if ( key > 0xFFFF ) { throw new IllegalArgumentException ( "Offset too large." ) ; } answer . highLowContainer . keys [ pos ] = ( short ) key ; } return answer ; } else { RoaringBitmap answer = new RoaringBitmap ( ) ; for ( int pos = 0 ; pos < x . highLowContainer . size ( ) ; pos ++ ) { int key = Util . toIntUnsigned ( x . highLowContainer . getKeyAtIndex ( pos ) ) ; key += container_offset ; if ( key > 0xFFFF ) { throw new IllegalArgumentException ( "Offset too large." ) ; } Container c = x . highLowContainer . getContainerAtIndex ( pos ) ; Container [ ] offsetted = Util . addOffset ( c , ( short ) in_container_offset ) ; if ( ! offsetted [ 0 ] . isEmpty ( ) ) { int current_size = answer . highLowContainer . size ( ) ; int lastkey = 0 ; if ( current_size > 0 ) { lastkey = answer . highLowContainer . getKeyAtIndex ( current_size - 1 ) ; } if ( ( current_size > 0 ) && ( lastkey == key ) ) { Container prev = answer . highLowContainer . getContainerAtIndex ( current_size - 1 ) ; Container orresult = prev . ior ( offsetted [ 0 ] ) ; answer . highLowContainer . setContainerAtIndex ( current_size - 1 , orresult ) ; } else { answer . highLowContainer . append ( ( short ) key , offsetted [ 0 ] ) ; } } if ( ! offsetted [ 1 ] . isEmpty ( ) ) { if ( key == 0xFFFF ) { throw new IllegalArgumentException ( "Offset too large." ) ; } answer . highLowContainer . append ( ( short ) ( key + 1 ) , offsetted [ 1 ] ) ; } } answer . repairAfterLazy ( ) ; return answer ; }
public class FastMultidimensionalScalingTransform { /** * Estimate the ( singed ! ) Eigenvalue for a particular vector . * @ param mat Matrix . * @ param in Input vector . * @ return Estimated eigenvalue */ protected double estimateEigenvalue ( double [ ] [ ] mat , double [ ] in ) { } }
double de = 0. , di = 0. ; // Matrix multiplication : for ( int d1 = 0 ; d1 < in . length ; d1 ++ ) { final double [ ] row = mat [ d1 ] ; double t = 0. ; for ( int d2 = 0 ; d2 < in . length ; d2 ++ ) { t += row [ d2 ] * in [ d2 ] ; } final double s = in [ d1 ] ; de += t * s ; di += s * s ; } return de / di ;
public class DeviceProxyDAODefaultImpl { public void set_attribute_info ( final DeviceProxy deviceProxy , final AttributeInfo [ ] attr ) throws DevFailed { } }
checkIfTango ( deviceProxy , "set_attribute_config" ) ; build_connection ( deviceProxy ) ; try { final AttributeConfig [ ] config = new AttributeConfig [ attr . length ] ; for ( int i = 0 ; i < attr . length ; i ++ ) { config [ i ] = attr [ i ] . get_attribute_config_obj ( ) ; } deviceProxy . device . set_attribute_config ( config ) ; } catch ( final DevFailed e ) { throw e ; } catch ( final Exception e ) { ApiUtilDAODefaultImpl . removePendingRepliesOfDevice ( deviceProxy ) ; throw_dev_failed ( deviceProxy , e , "set_attribute_info" , true ) ; }
public class DefaultClusterManager {
  /**
   * Loads a network configuration and replies to the message with either the
   * serialized context ("status":"ok") or an error ("status":"error").
   *
   * @param message request whose body carries the "network" name to look up
   */
  private void doFindNetwork(final Message<JsonObject> message) {
    final String name = message.body().getString("network");
    if (name == null) {
      message.reply(new JsonObject().putString("status", "error").putString("message", "No network name specified."));
    } else {
      // Look the serialized context up in the cluster data map off the caller's thread.
      context.execute(new Action<NetworkContext>() {
        @Override
        public NetworkContext perform() {
          // Both the map name and the key within it are "<cluster>.<network>".
          String scontext = data.<String, String>getMap(String.format("%s.%s", cluster, name)).get(String.format("%s.%s", cluster, name));
          if (scontext != null) {
            return Contexts.<NetworkContext>deserialize(new JsonObject(scontext));
          }
          // No stored context for that name.
          return null;
        }
      }, new Handler<AsyncResult<NetworkContext>>() {
        @Override
        public void handle(AsyncResult<NetworkContext> result) {
          if (result.failed()) {
            message.reply(new JsonObject().putString("status", "error").putString("message", result.cause().getMessage()));
          } else if (result.result() == null) {
            // Lookup succeeded but found nothing: the name is not a known network.
            message.reply(new JsonObject().putString("status", "error").putString("message", "Not a valid network."));
          } else {
            message.reply(new JsonObject().putString("status", "ok").putObject("result", Contexts.serialize(result.result())));
          }
        }
      });
    }
  }
}
public class WASFGAMeasures {
  /**
   * Initializes the algorithm's measures and registers them with a single
   * measure manager: pull measures for elapsed time and evaluation count,
   * push measures for the current population and evaluation count.
   */
  private void initMeasures() {
    // Create the raw measures.
    durationMeasure = new DurationMeasure();
    iterations = new CountingMeasure(0);
    solutionListMeasure = new BasicMeasure<>();
    // Expose them through the manager under well-known names.
    measureManager = new SimpleMeasureManager();
    measureManager.setPullMeasure("currentExecutionTime", durationMeasure);
    measureManager.setPullMeasure("currentEvaluation", iterations);
    measureManager.setPushMeasure("currentPopulation", solutionListMeasure);
    // iterations is registered both as pull (polled) and push (notified).
    measureManager.setPushMeasure("currentEvaluation", iterations);
  }
}
public class CmsWidgetUtil { /** * Returns a flag , indicating if a boolean option is set , i . e . , it is in the map and has value null or ( case - insensitive ) " true " . * @ param configOptions the map with the config options . * @ param optionKey the boolean option to check . * @ return a flag , indicating if a boolean option is set */ public static boolean getBooleanOption ( Map < String , String > configOptions , String optionKey ) { } }
if ( configOptions . containsKey ( optionKey ) ) { String value = configOptions . get ( optionKey ) ; if ( ( value == null ) || Boolean . valueOf ( value ) . booleanValue ( ) ) { return true ; } } return false ;
public class StrutsUtil {
  /**
   * Get the message object. If we haven't already got one and
   * getMessageObjAttrName returns non-null, create one and implant it in the
   * session.
   *
   * @param id identifying string for messages
   * @param caller used for log identification
   * @param request needed to locate the session
   * @param messages MessageResources object for creating a new emitter
   * @param messageObjAttrName name of the session attribute; null means skip
   * @param errProp name of the exception message property
   * @param clear whether to clear the message list
   * @return MessageEmit, or null on failure (no attribute name or no session)
   */
  public static MessageEmit getMessageObj(final String id, final Object caller,
      final HttpServletRequest request, final MessageResources messages,
      final String messageObjAttrName, final String errProp, final boolean clear) {
    if (messageObjAttrName == null) {
      // don't set
      return null;
    }
    // Never create a session here: without one there is nowhere to keep messages.
    HttpSession sess = request.getSession(false);
    if (sess == null) {
      logger.error("No session!!!!!!!");
      return null;
    }
    Object o = sess.getAttribute(messageObjAttrName);
    MessageEmit msg = null;
    // Reuse the cached emitter only if it was initialised correctly.
    if ((o != null) && (o instanceof MessageEmitSvlt)) {
      if (((MessageEmitSvlt) o).getMessages() != null) {
        msg = (MessageEmit) o;
      }
    }
    if (msg == null) {
      msg = new MessageEmitSvlt();
    }
    // Re-initialise on every call so id/caller/clear reflect this request.
    ((MessageEmitSvlt) msg).reinit(id, caller, messages, new ActionMessages(), errProp, clear);
    // Implant in session
    sess.setAttribute(messageObjAttrName, msg);
    return msg;
  }
}
public class AbstractView {
  /**
   * Updates the limits if the point is not inside the visible screen.
   *
   * @param x x coordinate in user space
   * @param y y coordinate in user space
   */
  public void updatePoint(double x, double y) {
    // Grow the user-space bounds to include the point.
    userBounds.add(x, y);
    // Transform the point and feed the result straight into the
    // transformed-bounds accumulator via the method reference.
    transform.transform(x, y, transformedUserBounds::add);
  }
}
public class VueGWTTools {
  /**
   * Determine the name of fields at runtime. This allows fields to be renamed
   * during optimizations. The fieldsMarker runnable must set the wanted fields
   * to a marker value; properties holding a marker value after it runs are
   * reported.
   *
   * @param instance the instance we want to get the field names from
   * @param fieldsMarker marks the fields whose (possibly obfuscated) names we want
   * @return the set of names of the marked fields
   */
  public static Set<String> getFieldsName(Object instance, Runnable fieldsMarker) {
    JsPropertyMap<Any> map = cast(instance);
    JsObject jsObject = cast(instance);
    // First pass: delete own properties that already hold a marker value, so a
    // stale value cannot be mistaken for a freshly marked field.
    // NOTE(review): the Javadoc says markers are null, 0 or false, but the code
    // compares against null, 1 and true (asAny(1), asAny(true)) -- confirm
    // which is intended.
    map.forEach(key -> {
      if (!jsObject.hasOwnProperty(key)) {
        return;
      }
      try {
        Any val = asAny(map.get(key));
        if (isTripleEqual(val, null) || isTripleEqual(val, asAny(1)) || isTripleEqual(val, asAny(true))) {
          map.delete(key);
        }
      } catch (Exception e) {
      }
    });
    // Let the caller mark the fields of interest.
    fieldsMarker.run();
    // Second pass: any own property now holding a marker value was just set by
    // the marker; record its runtime name.
    Set<String> dataFields = new HashSet<>();
    map.forEach(key -> {
      if (!jsObject.hasOwnProperty(key)) {
        return;
      }
      try {
        Any val = asAny(map.get(key));
        if (isTripleEqual(val, null) || isTripleEqual(val, asAny(1)) || isTripleEqual(val, asAny(true))) {
          dataFields.add(key);
        }
      } catch (Exception e) {
      }
    });
    return dataFields;
  }
}
public class DolphinServlet { /** * Reads the body of the { @ code response } . * @ param request - an HttpServletRequest object that contains the request the client has made of the servlet * @ return the contents of the { @ code response } * @ throws IOException - if an input or output error is detected when the servlet handles the request * @ throws ServletException - if the request for the POST could not be handled */ protected String readInput ( HttpServletRequest request ) throws ServletException , IOException { } }
StringBuilder input = new StringBuilder ( ) ; String line = null ; while ( ( line = request . getReader ( ) . readLine ( ) ) != null ) { input . append ( line ) . append ( "\n" ) ; } return input . toString ( ) ;
public class AwsClientBuilder {
  /**
   * Sets the value of an advanced config option.
   *
   * @param key key of the value to set
   * @param value the new value
   * @param <T> type of the value
   */
  protected final <T> void putAdvancedConfig(AdvancedConfig.Key<T> key, T value) {
    // Delegates straight to the builder's advanced-config store.
    advancedConfig.put(key, value);
  }
}
public class LFltToSrtFunctionBuilder {
  /**
   * One of the ways of creating a builder; possibly the least verbose one,
   * since the compiler can infer the generic parameters.
   *
   * @param buildingFunction receives a fresh builder to configure
   * @return the function produced by the configured builder
   */
  @Nonnull
  public static LFltToSrtFunction fltToSrtFunctionFrom(Consumer<LFltToSrtFunctionBuilder> buildingFunction) {
    // Create, hand to the caller for configuration, then build.
    LFltToSrtFunctionBuilder builder = new LFltToSrtFunctionBuilder();
    buildingFunction.accept(builder);
    return builder.build();
  }
}
public class PropertyChangeUtils { /** * Tries to remove the given PropertyChangeListener from the given * target object . * If the given target object does not * { @ link # maintainsNamedPropertyChangeListeners ( Class ) * maintain named PropertyChangeListeners } , then nothing is done . * @ param target The target object * @ param propertyName The property name * @ param propertyChangeListener The PropertyChangeListener to remove * @ throws IllegalArgumentException If the attempt to invoke the method * for removing the given listener failed . * @ throws NullPointerException If any argument is < code > null < / code > */ public static void tryRemoveNamedPropertyChangeListenerUnchecked ( Object target , String propertyName , PropertyChangeListener propertyChangeListener ) { } }
Objects . requireNonNull ( target , "The target may not be null" ) ; Objects . requireNonNull ( propertyName , "The propertyName may not be null" ) ; Objects . requireNonNull ( propertyChangeListener , "The propertyChangeListener may not be null" ) ; if ( maintainsNamedPropertyChangeListeners ( target . getClass ( ) ) ) { PropertyChangeUtils . removeNamedPropertyChangeListenerUnchecked ( target , propertyName , propertyChangeListener ) ; }
public class PostCommentReplyRequestMarshaller {
  /**
   * Marshall the given parameter object into the protocol marshaller.
   *
   * @param postCommentReplyRequest the request to marshall (must not be null)
   * @param protocolMarshaller sink receiving the marshalled fields
   * @throws SdkClientException if the request is null or marshalling fails
   */
  public void marshall(PostCommentReplyRequest postCommentReplyRequest, ProtocolMarshaller protocolMarshaller) {
    if (postCommentReplyRequest == null) {
      throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
      // Emit each bound field in its declared order.
      protocolMarshaller.marshall(postCommentReplyRequest.getInReplyTo(), INREPLYTO_BINDING);
      protocolMarshaller.marshall(postCommentReplyRequest.getClientRequestToken(), CLIENTREQUESTTOKEN_BINDING);
      protocolMarshaller.marshall(postCommentReplyRequest.getContent(), CONTENT_BINDING);
    } catch (Exception e) {
      // Wrap any failure in the SDK's client exception, preserving the cause.
      throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
  }
}
public class SourceVisitor {
  /**
   * Visit a Source. This is the primary focus of the visitation: the source's
   * title string is captured and every attribute is then visited.
   *
   * @see GedObjectVisitor#visit(Source)
   */
  @Override
  public void visit(final Source source) {
    titleString = source.getString();
    // Dispatch this visitor over each attribute of the source.
    for (final GedObject gob : source.getAttributes()) {
      gob.accept(this);
    }
  }
}
public class Privacy { /** * Deletes an existing privacy list . If the privacy list being deleted was the default list * then the user will end up with no default list . Therefore , the user will have to set a new * default list . * @ param listName the name of the list being deleted . */ public void deletePrivacyList ( String listName ) { } }
// Remove the list from the cache // CHECKSTYLE : OFF this . getItemLists ( ) . remove ( listName ) ; // CHECKSTYLE : ON // Check if deleted list was the default list if ( this . getDefaultName ( ) != null && listName . equals ( this . getDefaultName ( ) ) ) { // CHECKSTYLE : OFF this . setDefaultName ( null ) ; // CHECKSTYLE : ON }
public class ExtendedMessageFormat {
  /**
   * Consume a quoted string, adding it to <code>appendTo</code> if specified.
   *
   * @param pattern pattern to parse
   * @param pos current parse position; advanced past the consumed text
   * @param appendTo optional StringBuilder to append to (may be null)
   * @param escapingOn whether to process escaped quotes
   * @return <code>appendTo</code>
   * @throws IllegalArgumentException if no closing quote is found
   */
  private StringBuilder appendQuotedString(String pattern, ParsePosition pos, StringBuilder appendTo, boolean escapingOn) {
    int start = pos.getIndex();
    char[] c = pattern.toCharArray();
    // An escaped quote at the current position collapses to a single quote.
    if (escapingOn && c[start] == QUOTE) {
      next(pos);
      return appendTo == null ? null : appendTo.append(QUOTE);
    }
    // lastHold marks the start of the run of plain characters not yet flushed.
    int lastHold = start;
    for (int i = pos.getIndex(); i < pattern.length(); i++) {
      if (escapingOn && pattern.substring(i).startsWith(ESCAPED_QUOTE)) {
        // Flush the pending run, emit one quote for the escape, skip past it.
        // NOTE(review): this branch dereferences appendTo without the null
        // check used elsewhere in this method -- confirm callers never pass a
        // null appendTo together with escapingOn.
        appendTo.append(c, lastHold, pos.getIndex() - lastHold).append(QUOTE);
        pos.setIndex(i + ESCAPED_QUOTE.length());
        lastHold = pos.getIndex();
        continue;
      }
      switch (c[pos.getIndex()]) {
      case QUOTE:
        // Closing quote: consume it and flush the remaining run.
        next(pos);
        return appendTo == null ? null : appendTo.append(c, lastHold, pos.getIndex() - lastHold);
      default:
        next(pos);
      }
    }
    throw new IllegalArgumentException("Unterminated quoted string at position " + start);
  }
}
public class UrlUtils { /** * URL - encodes a string . * Assumes { @ code string } is in { @ link StandardCharsets # UTF _ 8 } format . * @ param string * The string to URL - encode . * @ return The URL - encoded version of the input string , or { @ code null } if { @ code string } is { @ code null } . * @ throws IllegalStateException * If unable to URL - encode because the JVM doesn ' t support { @ link StandardCharsets # UTF _ 8 } . */ public static String urlEncode ( String string ) { } }
if ( string == null ) { return null ; } try { return encode ( string , StandardCharsets . UTF_8 . name ( ) ) ; } catch ( UnsupportedEncodingException e ) { throw new IllegalStateException ( "Platform doesn't support " + StandardCharsets . UTF_8 . name ( ) , e ) ; }
public class MBeanUtil {
  /**
   * Registers an MBean in the local MBean server, resolving name duplication.
   * If {@code replace} is true, an existing bean with the same name is
   * replaced by the new one; otherwise the new bean is registered under a
   * "copy{n}." prefixed name produced by {@code resolveDuplicateName}.
   *
   * @param bean MBean object
   * @param name requested MBean name
   * @param replace true to replace a duplicate, false to rename the new bean
   * @return the name the bean was actually registered under
   * @throws MalformedObjectNameException if the name is unacceptable as an object name
   * @throws NotCompliantMBeanException thrown by the MBeanServer
   * @throws MBeanRegistrationException thrown by the MBeanServer
   */
  public String registerMBean(Object bean, String name, boolean replace)
      throws MalformedObjectNameException, NotCompliantMBeanException, MBeanRegistrationException {
    // Serialize registration attempts so duplicate resolution stays consistent.
    synchronized (mBeanServer) {
      ObjectName newBeanName = null;
      try {
        newBeanName = new ObjectName(name);
        beansNames.add(newBeanName); // Saving bean name for possible later cleanups
        mBeanServer.registerMBean(bean, newBeanName);
        return name;
      } catch (InstanceAlreadyExistsException e) {
        beansNames.remove(newBeanName); // Bean not registered, its name is not required for cleanup
        if (replace) {
          // Drop the old bean, then retry under the same name.
          unregisterMBean(name);
          return registerMBean(bean, name, true);
        } else
          // Retry under a prefixed copy-name instead.
          return registerMBean(bean, resolveDuplicateName(name));
      }
    }
  }
}
public class BasicHeaderSegment {
  /**
   * Clears all the stored content of this BasicHeaderSegment object, resetting
   * every field to its default value.
   */
  final void clear() {
    immediateFlag = false;
    // LOGIN_REQUEST is the default opcode after a reset.
    operationCode = OperationCode.LOGIN_REQUEST;
    finalFlag = false;
    totalAHSLength = 0x00;
    dataSegmentLength = 0x00000000;
    initiatorTaskTag = 0x00000000;
    // Drop the parser so no stale parser handles the next PDU.
    parser = null;
  }
}
public class sslvserver { /** * Use this API to fetch all the sslvserver resources that are configured on netscaler . * This uses sslvserver _ args which is a way to provide additional arguments while fetching the resources . */ public static sslvserver [ ] get ( nitro_service service , sslvserver_args args ) throws Exception { } }
sslvserver obj = new sslvserver ( ) ; options option = new options ( ) ; option . set_args ( nitro_util . object_to_string_withoutquotes ( args ) ) ; sslvserver [ ] response = ( sslvserver [ ] ) obj . get_resources ( service , option ) ; return response ;
public class PolicyExecutor { /** * Performs a synchronous execution by first doing a pre - execute , calling the next executor , else calling the * executor ' s supplier , then finally doing a post - execute . */ protected Supplier < ExecutionResult > supply ( Supplier < ExecutionResult > supplier ) { } }
return ( ) -> { ExecutionResult result = preExecute ( ) ; if ( result != null ) return result ; return postExecute ( supplier . get ( ) ) ; } ;
public class Searches {
  /**
   * Searches for the last matching element and returns it.
   *
   * @param <E> the element type parameter
   * @param iterable the iterable to be searched (must not be null)
   * @param predicate the predicate to be applied to each element
   * @throws IllegalArgumentException if no element matches
   * @return the last element found
   */
  public static <E> E findLast(Iterable<E> iterable, Predicate<E> predicate) {
    dbc.precondition(iterable != null, "cannot searchLast with a null iterable");
    // Lazily keep only matching elements, then reduce to the final survivor.
    final Iterator<E> filtered = new FilteringIterator<E>(iterable.iterator(), predicate);
    return new LastElement<E>().apply(filtered);
  }
}
public class Weighers {
  /**
   * An entry weigher backed by the specified weigher: the weight of the value
   * determines the weight of the entry.
   *
   * @param weigher the weigher to be "wrapped" in an entry weigher
   * @return an entry weigher view of the specified weigher
   */
  public static <K, V> EntryWeigher<K, V> asEntryWeigher(final Weigher<? super V> weigher) {
    // Preserve the shared singleton identity when the trivial weigher is passed.
    return (weigher == singleton()) ? Weighers.<K, V>entrySingleton() : new EntryWeigherView<K, V>(weigher);
  }
}
public class HttpRequestMessageImpl { /** * Initialize the scheme information based on the socket being secure or not . */ public void initScheme ( ) { } }
// set the scheme based on whether the socket is secure or not if ( null == getServiceContext ( ) || null == getServiceContext ( ) . getTSC ( ) ) { // discrimination path , not ready for this yet return ; } if ( getServiceContext ( ) . isSecure ( ) ) { this . myScheme = SchemeValues . HTTPS ; } else { this . myScheme = SchemeValues . HTTP ; }
public class Key {
  /**
   * Return the info word for this Cloud, using the cached word when it already
   * matches the cloud's index.
   *
   * @param cloud the cloud to compute the packed info word for
   * @return the packed cache word (cloud index, home node, replica #, desired
   *         replication)
   */
  public long cloud_info(H2O cloud) {
    long x = _cache;
    // See if cached for this Cloud. This should be the 99% fast case.
    if (cloud(x) == cloud._idx) return x;
    // Cache missed! Probably it just needs (atomic) updating.
    // But we might be holding the stale cloud...
    // Figure out home Node in this Cloud
    char home = (char) D(0);
    // Figure out what replica # I am, if any: scan the desired replicas for SELF.
    int desired = desired(x);
    int replica = -1;
    for (int i = 0; i < desired; i++) {
      int idx = D(i);
      if (idx >= 0 && cloud._memary[idx] == H2O.SELF) {
        replica = i;
        break;
      }
    }
    long cache = build_cache(cloud._idx, home, replica, desired);
    set_cache(cache); // Attempt to upgrade cache, but ignore failure
    return cache; // Return the magic word for this Cloud
  }
}
public class AbstractElement {
  /**
   * Appends the child to the end of the element's content list.
   *
   * @param content child to append to the end of the content list; null values
   *        are ignored
   * @return the element on which the method was called
   */
  @Override
  public final org.jdom2.Element addContent(Content content) {
    // Ignore null children, but still return this element so calls can be
    // chained as the contract promises (the previous code returned null here,
    // contradicting the documented return value and breaking chaining).
    if (content == null) {
      return this;
    }
    return super.addContent(content);
  }
}
public class QDate {
  /**
   * Format the date using % escapes:
   * <table>
   * <tr><td>%a<td>day of week (short)
   * <tr><td>%A<td>day of week (verbose)
   * <tr><td>%b<td>month name (short; %h is an alias)
   * <tr><td>%B<td>month name (verbose)
   * <tr><td>%c<td>Java locale date
   * <tr><td>%d<td>day of month (two-digit)
   * <tr><td>%D<td>%m/%d/%y
   * <tr><td>%e<td>day of month (space-padded)
   * <tr><td>%F<td>%Y-%m-%d
   * <tr><td>%H<td>24-hour (two-digit)
   * <tr><td>%I<td>12-hour (two-digit)
   * <tr><td>%j<td>day of year (three-digit)
   * <tr><td>%l<td>12-hour (one-digit prefixed by space)
   * <tr><td>%m<td>month (two-digit)
   * <tr><td>%M<td>minutes
   * <tr><td>%p<td>am/pm
   * <tr><td>%P<td>AM/PM
   * <tr><td>%S<td>seconds
   * <tr><td>%s<td>milliseconds
   * <tr><td>%T<td>%H:%M:%S
   * <tr><td>%x<td>Java locale short date
   * <tr><td>%X<td>Java locale short time
   * <tr><td>%W<td>week in year
   * <tr><td>%w<td>day of week (one-digit)
   * <tr><td>%y<td>year (two-digit)
   * <tr><td>%Y<td>year (four-digit)
   * <tr><td>%Z<td>time zone (name)
   * <tr><td>%z<td>time zone (+/-0800)
   * </table>
   *
   * @param cb buffer the formatted text is appended to
   * @param format the %-escape format string
   * @return {@code cb}, for chaining
   */
  public CharBuffer format(CharBuffer cb, String format) {
    int length = format.length();
    for (int i = 0; i < length; i++) {
      char ch = format.charAt(i);
      // Plain characters are copied through untouched.
      if (ch != '%') {
        cb.append(ch);
        continue;
      }
      // ++i consumes the escape character following '%'.
      // Internal fields are zero-based (_month, _dayOfMonth, _dayOfYear), so
      // the two-digit emitters below print (value + 1) digit by digit.
      switch (format.charAt(++i)) {
      case 'a':
        cb.append(SHORT_WEEKDAY[getDayOfWeek() - 1]);
        break;
      case 'A':
        cb.append(LONG_WEEKDAY[getDayOfWeek() - 1]);
        break;
      case 'h':
      case 'b':
        cb.append(SHORT_MONTH[(int) _month]);
        break;
      case 'B':
        cb.append(LONG_MONTH[(int) _month]);
        break;
      case 'c':
        cb.append(printLocaleDate());
        break;
      case 'd':
        cb.append((_dayOfMonth + 1) / 10);
        cb.append((_dayOfMonth + 1) % 10);
        break;
      case 'D':
        cb.append((_month + 1) / 10);
        cb.append((_month + 1) % 10);
        cb.append('/');
        cb.append((_dayOfMonth + 1) / 10);
        cb.append((_dayOfMonth + 1) % 10);
        cb.append('/');
        cb.append(_year / 10 % 10);
        cb.append(_year % 10);
        break;
      case 'e':
        // Like %d but space-padded instead of zero-padded.
        if ((_dayOfMonth + 1) / 10 == 0)
          cb.append(' ');
        else
          cb.append((_dayOfMonth + 1) / 10);
        cb.append((_dayOfMonth + 1) % 10);
        break;
      // ISO year
      case 'F': {
        cb.append(_year / 1000 % 10);
        cb.append(_year / 100 % 10);
        cb.append(_year / 10 % 10);
        cb.append(_year % 10);
        cb.append('-');
        cb.append((_month + 1) / 10);
        cb.append((_month + 1) % 10);
        cb.append('-');
        cb.append((_dayOfMonth + 1) / 10);
        cb.append((_dayOfMonth + 1) % 10);
        break;
      }
      case 'H': {
        int hour = (int) (_timeOfDay / 3600000) % 24;
        cb.append(hour / 10);
        cb.append(hour % 10);
        break;
      }
      case 'I': {
        int hour = (int) (_timeOfDay / 3600000) % 12;
        if (hour == 0)
          hour = 12;
        cb.append(hour / 10);
        cb.append(hour % 10);
        break;
      }
      case 'j':
        cb.append((_dayOfYear + 1) / 100);
        cb.append((_dayOfYear + 1) / 10 % 10);
        cb.append((_dayOfYear + 1) % 10);
        break;
      case 'l': {
        int hour = (int) (_timeOfDay / 3600000) % 12;
        if (hour == 0)
          hour = 12;
        if (hour < 10) {
          cb.append(' ');
        }
        cb.append(hour);
        break;
      }
      case 'm':
        cb.append((_month + 1) / 10);
        cb.append((_month + 1) % 10);
        break;
      case 'M':
        cb.append((_timeOfDay / 600000) % 6);
        cb.append((_timeOfDay / 60000) % 10);
        break;
      case 'p': {
        int hour = (int) (_timeOfDay / 3600000) % 24;
        if (hour < 12)
          cb.append("am");
        else
          cb.append("pm");
        break;
      }
      case 'P': {
        int hour = (int) (_timeOfDay / 3600000) % 24;
        if (hour < 12)
          cb.append("AM");
        else
          cb.append("PM");
        break;
      }
      case 'S':
        cb.append((_timeOfDay / 10000) % 6);
        cb.append((_timeOfDay / 1000) % 10);
        break;
      case 's':
        cb.append((_timeOfDay / 100) % 10);
        cb.append((_timeOfDay / 10) % 10);
        cb.append(_timeOfDay % 10);
        break;
      case 'T': {
        int hour = (int) (_timeOfDay / 3600000) % 24;
        cb.append(hour / 10);
        cb.append(hour % 10);
        cb.append(':');
        cb.append((_timeOfDay / 600000) % 6);
        cb.append((_timeOfDay / 60000) % 10);
        cb.append(':');
        cb.append((_timeOfDay / 10000) % 6);
        cb.append((_timeOfDay / 1000) % 10);
        break;
      }
      case 'W':
        int week = getWeek();
        cb.append((week + 1) / 10);
        cb.append((week + 1) % 10);
        break;
      case 'w':
        cb.append(getDayOfWeek() - 1);
        break;
      case 'x':
        cb.append(printShortLocaleDate());
        break;
      case 'X':
        cb.append(printShortLocaleTime());
        break;
      case 'y':
        cb.append(_year / 10 % 10);
        cb.append(_year % 10);
        break;
      case 'Y':
        cb.append(_year / 1000 % 10);
        cb.append(_year / 100 % 10);
        cb.append(_year / 10 % 10);
        cb.append(_year % 10);
        break;
      case 'Z':
        if (_zoneName == null)
          cb.append("GMT");
        else
          cb.append(_zoneName);
        break;
      case 'z':
        // Emit the zone offset as a signed +HHMM string.
        long offset = _zoneOffset;
        if (offset < 0) {
          cb.append("-");
          offset = -offset;
        } else
          cb.append("+");
        cb.append((offset / 36000000) % 10);
        cb.append((offset / 3600000) % 10);
        cb.append((offset / 600000) % 6);
        cb.append((offset / 60000) % 10);
        break;
      case '%':
        cb.append('%');
        break;
      default:
        // Unknown escape: emit the character verbatim.
        cb.append(format.charAt(i));
      }
    }
    return cb;
  }
}
public class DeleteCustomerGatewayRequest {
  /**
   * This method is intended for internal use only. Returns the marshaled
   * request configured with additional parameters to enable operation dry-run.
   */
  @Override
  public Request<DeleteCustomerGatewayRequest> getDryRunRequest() {
    Request<DeleteCustomerGatewayRequest> request = new DeleteCustomerGatewayRequestMarshaller().marshall(this);
    // DryRun=true asks the service to validate the request without executing it.
    request.addParameter("DryRun", Boolean.toString(true));
    return request;
  }
}
public class LinearSearch { /** * Search for the value in the list and return the index of the specified occurrence from the * beginning of the list . The run time of this algorithm depends on the * implementation of the list . It is advised to use an array based implementation * to achieve O ( n ) runtime . * @ param < E > the type of elements in this list . * @ param list list that we are searching in . * @ param value value that is being searched in the list . * @ param occurrence number of times we have seen the value before returning the index . * @ return the index where the value is found in the list , else - 1. */ public static < E > int search ( List < E > list , E value , int occurrence ) { } }
if ( occurrence <= 0 || occurrence > list . size ( ) ) { throw new IllegalArgumentException ( "Occurrence must be greater or equal to 1 and less than " + "the list length: " + occurrence ) ; } int valuesSeen = 0 ; for ( int i = 0 ; i < list . size ( ) ; i ++ ) { if ( list . get ( i ) == value ) { valuesSeen ++ ; if ( valuesSeen == occurrence ) { return i ; } } } return - 1 ;
public class WriterCallbacks {
  /**
   * Creates a writer callback that copies all the content read from an
   * {@link InputStream} into the target stream.
   *
   * <p>This writer can be used only once, since it drains the supplied stream.
   *
   * @param is the source
   * @return the writer callback
   */
  public static WriterCallback from(final InputStream is) {
    return new WriterCallback() {
      @Override
      public void write(OutputStream os) throws IOException {
        // Guava utility: streams every remaining byte of is into os.
        ByteStreams.copy(is, os);
      }
    };
  }
}
public class PathUtil { /** * Adjusts the specified path to relative form : * 1 ) Removes , if present , a preceding slash 2 ) Adds , if not present , a trailing slash * Null arguments are returned as - is * @ param path */ public static String adjustToRelativeDirectoryContext ( final String path ) { } }
// Return nulls if ( path == null ) { return path ; } // Strip absolute form final String removedPrefix = optionallyRemovePrecedingSlash ( path ) ; // Add end of context slash final String addedPostfix = optionallyAppendSlash ( removedPrefix ) ; // Return return addedPostfix ;
public class Block {
  /**
   * Verify the transactions on a block: the first transaction must be a
   * coinbase and no other transaction may be one.
   *
   * @param height block height, if known, or -1 otherwise. If provided, used
   *        to validate the coinbase input script of v2 and above blocks.
   * @param flags verification flags controlling optional checks
   * @throws VerificationException if there was an error verifying the block.
   */
  private void checkTransactions(final int height, final EnumSet<VerifyFlag> flags) throws VerificationException {
    // The first transaction in a block must always be a coinbase transaction.
    if (!transactions.get(0).isCoinBase())
      throw new VerificationException("First tx is not coinbase");
    // When requested (and past the rule's activation height), the coinbase
    // script must commit to the block height.
    if (flags.contains(Block.VerifyFlag.HEIGHT_IN_COINBASE) && height >= BLOCK_HEIGHT_GENESIS) {
      transactions.get(0).checkCoinBaseHeight(height);
    }
    // The rest must not be.
    for (int i = 1; i < transactions.size(); i++) {
      if (transactions.get(i).isCoinBase())
        throw new VerificationException("TX " + i + " is coinbase when it should not be.");
    }
  }
}
public class UpdateRegexMatchSetRequest { /** * An array of < code > RegexMatchSetUpdate < / code > objects that you want to insert into or delete from a * < a > RegexMatchSet < / a > . For more information , see < a > RegexMatchTuple < / a > . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setUpdates ( java . util . Collection ) } or { @ link # withUpdates ( java . util . Collection ) } if you want to override * the existing values . * @ param updates * An array of < code > RegexMatchSetUpdate < / code > objects that you want to insert into or delete from a * < a > RegexMatchSet < / a > . For more information , see < a > RegexMatchTuple < / a > . * @ return Returns a reference to this object so that method calls can be chained together . */ public UpdateRegexMatchSetRequest withUpdates ( RegexMatchSetUpdate ... updates ) { } }
if ( this . updates == null ) { setUpdates ( new java . util . ArrayList < RegexMatchSetUpdate > ( updates . length ) ) ; } for ( RegexMatchSetUpdate ele : updates ) { this . updates . add ( ele ) ; } return this ;
public class ServiceConfigurationManager { /** * Registers a new service into the library * @ param id * The id of the service * @ param configFile * The content of the json document describing the service * @ param describeService * The parsed { @ link DescribeService } * @ throws POIProxyException * When any of the parameters is null */ public void registerServiceConfiguration ( String id , String configFile , DescribeService service ) throws POIProxyException { } }
if ( service == null || configFile == null || service . getId ( ) == null ) { throw new POIProxyException ( "Null service configuration" ) ; } this . registeredConfigurations . put ( id , configFile ) ; this . parsedConfigurations . put ( id , service ) ; try { this . save ( id , configFile ) ; } catch ( IOException e ) { throw new POIProxyException ( "Unable to write service configuration" ) ; }
public class Snappy {
  /**
   * High-level API for uncompressing the input byte array.
   *
   * @param input compressed bytes
   * @return the uncompressed byte array
   * @throws IOException if the input is not valid compressed data
   */
  public static byte[] uncompress(byte[] input) throws IOException {
    // Ask the codec for the decoded size up front, then decompress in one shot
    // into an exactly-sized buffer.
    byte[] result = new byte[Snappy.uncompressedLength(input)];
    Snappy.uncompress(input, 0, input.length, result, 0);
    return result;
  }
}
public class OgmEntityPersister {
  /**
   * Returns the names of all those columns which represent a collection to be
   * stored within the owning entity structure (element collections and/or
   * *-to-many associations, depending on the dialect's capabilities).
   */
  private List<String> getEmbeddedCollectionColumns() {
    List<String> embeddedCollections = new ArrayList<String>();
    for (String property : getPropertyNames()) {
      Type propertyType = getPropertyType(property);
      if (propertyType.isAssociationType()) {
        Joinable associatedJoinable = ((AssociationType) propertyType).getAssociatedJoinable(getFactory());
        // *-to-many
        if (associatedJoinable.isCollection()) {
          OgmCollectionPersister inversePersister = (OgmCollectionPersister) associatedJoinable;
          // Only collections the dialect stores inside the entity structure count.
          if (gridDialect.isStoredInEntityStructure(inversePersister.getAssociationKeyMetadata(), inversePersister.getAssociationTypeContext(property))) {
            embeddedCollections.add(property);
          }
        }
        // *-to-one
        else {
          // TODO: For now I'm adding all *-to-one columns to the projection list; actually we need to ask the
          // dialect whether it's an embedded association, which we can't find out atm. though as we need all
          // entity persisters to be set up for this
          embeddedCollections.add(property);
        }
      }
      // for embeddables check whether they contain element collections
      else if (propertyType.isComponentType()) {
        collectEmbeddedCollectionColumns((ComponentType) propertyType, property, embeddedCollections);
      }
    }
    return embeddedCollections;
  }
}
public class ProtocolDataUnit { /** * Serializes the data segment ( binary or key - value pairs ) to a destination array , staring from offset to write . * @ param dst The array to write in . * @ param offset The start offset to start from in < code > dst < / code > . * @ return The written length . * @ throws InternetSCSIException If any violation of the iSCSI - Standard emerge . */ public final int serializeDataSegment ( final ByteBuffer dst , final int offset ) throws InternetSCSIException { } }
dataSegment . rewind ( ) ; dst . position ( offset ) ; dst . put ( dataSegment ) ; return dataSegment . limit ( ) ;
public class AdminDictSynonymAction { @ Execute public HtmlResponse uploadpage ( final String dictId ) { } }
saveToken ( ) ; return asHtml ( path_AdminDictSynonym_AdminDictSynonymUploadJsp ) . useForm ( UploadForm . class , op -> { op . setup ( form -> { form . dictId = dictId ; } ) ; } ) . renderWith ( data -> { synonymService . getSynonymFile ( dictId ) . ifPresent ( file -> { RenderDataUtil . register ( data , "path" , file . getPath ( ) ) ; } ) . orElse ( ( ) -> { throwValidationError ( messages -> messages . addErrorsFailedToDownloadSynonymFile ( GLOBAL ) , ( ) -> asDictIndexHtml ( ) ) ; } ) ; } ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getIfcTendonTypeEnum ( ) { } }
if ( ifcTendonTypeEnumEEnum == null ) { ifcTendonTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 912 ) ; } return ifcTendonTypeEnumEEnum ;
public class AbstractCassandraMutation {
    /**
     * Encodes a byte array as a lowercase hexadecimal string.
     *
     * @param bytes the bytes to encode
     * @return the hex representation, exactly two characters per input byte
     */
    public static String toHex(byte[] bytes) {
        // Presize: every byte expands to exactly two hex characters.
        final StringBuilder hexString = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            final String hex = Integer.toHexString(0xFF & b);
            if (hex.length() == 1) {
                // toHexString drops leading zeros; pad to keep a fixed width.
                hexString.append('0');
            }
            hexString.append(hex);
        }
        return hexString.toString();
    }
}
public class SimpleParser {
    /**
     * Returns a buffered input stream to read data from the given file name.
     *
     * @param path the file system path to open
     * @return a buffered stream over the file's contents
     * @throws IOException if the file cannot be opened
     */
    protected InputStream getInputStream(String path) throws IOException {
        // Wrap the raw file stream for efficient small reads.
        return new BufferedInputStream(new FileInputStream(path));
    }
}
public class ChangeObjects { /** * method to change a group of the HELM2Notation * @ param notation * new group * @ param position * position of the to be changed group * @ param helm2notation * input HELM2Notation */ public final static void changeGroup ( final GroupingNotation notation , final int position , final HELM2Notation helm2notation ) { } }
helm2notation . getListOfGroupings ( ) . set ( position , notation ) ;
public class SQLiteViewStore { private int getViewID ( ) { } }
if ( viewID < 0 ) { String sql = "SELECT view_id FROM views WHERE name=?" ; String [ ] args = { name } ; Cursor cursor = null ; try { cursor = store . getStorageEngine ( ) . rawQuery ( sql , args ) ; if ( cursor . moveToNext ( ) ) { viewID = cursor . getInt ( 0 ) ; } } catch ( SQLException e ) { Log . e ( Log . TAG_VIEW , "Error getting view id" , e ) ; } finally { if ( cursor != null ) { cursor . close ( ) ; } } } return viewID ;
public class AbstractThriftConnectionStrategy { /** * @ see com . wmz7year . thrift . pool . ThriftConnectionStrategy # getConnection ( ) */ @ Override public ThriftConnection < T > getConnection ( ) throws ThriftConnectionPoolException { } }
long statsObtainTime = preConnection ( ) ; ThriftConnectionHandle < T > result = ( ThriftConnectionHandle < T > ) getConnectionInternal ( ) ; if ( result != null ) { postConnection ( result , statsObtainTime ) ; } return result ;
public class RelationTransformer { /** * Changes the ' mappedBy ' property of all { @ link AttributeType # ONE _ TO _ MANY } attributes of an * { @ link EntityType } to other , new Attributes . Does nothing for mappedBy attributes whose IDs are * not present in the supplied Map . * @ param entityType the EntityType to update * @ param newAttributes a Map of ( old ) Attribute IDs and new Attributes */ static void transformMappedBys ( EntityType entityType , Map < String , Attribute > newAttributes ) { } }
if ( newAttributes . isEmpty ( ) ) { return ; } stream ( entityType . getAtomicAttributes ( ) ) . filter ( Attribute :: isMappedBy ) . forEach ( attr -> transformMappedBy ( attr , newAttributes ) ) ;
public class FastAdapterDialog { /** * Set a listener to be invoked when the positive button of the dialog is pressed . * @ param text The text to display in the positive button * @ param listener The { @ link DialogInterface . OnClickListener } to use . * @ return This Builder object to allow for chaining of calls to set methods */ public FastAdapterDialog < Item > withPositiveButton ( String text , OnClickListener listener ) { } }
return withButton ( BUTTON_POSITIVE , text , listener ) ;
public class CmsExport { /** * Exports one single project with all it ' s data . < p > * @ param parent the parent node to add the project to * @ param project the project to be exported * @ throws CmsImportExportException if something goes wrong * @ throws SAXException if something goes wrong processing the manifest . xml */ protected void exportProject ( Element parent , CmsProject project ) throws CmsImportExportException , SAXException { } }
I_CmsReport report = getReport ( ) ; CmsDefaultUsers defaultUsers = OpenCms . getDefaultUsers ( ) ; String users ; try { users = getCms ( ) . readGroup ( project . getGroupId ( ) ) . getName ( ) ; } catch ( CmsException e ) { CmsMessageContainer message = org . opencms . db . Messages . get ( ) . container ( org . opencms . db . Messages . ERR_READ_GROUP_FOR_ID_1 , project . getGroupId ( ) ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( message . key ( ) , e ) ; } users = defaultUsers . getGroupUsers ( ) ; report . println ( org . opencms . report . Messages . get ( ) . container ( org . opencms . report . Messages . RPT_DOTS_0 ) ) ; report . print ( message , I_CmsReport . FORMAT_ERROR ) ; } String managers ; try { managers = getCms ( ) . readGroup ( project . getManagerGroupId ( ) ) . getName ( ) ; } catch ( CmsException e ) { CmsMessageContainer message = org . opencms . db . Messages . get ( ) . container ( org . opencms . db . Messages . ERR_READ_GROUP_FOR_ID_1 , project . getManagerGroupId ( ) ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( message . key ( ) , e ) ; } managers = defaultUsers . getGroupAdministrators ( ) ; report . println ( org . opencms . report . Messages . get ( ) . container ( org . opencms . report . Messages . RPT_DOTS_0 ) ) ; report . print ( message , I_CmsReport . FORMAT_ERROR ) ; } Element e = parent . addElement ( CmsImportVersion10 . N_PROJECT ) ; e . addElement ( CmsImportVersion10 . N_NAME ) . addText ( project . getSimpleName ( ) ) ; e . addElement ( CmsImportVersion10 . N_DESCRIPTION ) . addCDATA ( project . getDescription ( ) ) ; e . addElement ( CmsImportVersion10 . N_USERSGROUP ) . addText ( users ) ; e . addElement ( CmsImportVersion10 . N_MANAGERSGROUP ) . addText ( managers ) ; Element resources = e . addElement ( CmsImportVersion10 . N_RESOURCES ) ; try { Iterator < String > it = getCms ( ) . readProjectResources ( project ) . iterator ( ) ; while ( it . hasNext ( ) ) { String resName = it . 
next ( ) ; resources . addElement ( CmsImportVersion10 . N_RESOURCE ) . addText ( resName ) ; } } catch ( CmsException exc ) { CmsMessageContainer message = org . opencms . db . Messages . get ( ) . container ( org . opencms . db . Messages . ERR_READ_PROJECT_RESOURCES_2 , project . getName ( ) , project . getUuid ( ) ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( message . key ( ) , exc ) ; } throw new CmsImportExportException ( message , exc ) ; } // write the XML digestElement ( parent , e ) ;
public class MatchFilterBase { /** * Sets the match and nomatch return values based on a " policy " * string . Valid values for the policy string are defined as * constants for this class : ACCEPT _ ON _ MATCH , DENY _ ON _ MATCH , * ACCEPT _ ON _ NOMATCH , DENY _ ON _ NOMATCH . * @ param policyStr The policy to use . */ public void setChainPolicy ( String policyStr ) { } }
if ( policyStr . equalsIgnoreCase ( ACCEPT_ON_MATCH ) ) { matchReturnValue = ACCEPT ; noMatchReturnValue = NEUTRAL ; } else if ( policyStr . equalsIgnoreCase ( DENY_ON_MATCH ) ) { matchReturnValue = DENY ; noMatchReturnValue = NEUTRAL ; } else if ( policyStr . equalsIgnoreCase ( ACCEPT_ON_NOMATCH ) ) { matchReturnValue = NEUTRAL ; noMatchReturnValue = ACCEPT ; } else if ( policyStr . equalsIgnoreCase ( DENY_ON_NOMATCH ) ) { matchReturnValue = NEUTRAL ; noMatchReturnValue = DENY ; } else { LogLog . error ( "invalid chainPolicy: " + policyStr ) ; }
public class ChgrpCommand { /** * Changes the group for the directory or file with the path specified in args . * @ param path The { @ link AlluxioURI } path as the input of the command * @ param group The group to be updated to the file or directory * @ param recursive Whether change the group recursively */ private void chgrp ( AlluxioURI path , String group , boolean recursive ) throws AlluxioException , IOException { } }
SetAttributePOptions options = SetAttributePOptions . newBuilder ( ) . setGroup ( group ) . setRecursive ( recursive ) . build ( ) ; mFileSystem . setAttribute ( path , options ) ; System . out . println ( "Changed group of " + path + " to " + group ) ;
public class AssetWatcher { /** * Adds an image resource to be watched . */ public void add ( Image image ) { } }
assert ! start || listener == null ; ++ total ; image . addCallback ( callback ) ;
public class Music { /** * Returns a collection of Note between begin and end included * @ return a Collection of NoteAbstract ( Note or MultiNote ) * @ throws IllegalArgumentException * @ deprecated use { @ link # getVoice ( String ) } . { @ link Voice # getNotesBetween ( MusicElement , MusicElement ) getNotesBetween . . . } */ public Collection getNotesBetween ( MusicElement elmtBegin , MusicElement elmtEnd ) throws IllegalArgumentException { } }
return getFirstVoice ( ) . getNotesBetween ( elmtBegin , elmtEnd ) ;
public class JsonRuntimeReporterHelper { /** * Construct the JSON report for report generation * @ return A { @ link JsonObject } which represents the report . */ private JsonObject buildJSONReport ( ) { } }
logger . entering ( ) ; Gson gson = new GsonBuilder ( ) . setPrettyPrinting ( ) . create ( ) ; JsonArray testObjects = loadJSONArray ( jsonCompletedTest ) ; for ( TestMethodInfo temp : completedTest ) { testObjects . add ( gson . fromJson ( temp . toJson ( ) , JsonElement . class ) ) ; } for ( TestMethodInfo temp : runningTest ) { testObjects . add ( gson . fromJson ( temp . toJson ( ) , JsonElement . class ) ) ; } JsonArray configObjects = loadJSONArray ( jsonCompletedConfig ) ; for ( ConfigMethodInfo temp : runningConfig ) { configObjects . add ( gson . fromJson ( temp . toJson ( ) , JsonElement . class ) ) ; } JsonObject summary = new JsonObject ( ) ; summary . add ( "testMethodsSummary" , getReportSummaryCounts ( testObjects ) ) ; summary . add ( "configurationMethodsSummary" , getReportSummaryCounts ( configObjects ) ) ; JsonElement reportMetadata = gson . fromJson ( ReporterConfigMetadata . toJsonAsString ( ) , JsonElement . class ) ; JsonObject reporter = new JsonObject ( ) ; reporter . add ( "reportSummary" , summary ) ; reporter . add ( "testMethods" , testObjects ) ; reporter . add ( "configurationMethods" , configObjects ) ; reporter . add ( "configSummary" , generateConfigSummary ( ) ) ; reporter . add ( "localConfigSummary" , testJsonLocalConfigSummary ) ; reporter . add ( "reporterMetadata" , reportMetadata ) ; logger . exiting ( reporter ) ; return reporter ;
public class SoapServerFaultResponseActionBuilder { /** * Sets the attachment with content resource . * @ param contentId * @ param contentType * @ param contentResource * @ return */ public SoapServerFaultResponseActionBuilder attachment ( String contentId , String contentType , Resource contentResource ) { } }
return attachment ( contentId , contentType , contentResource , FileUtils . getDefaultCharset ( ) ) ;
public class Resolve { /** * Main overload resolution routine . On each overload resolution step , a * lookup helper class is used to perform the method / constructor lookup ; * at the end of the lookup , the helper is used to validate the results * ( this last step might trigger overload resolution diagnostics ) . */ Symbol lookupMethod ( Env < AttrContext > env , DiagnosticPosition pos , Symbol location , MethodCheck methodCheck , LookupHelper lookupHelper ) { } }
MethodResolutionContext resolveContext = new MethodResolutionContext ( ) ; resolveContext . methodCheck = methodCheck ; return lookupMethod ( env , pos , location , resolveContext , lookupHelper ) ;
public class HandlerHolder { /** * Parse the topic specifications into the appropriate lists . * @ param topics * the topics the handler is interested in */ private void populateTopics ( String [ ] topics ) { } }
for ( String t : topics ) { // Clean up leading and trailing white space as appropriate t = t . trim ( ) ; // Ignore topics that start or end with a ' / ' if ( t . startsWith ( "/" ) || t . endsWith ( "/" ) || t . contains ( "//" ) || t . isEmpty ( ) ) { continue ; } // Validate subscribe permission per section 113.10.2 checkTopicSubscribePermission ( t ) ; if ( t . equals ( "*" ) ) { wildcardTopics . add ( "" ) ; } else if ( t . endsWith ( "/*" ) ) { wildcardTopics . add ( t . substring ( 0 , t . length ( ) - 1 ) ) ; } else { discreteTopics . add ( t ) ; } }
public class DigestUtils { /** * Computes hex encoded SHA512 digest . * @ param data data to be hashed * @ return sha - 512 hash */ public static String sha512 ( final String data ) { } }
return digest ( MessageDigestAlgorithms . SHA_512 , data . getBytes ( StandardCharsets . UTF_8 ) ) ;
public class SipExtension { /** * { @ inheritDoc } */ @ Override public void initialize ( ExtensionContext context ) { } }
// final boolean registerRuntimeOnly = context . isRuntimeOnlyRegistrationValid ( ) ; final SubsystemRegistration subsystem = context . registerSubsystem ( SUBSYSTEM_NAME , ModelVersion . create ( MANAGEMENT_API_MAJOR_VERSION , MANAGEMENT_API_MINOR_VERSION ) ) ; final ManagementResourceRegistration registration = subsystem . registerSubsystemModel ( SipDefinition . INSTANCE ) ; registration . registerOperationHandler ( GenericSubsystemDescribeHandler . DEFINITION , GenericSubsystemDescribeHandler . INSTANCE ) ; // final ManagementResourceRegistration registration = // subsystem . registerSubsystemModel ( SipSubsystemDescriptionProviders . SUBSYSTEM ) ; // registration . registerOperationHandler ( ADD , SipSubsystemAdd . INSTANCE , SipSubsystemAdd . INSTANCE , false ) ; // registration . registerOperationHandler ( DESCRIBE , SipSubsystemDescribe . INSTANCE , SipSubsystemDescribe . INSTANCE , false , // OperationEntry . EntryType . PRIVATE ) ; // registration . registerOperationHandler ( REMOVE , ReloadRequiredRemoveStepHandler . INSTANCE , // SipSubsystemDescriptionProviders . SUBSYSTEM _ REMOVE , false ) ; subsystem . registerXMLElementWriter ( SipSubsystemParser . getInstance ( ) ) ; // connectors final ManagementResourceRegistration connectors = registration . registerSubModel ( SipConnectorDefinition . INSTANCE ) ; // final ManagementResourceRegistration connectors = registration . registerSubModel ( CONNECTOR _ PATH , // SipSubsystemDescriptionProviders . CONNECTOR ) ; // connectors . registerOperationHandler ( ADD , SipConnectorAdd . INSTANCE , SipConnectorAdd . INSTANCE , false ) ; // connectors . registerOperationHandler ( REMOVE , SipConnectorRemove . INSTANCE , SipConnectorRemove . INSTANCE , false ) ; // if ( registerRuntimeOnly ) { // for ( final String attributeName : SipConnectorMetrics . ATTRIBUTES ) { // connectors . registerMetric ( attributeName , SipConnectorMetrics . INSTANCE ) ; // connectors . registerReadWriteAttribute ( Constants . 
PROTOCOL , null , new // WriteAttributeHandlers . StringLengthValidatingHandler ( 1 , true ) , Storage . CONFIGURATION ) ; // connectors . registerReadWriteAttribute ( Constants . SCHEME , null , new // WriteAttributeHandlers . StringLengthValidatingHandler ( 1 , true ) , Storage . CONFIGURATION ) ; // connectors . registerReadWriteAttribute ( Constants . SOCKET _ BINDING , null , new // WriteAttributeHandlers . StringLengthValidatingHandler ( 1 ) , Storage . CONFIGURATION ) ; // connectors . registerReadWriteAttribute ( Constants . ENABLED , null , new // WriteAttributeHandlers . ModelTypeValidatingHandler ( ModelType . BOOLEAN , true ) , Storage . CONFIGURATION ) ; // deployment final ManagementResourceRegistration deployments = subsystem . registerDeploymentModel ( SipDeploymentDefinition . INSTANCE ) ; deployments . registerSubModel ( SipDeploymentServletDefinition . INSTANCE ) ; // if ( registerRuntimeOnly ) { // final ManagementResourceRegistration deployments = // subsystem . registerDeploymentModel ( SipSubsystemDescriptionProviders . DEPLOYMENT ) ; // final ManagementResourceRegistration servlets = deployments . registerSubModel ( PathElement . pathElement ( " servlet " ) , // SipSubsystemDescriptionProviders . SERVLET ) ; // ServletDeploymentStats . register ( servlets ) ;
public class ApiOvhHostingweb { /** * Request specific operation for your hosting * REST : POST / hosting / web / { serviceName } / request * @ param action [ required ] Action you want to request * @ param serviceName [ required ] The internal name of your hosting */ public OvhTask serviceName_request_POST ( String serviceName , OvhRequestActionEnum action ) throws IOException { } }
String qPath = "/hosting/web/{serviceName}/request" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "action" , action ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhTask . class ) ;
public class ExprCfgParserImpl { /** * term : : = simple - term * : : = term ' [ ' expr ' ] ' * : : = term . identifier */ private ExprCfg parseTerm ( ) { } }
// Parses a postfix "term": a simple term followed by any number of suffixes.
// Active suffixes: '(' args ')' (method call) and '.' identifier (field access).
// The '[' expr ']' indexing case and the NOT case are present only as
// commented-out code and are kept for reference. The loop consumes suffix
// tokens until a non-suffix token is seen; that token is pushed back via
// unreadToken() and the accumulated term is returned.
// NOTE(review): the DOT branch's error message says "Expected `]'" although it
// reports a bad field identifier — presumably copied from the brace case;
// confirm before relying on the wording.
ExprCfg term = parseSimpleTerm ( ) ; while ( true ) { ExprToken token = scanToken ( ) ; switch ( token ) { /* case LBRACE : ExprCfg expr = parseExpr ( ) ; token = scanToken ( ) ; if ( token ! = ExprToken . RBRACE ) { throw error ( L . l ( " Expected ' ] ' at { 0 } . All open array braces must have matching closing brace . " , token ) ) ; term = term . createField ( expr ) ; break ; */ case LPAREN : { ExprCfg [ ] args = parseArgs ( ) ; ExprCfg expr = term . createMethod ( args ) ; if ( expr == null ) throw error ( L . l ( "Method call not supported in this context `{0}'." , term ) ) ; term = expr ; break ; } case DOT : { int ch = skipWhitespace ( read ( ) ) ; if ( ! Character . isJavaIdentifierStart ( ( char ) ch ) ) throw error ( L . l ( "Expected `]' at {0}. Field references must be identifiers." , badChar ( ch ) ) ) ; String field = readName ( ch ) ; term = term . createField ( field ) ; break ; } /* case Expr . NOT : { if ( Expr . NOT = = token & & term ! = null & & term . isConstant ( ) ) throw new ELParseException ( L . l ( " invalid expression ` { 0 } ' " , _ string ) ) ; unreadToken ( ) ; return term ; */ default : unreadToken ( ) ; return term ; } }
public class ContentSpecParser {
    /**
     * Processes a line that represents the Global Options for the Content Specification.
     *
     * @param parserData the state of the current parse run
     * @param line the line to be processed
     * @param lineNumber the number of the line being processed, used for reporting
     * @return true if the line was processed without errors, otherwise false
     * @throws ParsingException if the line contains typed (relationship) variables
     */
    protected boolean parseGlobalOptionsLine(final ParserData parserData, final String line, int lineNumber) throws ParsingException {
        // Read in the variables from the line
        final HashMap<ParserType, String[]> variableMap =
                getLineVariables(parserData, line, lineNumber, '[', ']', ',', false);

        // Global options may only contain plain (NONE-typed) variables; anything
        // typed (e.g. a relationship) is invalid at this level.
        if ((variableMap.size() > 1 && variableMap.containsKey(ParserType.NONE))
                || (variableMap.size() > 0 && !variableMap.containsKey(ParserType.NONE))) {
            throw new ParsingException(format(ProcessorConstants.ERROR_RELATIONSHIP_BASE_LEVEL_MSG, lineNumber, line));
        }

        final String[] variables = variableMap.get(ParserType.NONE);
        // Guard against a missing NONE entry (an empty map previously caused a
        // NullPointerException here); treat it like an empty set of options.
        if (variables != null && variables.length > 0) {
            addOptions(parserData, parserData.getCurrentLevel(), variables, 0, line, lineNumber);
        } else {
            log.warn(format(ProcessorConstants.WARN_EMPTY_BRACKETS_MSG, lineNumber));
        }
        return true;
    }
}
public class Story { /** * Change the current position of the story to the given path . From here you can * call Continue ( ) to evaluate the next line . * The path String is a dot - separated path as used ly by the engine . These * examples should work : * myKnot myKnot . myStitch * Note however that this won ' t necessarily work : * myKnot . myStitch . myLabelledChoice * . . . because of the way that content is nested within a weave structure . * By default this will reset the callstack beforehand , which means that any * tunnels , threads or functions you were in at the time of calling will be * discarded . This is different from the behaviour of ChooseChoiceIndex , which * will always keep the callstack , since the choices are known to come from the * correct state , and known their source thread . * You have the option of passing false to the resetCallstack parameter if you * don ' t want this behaviour , and will leave any active threads , tunnels or * function calls in - tact . * This is potentially dangerous ! If you ' re in the middle of a tunnel , it ' ll * redirect only the inner - most tunnel , meaning that when you tunnel - return * using ' - & gt ; - & gt ; - & gt ; ' , it ' ll return to where you were before . This may be * what you want though . However , if you ' re in the middle of a function , * ChoosePathString will throw an exception . * @ param path * A dot - separted path string , as specified above . * @ param resetCallstack * Whether to reset the callstack first ( see summary description ) . * @ param arguments * Optional set of arguments to pass , if path is to a knot that takes * them . */ public void choosePathString ( String path , boolean resetCallstack , Object [ ] arguments ) throws Exception { } }
ifAsyncWeCant ( "call ChoosePathString right now" ) ; if ( resetCallstack ) { resetCallstack ( ) ; } else { // ChoosePathString is potentially dangerous since you can call it when the // stack is // pretty much in any state . Let ' s catch one of the worst offenders . if ( state . getCallStack ( ) . getCurrentElement ( ) . type == PushPopType . Function ) { String funcDetail = "" ; Container container = state . getCallStack ( ) . getCurrentElement ( ) . currentPointer . container ; if ( container != null ) { funcDetail = "(" + container . getPath ( ) . toString ( ) + ") " ; } throw new Exception ( "Story was running a function " + funcDetail + "when you called ChoosePathString(" + path + ") - this is almost certainly not not what you want! Full stack trace: \n" + state . getCallStack ( ) . getCallStackTrace ( ) ) ; } } state . passArgumentsToEvaluationStack ( arguments ) ; choosePath ( new Path ( path ) ) ;
public class AbstractLoginServlet { /** * 返回一个不包含contextPath的请求路径 , 如 : < code > / ssoclient / login < / code > */ protected String parseRequestUriWithoutContextPath ( HttpServletRequest req ) { } }
String requestUri = req . getRequestURI ( ) ; String contextPath = req . getContextPath ( ) ; requestUri = requestUri . substring ( contextPath . length ( ) ) ; if ( requestUri . startsWith ( "/" ) ) { return requestUri ; } else { return "/" + requestUri ; }
public class CmsWorkplaceUserInfoEntry { /** * Returns the class type . < p > * @ return the class type */ public Class < ? > getClassType ( ) { } }
if ( m_type == null ) { return String . class ; } try { return Class . forName ( m_type ) ; } catch ( ClassNotFoundException e ) { return String . class ; }
public class Interpreter { /** * Get a line of a source file by its location . * @ param file * @ param line * @ param sep * @ return */ public String getSourceLine ( File file , int line , String sep ) { } }
SourceFile source = sourceFiles . get ( file ) ; if ( source == null ) { try { source = new SourceFile ( file ) ; sourceFiles . put ( file , source ) ; } catch ( IOException e ) { return "Cannot open source file: " + file ; } } return line + sep + source . getLine ( line ) ;
public class Downloader { /** * Retrieves a file from a given URL and returns the contents . * @ param url the URL of the file to download * @ param useProxy whether to use the configured proxy when downloading * files * @ return the content of the file * @ throws DownloadFailedException is thrown if there is an error * downloading the file */ public String fetchContent ( URL url , boolean useProxy ) throws DownloadFailedException { } }
try ( HttpResourceConnection conn = new HttpResourceConnection ( settings , useProxy ) ; ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ) { final InputStream in = conn . fetch ( url ) ; IOUtils . copy ( in , out ) ; return out . toString ( UTF8 ) ; } catch ( IOException ex ) { final String msg = format ( "Download failed, unable to retrieve '%s'" , url . toString ( ) ) ; throw new DownloadFailedException ( msg , ex ) ; }