signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class IfcLightIntensityDistributionImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ SuppressWarnings ( "unchecked" ) @ Override public EList < IfcLightDistributionData > getDistributionData ( ) { } } | return ( EList < IfcLightDistributionData > ) eGet ( Ifc4Package . Literals . IFC_LIGHT_INTENSITY_DISTRIBUTION__DISTRIBUTION_DATA , true ) ; |
public class MediaSource { /** * Get ( optional ) rotation from resource
* @ param mediaRequest Media request
* @ param mediaHandlerConfig Media handler config
* @ return Rotation value or null if not set or invalid */
@ SuppressWarnings ( "null" ) protected final @ Nullable Integer getMediaRotation ( @ NotNull MediaRequest mediaRequest , @ NotNull MediaHandlerConfig mediaHandlerConfig ) { } } | if ( mediaRequest . getResource ( ) != null ) { String rotationProperty = getMediaRotationProperty ( mediaRequest , mediaHandlerConfig ) ; String stringValue = mediaRequest . getResource ( ) . getValueMap ( ) . get ( rotationProperty , String . class ) ; if ( StringUtils . isNotEmpty ( stringValue ) ) { int rotationValue = NumberUtils . toInt ( stringValue ) ; if ( TransformedRenditionHandler . isValidRotation ( rotationValue ) ) { return rotationValue ; } } } return null ; |
public class CmsToolBar { /** * Initializes the toolbar . < p >
* @ param appId the app id
* @ param context the app UI context */
protected void init ( String appId , I_CmsAppUIContext context ) { } } | m_dialogContext = new ToolbarContext ( appId ) ; m_appContext = context ; initContextMenu ( ) ; m_itemsRight . addComponent ( m_quickLaunchDropDown ) ; m_itemsRight . addComponent ( m_userDropDown ) ; enableDefaultButtons ( true ) ; |
public class BundleMaker { /** * Creates a new Key and instantiates the BundleInterface .
* @ param _ names List of Names representing StaticSources
* @ param _ bundleclass The Class to be instantiated
* @ return the Key to a Bundle
* @ throws EFapsException on error */
private static String createNewKey ( final List < String > _names , final Class < ? > _bundleclass ) throws EFapsException { } } | final StringBuilder builder = new StringBuilder ( ) ; final List < String > oids = new ArrayList < > ( ) ; String ret = null ; try { for ( final String name : _names ) { if ( builder . length ( ) > 0 ) { builder . append ( "-" ) ; } final Cache < String , StaticCompiledSource > cache = InfinispanCache . get ( ) . < String , StaticCompiledSource > getIgnReCache ( BundleMaker . NAMECACHE ) ; if ( ! cache . containsKey ( name ) ) { final QueryBuilder queryBldr = new QueryBuilder ( CIAdminProgram . StaticCompiled ) ; queryBldr . addWhereAttrEqValue ( CIAdminProgram . StaticCompiled . Name , name ) ; final MultiPrintQuery multi = queryBldr . getPrint ( ) ; multi . addAttribute ( CIAdminProgram . StaticCompiled . Name ) ; multi . execute ( ) ; while ( multi . next ( ) ) { final String statName = multi . < String > getAttribute ( CIAdminProgram . StaticCompiled . Name ) ; final StaticCompiledSource source = new StaticCompiledSource ( multi . getCurrentInstance ( ) . getOid ( ) , statName ) ; cache . put ( source . getName ( ) , source ) ; } } if ( cache . containsKey ( name ) ) { final String oid = cache . get ( name ) . getOid ( ) ; builder . append ( oid ) ; oids . add ( oid ) ; } } ret = builder . toString ( ) ; final BundleInterface bundle = ( BundleInterface ) _bundleclass . newInstance ( ) ; bundle . setKey ( ret , oids ) ; final Cache < String , BundleInterface > cache = InfinispanCache . get ( ) . < String , BundleInterface > getIgnReCache ( BundleMaker . CACHE4BUNDLE ) ; cache . put ( ret , bundle ) ; } catch ( final InstantiationException e ) { throw new EFapsException ( BundleMaker . class , "createNewKey.InstantiationException" , e , _bundleclass ) ; } catch ( final IllegalAccessException e ) { throw new EFapsException ( BundleMaker . class , "createNewKey.IllegalAccessException" , e , _bundleclass ) ; } return ret ; |
public class UniverseApi { /** * Get character races Get a list of character races - - - This route expires
* daily at 11:05
* @ param acceptLanguage
* Language to use in the response ( optional , default to en - us )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param ifNoneMatch
* ETag from a previous request . A 304 will be returned if this
* matches the current ETag ( optional )
* @ param language
* Language to use in the response , takes precedence over
* Accept - Language ( optional , default to en - us )
* @ return List & lt ; RacesResponse & gt ;
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public List < RacesResponse > getUniverseRaces ( String acceptLanguage , String datasource , String ifNoneMatch , String language ) throws ApiException { } } | ApiResponse < List < RacesResponse > > resp = getUniverseRacesWithHttpInfo ( acceptLanguage , datasource , ifNoneMatch , language ) ; return resp . getData ( ) ; |
public class ThemeManager { /** * Remap themes . < br / >
* < br / >
* Example , you can remap { @ link # LIGHT } theme on
* { @ link R . style # Holo _ Theme _ Dialog _ Light } : < br / >
* < pre >
* ThemeManager . map ( { @ link # LIGHT } , { @ link R . style # Holo _ Theme _ Dialog _ Light } ) ;
* < / pre >
* If theme value negative - remove pair flags - theme */
public static void map ( int flags , int theme ) { } } | if ( theme > 0 ) { _THEMES_MAP . put ( flags & _THEME_MASK , theme ) ; } else { final int i = _THEMES_MAP . indexOfKey ( flags & _THEME_MASK ) ; if ( i > 0 ) { _THEMES_MAP . removeAt ( i ) ; } } |
public class IFixCompareCommandTask { /** * This method will look in the wlp / lib / versions / aparIds . zip file for a file
* named aparIds . csv which it will then read to obtain a set of APARs that
* are included in this Fix Pack .
* @ param installLocation
* The location of the installation file to get the APAR IDs
* from , it can be either an archive file ( zip or jar ) or an
* extracted installation . The file must exist
* @ param context
* @ param console
* @ return A set included all of the APAR IDs that are included in this fix
* pack . Will be empty if no APARs are found .
* @ throws IllegalArgumentException
* if the < code > installLocation < / code > is neither a directory or
* archive or the aparId zip is not valid
* @ throws IOException
* If something goes wrong reading the zip file
* @ throws ZipException
* If something goes wrong reading the zip file */
private Set < String > findFixPackAparIds ( File installLocation , CommandConsole console , ExecutionContext context ) throws IllegalArgumentException , ZipException , IOException { } } | // First need to work out what type of installation we have , is it an
// archive or extracted ?
Set < String > fixPackAparIds ; if ( installLocation . isDirectory ( ) ) { // Extracted
fixPackAparIds = readAparCsvFromExtractedInstall ( installLocation ) ; InstalledIFixInformation installedIFixes = findInstalledIFixes ( installLocation , console , context . optionExists ( VERBOSE_OPTION ) ) ; for ( String apar : installedIFixes . validIFixes . keySet ( ) ) { fixPackAparIds . add ( apar ) ; } } else { String fileName = installLocation . getName ( ) ; if ( ! FileUtils . matchesFileExtension ( ".jar" , fileName ) && ! FileUtils . matchesFileExtension ( ".zip" , fileName ) ) { // We have a file that isn ' t an archive , can ' t proceed so return
// an error message
throw new IllegalArgumentException ( getMessage ( "compare.install.not.zip.or.dir" , installLocation . getAbsolutePath ( ) ) ) ; } // Archive
fixPackAparIds = readAparCsvFromArchiveInstall ( installLocation , console ) ; } return fixPackAparIds ; |
public class MtasSpanUniquePositionSpans { /** * Find matches .
* @ return true , if successful
* @ throws IOException Signals that an I / O exception has occurred . */
private boolean findMatches ( ) throws IOException { } } | // check for something in queue of matches
if ( ! queueMatches . isEmpty ( ) ) { return true ; } else { while ( true ) { // try to get something in queue of spans
if ( queueSpans . isEmpty ( ) && ! collectSpan ( ) ) { return false ; } // try to get matches with first span in queue
Match firstMatch = queueSpans . get ( 0 ) ; queueSpans . remove ( 0 ) ; // create a list of matches with same startposition as firstMatch
List < Match > matches = new ArrayList < > ( ) ; matches . add ( firstMatch ) ; // try to collect spans until lastStartPosition not equal to
// startposition of firstMatch
while ( ! lastSpan && ( lastStartPosition == firstMatch . startPosition ( ) ) ) { collectSpan ( ) ; } while ( ! queueSpans . isEmpty ( ) && ( queueSpans . get ( 0 ) . startPosition ( ) == firstMatch . startPosition ( ) ) ) { matches . add ( queueSpans . get ( 0 ) ) ; queueSpans . remove ( 0 ) ; } // construct all matches for this startposition
for ( Match match : matches ) { // only unique spans
if ( ! queueMatches . contains ( match ) ) { queueMatches . add ( match ) ; } } // check for something in queue of matches
if ( ! queueMatches . isEmpty ( ) ) { return true ; } } } |
public class DiskSnapshotMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DiskSnapshot diskSnapshot , ProtocolMarshaller protocolMarshaller ) { } } | if ( diskSnapshot == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( diskSnapshot . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getArn ( ) , ARN_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getSupportCode ( ) , SUPPORTCODE_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getCreatedAt ( ) , CREATEDAT_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getLocation ( ) , LOCATION_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getResourceType ( ) , RESOURCETYPE_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getTags ( ) , TAGS_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getSizeInGb ( ) , SIZEINGB_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getState ( ) , STATE_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getProgress ( ) , PROGRESS_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getFromDiskName ( ) , FROMDISKNAME_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getFromDiskArn ( ) , FROMDISKARN_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getFromInstanceName ( ) , FROMINSTANCENAME_BINDING ) ; protocolMarshaller . marshall ( diskSnapshot . getFromInstanceArn ( ) , FROMINSTANCEARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class DnsBatch { /** * A joint callback for both " get change request " and " create change request " operations . */
private RpcBatch . Callback < Change > createChangeRequestCallback ( final String zoneName , final DnsBatchResult < ChangeRequest > result , final boolean nullForNotFound , final boolean idempotent ) { } } | return new RpcBatch . Callback < Change > ( ) { @ Override public void onSuccess ( Change response ) { result . success ( response == null ? null : ChangeRequest . fromPb ( options . getService ( ) , zoneName , response ) ) ; } @ Override public void onFailure ( GoogleJsonError googleJsonError ) { DnsException serviceException = new DnsException ( googleJsonError , idempotent ) ; if ( serviceException . getCode ( ) == HTTP_NOT_FOUND ) { if ( "entity.parameters.changeId" . equals ( serviceException . getLocation ( ) ) || ( serviceException . getMessage ( ) != null && serviceException . getMessage ( ) . contains ( "parameters.changeId" ) ) ) { // the change id was not found , but the zone exists
result . success ( null ) ; return ; } // the zone does not exist , so throw an exception
} result . error ( serviceException ) ; } } ; |
public class SqlBuilderHelper { /** * Generate log for INSERT operations
* @ param method
* the method
* @ param methodBuilder
* the method builder */
public static void generateSQLForStaticQuery ( final SQLiteModelMethod method , MethodSpec . Builder methodBuilder ) { } } | methodBuilder . addComment ( "generate static SQL for statement" ) ; JQLChecker checker = JQLChecker . getInstance ( ) ; // replace the table name , other pieces will be removed
String sql = checker . replace ( method , method . jql , new JQLReplacerListenerImpl ( method ) { @ Override public String onColumnNameToUpdate ( String columnName ) { return onColumnName ( columnName ) ; } @ Override public String onColumnName ( String columnName ) { SQLProperty tempProperty = method . getEntity ( ) . get ( columnName ) ; AssertKripton . assertTrueOrUnknownPropertyInJQLException ( tempProperty != null , method , columnName ) ; return tempProperty . columnName ; } @ Override public String onBindParameter ( String bindParameterName , boolean inStatement ) { return "?" ; } } ) ; methodBuilder . addStatement ( "String _sql=$S" , sql ) ; |
public class StateMachine { /** * Gets a future that completes when the state is no longer { @ code . equals ( ) } to { @ code currentState ) } . */
public ListenableFuture < T > getStateChange ( T currentState ) { } } | checkState ( ! Thread . holdsLock ( lock ) , "Can not wait for state change while holding the lock" ) ; requireNonNull ( currentState , "currentState is null" ) ; synchronized ( lock ) { // return a completed future if the state has already changed , or we are in a terminal state
if ( ! state . equals ( currentState ) || isTerminalState ( state ) ) { return immediateFuture ( state ) ; } return futureStateChange . get ( ) . createNewListener ( ) ; } |
public class Mixin { /** * { @ inheritDoc } */
@ Override public void prepare ( CssFormatter formatter ) { } } | List < Rule > rules = mixins . get ( name ) ; if ( rules != null ) { for ( int i = 0 ; i < rules . size ( ) ; i ++ ) { Rule rule = rules . get ( i ) ; mixins . addAll ( rule . getMixins ( ) ) ; } } |
public class AppClassLoader { /** * This method will define the package using the byteResourceInformation for a class to get the URL for this package to try to load a manifest . If a manifest can ' t be loaded
* from the URL it will create the package with no package versioning or sealing information .
* @ param byteResourceInformation The information about the class file
* @ param packageName The name of the package to create */
@ FFDCIgnore ( value = { } } | IllegalArgumentException . class } ) private void definePackage ( ByteResourceInformation byteResourceInformation , String packageName ) { // If the package is in a JAR then we can load the JAR manifest to see what package definitions it ' s got
Manifest manifest = byteResourceInformation . getManifest ( ) ; try { // The URLClassLoader . definePackage ( ) will NPE with a null manifest so use the other definePackage if we don ' t have a manifest
if ( manifest == null ) { definePackage ( packageName , null , null , null , null , null , null , null ) ; } else { definePackage ( packageName , manifest , byteResourceInformation . getResourceUrl ( ) ) ; } } catch ( IllegalArgumentException e ) { // Ignore , this happens if the package is already defined but it is hard to guard against this in a thread safe way . See :
// http : / / bugs . sun . com / view _ bug . do ? bug _ id = 4841786
} |
public class Pattern { /** * Adds a condition that has to be satisfied by an event
* in order to be considered a match . If another condition has already been
* set , the new one is going to be combined with the previous with a
* logical { @ code AND } . In other case , this is going to be the only
* condition .
* @ param condition The condition as an { @ link IterativeCondition } .
* @ return The pattern with the new condition is set . */
public Pattern < T , F > where ( IterativeCondition < F > condition ) { } } | Preconditions . checkNotNull ( condition , "The condition cannot be null." ) ; ClosureCleaner . clean ( condition , true ) ; if ( this . condition == null ) { this . condition = condition ; } else { this . condition = new RichAndCondition < > ( this . condition , condition ) ; } return this ; |
public class ZmqMainThread { private void disconnect ( ZMQ . Socket socket , String eventName ) { } } | String endpoint = getConnectedEndPoint ( eventName ) ; if ( endpoint != null ) { EventList eventList = connectedMap . get ( endpoint ) ; if ( eventList != null ) { socket . unsubscribe ( eventName . getBytes ( ) ) ; traceZmqSubscription ( eventName , false ) ; eventList . remove ( eventName ) ; if ( eventList . size ( ) == 0 ) { socket . disconnect ( endpoint ) ; connectedMap . remove ( endpoint ) ; } } } |
public class SipStandardContextValve { /** * Report a " not found " error for the specified resource . FIXME : We
* should really be using the error reporting settings for this web
* application , but currently that code runs at the wrapper level rather
* than the context level .
* @ param response The response we are creating */
protected void notFound ( HttpServletResponse response ) { } } | try { response . sendError ( HttpServletResponse . SC_NOT_FOUND ) ; } catch ( IllegalStateException e ) { ; } catch ( IOException e ) { ; } |
public class LanguageUtils { /** * Returns the set of all approved translations .
* @ param appid appid name of the { @ link com . erudika . para . core . App }
* @ param langCode the 2 - letter language code
* @ return a set of keys for approved translations */
public Set < String > getApprovedTransKeys ( String appid , String langCode ) { } } | HashSet < String > approvedTransKeys = new HashSet < > ( ) ; if ( StringUtils . isBlank ( langCode ) ) { return approvedTransKeys ; } for ( Map . Entry < String , String > entry : readLanguage ( appid , langCode ) . entrySet ( ) ) { if ( ! getDefaultLanguage ( appid ) . get ( entry . getKey ( ) ) . equals ( entry . getValue ( ) ) ) { approvedTransKeys . add ( entry . getKey ( ) ) ; } } return approvedTransKeys ; |
public class GenericsUtils { /** * variables could also contain variables , e . g . < T , K extends List < T > > */
private static void recordVariable ( final TypeVariable var , final List < TypeVariable > found ) { } } | // prevent cycles
if ( ! found . contains ( var ) ) { found . add ( var ) ; for ( Type type : var . getBounds ( ) ) { findVariables ( type , found ) ; } } |
public class AgentManager { /** * Change state if connected call was terminated .
* @ param event */
void handleAgentCompleteEvent ( AgentCompleteEvent event ) { } } | AsteriskAgentImpl agent = getAgentByAgentId ( event . getChannel ( ) ) ; if ( agent == null ) { logger . error ( "Ignored AgentCompleteEvent for unknown agent " + event . getChannel ( ) ) ; return ; } agent . updateState ( AgentState . AGENT_IDLE ) ; |
public class Database { /** * Inserts the result of the migration into the migration table
* @ param migration the migration that was executed
* @ param wasSuccessful indicates if the migration was successful or not */
private void logMigration ( DbMigration migration , boolean wasSuccessful ) { } } | BoundStatement boundStatement = logMigrationStatement . bind ( wasSuccessful , migration . getVersion ( ) , migration . getScriptName ( ) , migration . getMigrationScript ( ) , new Date ( ) ) ; session . execute ( boundStatement ) ; |
public class LByteConsumerBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */
@ Nonnull public static LByteConsumer byteConsumerFrom ( Consumer < LByteConsumerBuilder > buildingFunction ) { } } | LByteConsumerBuilder builder = new LByteConsumerBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ; |
public class DeleteEndpointConfigRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteEndpointConfigRequest deleteEndpointConfigRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( deleteEndpointConfigRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteEndpointConfigRequest . getEndpointConfigName ( ) , ENDPOINTCONFIGNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class StandardJdbcProcessor { /** * A batch prepared statement is capable of carrying out a batch of updates in one go
* The statement or stored procedure must have been constructed in a such a way that it can handle a batch update
* @ param statement declaration of the prepared statement
* @ param inParamsBatch a collection of arrays of parameters to feed the stored procedure
* @ param isCallable must be true if the statement should be regarded as a stored procedure instead of an SQL statment
* @ return an array containing the number of affected rows per statement
* @ throws SQLException if execution fails */
public int [ ] executeBatchPreparedStatement ( String statement , Collection inParamsBatch , boolean isCallable ) throws SQLException { } } | Connection conn = null ; PreparedStatement ps = null ; try { // read only connections not supported
conn = dataSource . getConnection ( ) ; conn . setAutoCommit ( false ) ; if ( isCallable ) { ps = conn . prepareCall ( statement ) ; } else { ps = conn . prepareStatement ( statement ) ; } Iterator x = inParamsBatch . iterator ( ) ; System . out . println ( new LogEntry ( "Executing:" + statement + " for " + inParamsBatch . size ( ) + " input rows" ) ) ; while ( x . hasNext ( ) ) { Object [ ] inParams = ( Object [ ] ) x . next ( ) ; for ( int i = 0 ; i < ( inParams . length ) ; i ++ ) { try { if ( inParams [ i ] instanceof JdbcNullObject ) { ps . setNull ( i + 1 , ( ( JdbcNullObject ) inParams [ i ] ) . getType ( ) ) ; } else { ps . setObject ( i + 1 , inParams [ i ] ) ; } } catch ( SQLException e ) { System . out . println ( new LogEntry ( e ) ) ; throw new SQLException ( "Error while binding object (nr " + i + ": [" + inParams [ i ] + "]) to stored procedure." , "SP:" + statement + "\n" + "OBJECT:" + inParams [ i ] ) ; } } ps . addBatch ( ) ; } int [ ] i = ps . executeBatch ( ) ; // TODO what if execution wasn ' t successful for all statements ?
conn . commit ( ) ; return i ; } catch ( SQLException e ) { StringBuffer sb = new StringBuffer ( e . getMessage ( ) + '(' + statement + ")\n\n" ) ; Iterator x = inParamsBatch . iterator ( ) ; while ( x . hasNext ( ) ) { sb . append ( '{' ) ; Object [ ] inParams = ( Object [ ] ) x . next ( ) ; for ( int i = 0 ; i < inParams . length ; i ++ ) { sb . append ( ( i != 0 ? ", " : "" ) + ( inParams [ i ] instanceof String ? "'" : "" ) + inParams [ i ] + ( inParams [ i ] instanceof String ? "'" : "" ) ) ; } sb . append ( "}\n" ) ; } System . out . println ( new LogEntry ( Level . CRITICAL , "batch processing of " + statement + " failed" , sb ) ) ; throw new SQLException ( e . getMessage ( ) + '(' + statement + ')' ) ; } finally { if ( ps != null ) { ps . close ( ) ; } if ( conn != null ) { conn . close ( ) ; } } |
public class BsRelatedContentCB { public RelatedContentCB acceptPK ( String id ) { } } | assertObjectNotNull ( "id" , id ) ; BsRelatedContentCB cb = this ; cb . query ( ) . docMeta ( ) . setId_Equal ( id ) ; return ( RelatedContentCB ) this ; |
public class ContextManager { /** * Tears down the contexts . If an exception is thrown by a { @ link SetupAction } it is wrapped and re - thrown after all
* { @ link SetupAction # teardown ( java . util . Map ) } methods have been called .
* Contexts are torn down in the opposite order to which they are set up ( i . e . the first context set up is the last to be
* torn down ) .
* If more than one teardown ( ) method thrown an exception then only the first is propagated . */
void teardown ( final Map < String , Object > properties ) { } } | Throwable exceptionToThrow = null ; final ListIterator < SetupAction > itr = setupActions . listIterator ( setupActions . size ( ) ) ; while ( itr . hasPrevious ( ) ) { final SetupAction action = itr . previous ( ) ; try { action . teardown ( properties ) ; } catch ( Throwable e ) { if ( exceptionToThrow == null ) { exceptionToThrow = e ; } } } if ( exceptionToThrow != null ) { throw new RuntimeException ( exceptionToThrow ) ; } |
public class MockiEbean { /** * Run the test callable restoring the original EbeanServer afterwards . */
public < V > V run ( Callable < V > testCallable ) throws Exception { } } | try { beforeRun ( ) ; return testCallable . call ( ) ; } finally { afterRun ( ) ; restoreOriginal ( ) ; } |
public class XslTransformer { /** * Get a wrapped XSL transformer instance for supplied stylesheet file .
* @ param xslFile XSL file
* @ return XSL transformer instance
* @ throws TransformerConfigurationException if an error occurs while processing */
public static XslTransformer getTransformer ( File xslFile ) throws TransformerConfigurationException { } } | if ( xslFile == null ) { throw new IllegalArgumentException ( "xslFile is null" ) ; } return getTransformer ( new StreamSource ( xslFile ) ) ; |
public class AsmInvokeDistributeFactory { /** * 构建 { @ link InvokeDistribute # invoke ( String , String , String , Object [ ] ) invoke } 方法
* @ param cw ClassWriter
* @ param className 生成的类名
* @ param parentClass 父类 */
private static void buildMethod ( ClassWriter cw , String className , Class < ? > parentClass ) { } } | /* Build method */
MethodVisitor mv = cw . visitMethod ( ACC_PUBLIC , // public method
DISTRIBUTE_METHOD_NAME , // name
DISTRIBUTE_METHOD_DESC , // descriptor
null , // signature ( null means not generic )
CollectionUtil . array ( convert ( NoSuchMethodException . class ) ) ) ; // exceptions ( array of strings )
// 开始方法区
mv . visitCode ( ) ; // 判断要调用那个方法 , 然后将动态调用转化为对应的本地调用
{ List < Method > allMethod = ReflectUtil . getAllMethod ( parentClass ) ; Label next = new Label ( ) ; Label start = new Label ( ) ; for ( Method method : allMethod ) { // 只处理非静态的public方法和protected方法
if ( Modifier . isStatic ( method . getModifiers ( ) ) || ( ! AccessorUtil . isPublic ( method ) && ! AccessorUtil . isProtected ( method ) ) ) { continue ; } createIf ( mv , method , next , start , className , parentClass ) ; start = next ; next = new Label ( ) ; } // 结束位置标记
mv . visitLabel ( start ) ; mv . visitFrame ( F_SAME , 0 , null , 0 , null ) ; } // throw new NoSuchMethodException ( String . format ( " method [ % s : % s : % s ] not found " , owner , methodName , desc ) ) ;
{ // 默认抛出Error , 不应该有这种情况
mv . visitTypeInsn ( NEW , convert ( NoSuchMethodException . class ) ) ; mv . visitInsn ( DUP ) ; mv . visitLdcInsn ( "method [%s:%s:%s] not found" ) ; mv . visitInsn ( ICONST_3 ) ; mv . visitTypeInsn ( ANEWARRAY , convert ( Object . class ) ) ; mv . visitInsn ( DUP ) ; mv . visitInsn ( ICONST_0 ) ; mv . visitVarInsn ( ALOAD , 1 ) ; mv . visitInsn ( AASTORE ) ; mv . visitInsn ( DUP ) ; mv . visitInsn ( ICONST_1 ) ; mv . visitVarInsn ( ALOAD , 2 ) ; mv . visitInsn ( AASTORE ) ; mv . visitInsn ( DUP ) ; mv . visitInsn ( ICONST_2 ) ; mv . visitVarInsn ( ALOAD , 3 ) ; mv . visitInsn ( AASTORE ) ; mv . visitMethodInsn ( INVOKESTATIC , convert ( String . class ) , MethodConst . FORMAT_METHOD . getName ( ) , getMethodDesc ( MethodConst . FORMAT_METHOD ) , false ) ; mv . visitMethodInsn ( INVOKESPECIAL , convert ( NoSuchMethodException . class ) , INIT , getConstructorDesc ( ERROR_CONSTRUCTOR ) , false ) ; mv . visitInsn ( ATHROW ) ; mv . visitMaxs ( 0 , 0 ) ; } mv . visitEnd ( ) ; |
public class StructrCMISService { /** * - - - - - interface PolicyService - - - - - */
@ Override public void applyPolicy ( String repositoryId , String policyId , String objectId , ExtensionsData extension ) { } } | policyService . applyPolicy ( repositoryId , policyId , objectId , extension ) ; |
public class ReportTaskProgressRequest { /** * Key - value pairs that define the properties of the ReportTaskProgressInput object .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setFields ( java . util . Collection ) } or { @ link # withFields ( java . util . Collection ) } if you want to override the
* existing values .
* @ param fields
* Key - value pairs that define the properties of the ReportTaskProgressInput object .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ReportTaskProgressRequest withFields ( Field ... fields ) { } } | if ( this . fields == null ) { setFields ( new com . amazonaws . internal . SdkInternalList < Field > ( fields . length ) ) ; } for ( Field ele : fields ) { this . fields . add ( ele ) ; } return this ; |
public class EventBus { /** * Post an event to the bus . All subscribers to the event class type posted will be notified .
* @ param event to post to subscribers */
public void post ( Notification event ) { } } | for ( Map . Entry < Object , List < SubscriberMethod > > entry : listeners . entrySet ( ) ) { for ( SubscriberMethod method : entry . getValue ( ) ) { if ( method . eventTypeToInvokeOn . isInstance ( event ) ) { try { method . methodToInvokeOnEvent . invoke ( entry . getKey ( ) , event ) ; } catch ( InvocationTargetException e ) { // We log this exception and swallow it because we need to ensure we don ' t
// prevent completion of notifications if one listener is badly behaved and
// throws an exception of some kind .
LOGGER . log ( Level . SEVERE , "Subscriber invocation failed for method \"" + method . toString ( ) + "\"" , e ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( EventBus . class . getName ( ) + " could not access " + "subscriber " + method . toString ( ) , e ) ; } } } } |
public class ImageName { /** * Get the full name of this image like { @ link # getFullName ( String ) } does , but allow
* an optional registry . This registry is used when this image does not already
* contain a registry . If no tag was provided in the initial name , < code > latest < / code > is used .
* @ param optionalRegistry optional registry to use when this image does not provide
* a registry . Can be null in which case no optional registry is used *
* @ return full name with original registry ( if set ) or optional registry ( if not < code > null < / code > ) . */
public String getFullName ( String optionalRegistry ) { } } | String fullName = getNameWithoutTag ( optionalRegistry ) ; if ( tag != null ) { fullName = fullName + ":" + tag ; } if ( digest != null ) { fullName = fullName + "@" + digest ; } return fullName ; |
public class ArrayMap { /** * Remove an existing key from the array map .
* @ param key The key of the mapping to remove .
* @ return Returns the value that was stored under the key , or null if there
* was no such key . */
@ Override public V remove ( Object key ) { } } | int index = key == null ? indexOfNull ( ) : indexOf ( key , key . hashCode ( ) ) ; if ( index >= 0 ) { return removeAt ( index ) ; } return null ; |
public class EbInterfaceWriter { /** * Create a writer builder for Ebi302InvoiceType .
* @ return The builder and never < code > null < / code > */
@ Nonnull public static EbInterfaceWriter < Ebi302InvoiceType > ebInterface302 ( ) { } } | final EbInterfaceWriter < Ebi302InvoiceType > ret = EbInterfaceWriter . create ( Ebi302InvoiceType . class ) ; ret . setNamespaceContext ( EbInterface302NamespaceContext . getInstance ( ) ) ; return ret ; |
public class LazyReact { /** * Generate an infinite FutureStream executing the provided Supplier continually and asynhcronously
* < pre >
* { @ code
* new LazyReact ( ) . generate ( this : : load )
* . limit ( 5)
* . reduce ( SemigroupK . stringConcat ) ;
* Optional [ " data1data2data3data4data5 " ]
* } < / pre >
* @ param s Supplier to execute asynchronously to create an infinite Stream
* @ return Infinite FutureStream */
public < U > FutureStream < U > generateAsync ( final Supplier < U > s ) { } } | return this . constructFutures ( ReactiveSeq . generate ( ( ) -> 1 ) . map ( n -> CompletableFuture . supplyAsync ( s , getExecutor ( ) ) ) ) ; |
public class YearWeek { /** * Returns a copy of this year - week with the specified number of years subtracted .
* If the week of this instance is 53 and the new year does not have 53 weeks ,
* the week will be adjusted to be 52.
* This instance is immutable and unaffected by this method call .
* @ param yearsToSubtract the years to subtract , may be negative
* @ return the year - week with the years subtracted , not null */
public YearWeek minusYears ( long yearsToSubtract ) { } } | if ( yearsToSubtract == 0 ) { return this ; } int newYear = Math . toIntExact ( Math . subtractExact ( year , yearsToSubtract ) ) ; return withYear ( newYear ) ; |
public class JDBC4ResultSetMetaData { /** * Get the designated column ' s specified column size . */
public int getPrecision ( int column ) throws SQLException { } } | sourceResultSet . checkColumnBounds ( column ) ; VoltType type = sourceResultSet . table . getColumnType ( column - 1 ) ; Integer result = type . getTypePrecisionAndRadix ( ) [ 0 ] ; if ( result == null ) { result = 0 ; } return result ; |
public class Headers { /** * Extracts a quoted value from a header that has a given key . For instance if the header is
* content - disposition = form - data ; filename * = " utf - 8 ' ' test . txt "
* and the key is filename * then " test . txt " will be returned after extracting character set and language
* ( following RFC 2231 ) and performing URL decoding to the value using the specified encoding
* @ param header The header
* @ param key The key that identifies the token to extract
* @ return The token , or null if it was not found */
public static String extractQuotedValueFromHeaderWithEncoding ( final String header , final String key ) { } } | String value = extractQuotedValueFromHeader ( header , key ) ; if ( value != null ) { return value ; } value = extractQuotedValueFromHeader ( header , key + "*" ) ; if ( value != null ) { int characterSetDelimiter = value . indexOf ( '\'' ) ; int languageDelimiter = value . lastIndexOf ( '\'' , characterSetDelimiter + 1 ) ; String characterSet = value . substring ( 0 , characterSetDelimiter ) ; try { String fileNameURLEncoded = value . substring ( languageDelimiter + 1 ) ; return URLDecoder . decode ( fileNameURLEncoded , characterSet ) ; } catch ( UnsupportedEncodingException e ) { throw new RuntimeException ( e ) ; } } return null ; |
public class SubscriptionFilter { /** * Filter method . Checks whether the given item is a consumerDispatcher and matches the
* one associated with this filter
* @ param item - The item on the itemstream
* @ return boolean - True if a match , false otherwise . */
public boolean filterMatches ( AbstractItem item ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "filterMatches" , item ) ; /* Cast the incoming item to a PersistentStoreReferenceStream object . if it is not , an
* exception will be thrown and the match will fail */
SIMPReferenceStream rstream ; if ( item instanceof SIMPReferenceStream ) { rstream = ( SIMPReferenceStream ) item ; // Check for matching consumer dispatchers
if ( rstream == consumerDispatcher . getReferenceStream ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "filterMatches" , Boolean . TRUE ) ; return true ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "filterMatches" , Boolean . FALSE ) ; return false ; |
public class Problems { /** * Get the first problem of the highest severity .
* @ return The most severe problem in this set */
public final Problem getLeadProblem ( ) { } } | Collections . sort ( this . problems ) ; return this . problems . isEmpty ( ) ? null : this . problems . get ( this . problems . size ( ) - 1 ) ; |
public class Util { /** * Deletes the specified { @ code directory } recursively . */
public static void deleteFileTree ( File directory ) throws IOException { } } | if ( directory . exists ( ) ) { Files . walkFileTree ( directory . toPath ( ) , DeletingFileVisitor . INSTANCE ) ; } |
public class Strings { public static String indent ( String str , String space ) { } } | StringBuilder builder ; builder = new StringBuilder ( ) ; for ( String line : Separator . RAW_LINE . split ( str ) ) { builder . append ( space ) ; builder . append ( line ) ; } return builder . toString ( ) ; |
public class ZonedDateTime { /** * Returns a copy of this { @ code ZonedDateTime } with the specified number of days subtracted .
* This operates on the local time - line ,
* { @ link LocalDateTime # minusDays ( long ) subtracting days } to the local date - time .
* This is then converted back to a { @ code ZonedDateTime } , using the zone ID
* to obtain the offset .
* When converting back to { @ code ZonedDateTime } , if the local date - time is in an overlap ,
* then the offset will be retained if possible , otherwise the earlier offset will be used .
* If in a gap , the local date - time will be adjusted forward by the length of the gap .
* This instance is immutable and unaffected by this method call .
* @ param days the days to subtract , may be negative
* @ return a { @ code ZonedDateTime } based on this date - time with the days subtracted , not null
* @ throws DateTimeException if the result exceeds the supported date range */
public ZonedDateTime minusDays ( long days ) { } } | return ( days == Long . MIN_VALUE ? plusDays ( Long . MAX_VALUE ) . plusDays ( 1 ) : plusDays ( - days ) ) ; |
public class GradleDependencyResolutionHelper {
    /**
     * Attempt to load the project identifiers (group:artifact:version) for projects that have been
     * included. This method isn't guaranteed to work all the time since there is no good API that
     * we can use and need to rely on reflection for now.
     *
     * @param project the project reference.
     * @return a collection of "included" project identifiers (a.k.a., composite projects).
     */
    private static Set < String > getIncludedProjectIdentifiers ( final Project project ) {
        // The reflection walk is expensive; cache the result on the project under a fixed key.
        return getCachedReference ( project , "thorntail_included_project_identifiers" , ( ) -> {
            Set < String > identifiers = new HashSet < > ( ) ;
            // Check for included builds as well.
            project . getGradle ( ) . getIncludedBuilds ( ) . forEach ( build -> {
                // Determine if the given reference has the following method definition,
                // org.gradle.internal.build.IncludedBuildState#getAvailableModules()
                try {
                    Method method = build . getClass ( ) . getMethod ( "getAvailableModules" ) ;
                    Class < ? > retType = method . getReturnType ( ) ;
                    if ( Set . class . isAssignableFrom ( retType ) ) {
                        // We have identified the right method. Get the values out of it.
                        Set availableModules = ( Set ) method . invoke ( build ) ;
                        for ( Object entry : availableModules ) {
                            // Each entry is expected to be a pair whose public "left" field
                            // carries the module identifier — TODO confirm across Gradle versions.
                            Field field = entry . getClass ( ) . getField ( "left" ) ;
                            Object value = field . get ( entry ) ;
                            if ( value instanceof ModuleVersionIdentifier ) {
                                ModuleVersionIdentifier mv = ( ModuleVersionIdentifier ) value ;
                                identifiers . add ( String . format ( "%s:%s:%s" , mv . getGroup ( ) , mv . getName ( ) , mv . getVersion ( ) ) ) ;
                            } else {
                                project . getLogger ( ) . debug ( "Unable to determine field type: {}" , field ) ;
                            }
                        }
                    } else {
                        project . getLogger ( ) . debug ( "Unable to determine method return type: {}" , retType ) ;
                    }
                } catch ( ReflectiveOperationException e ) {
                    // Best-effort: the reflection target is Gradle-internal and may change
                    // between versions, so failures are logged at debug and ignored.
                    project . getLogger ( ) . debug ( "Unable to determine the included projects." , e ) ;
                }
            } ) ;
            return identifiers ;
        } ) ;
    }
}
public class Installer {
    /**
     * Command-line entry point.
     *
     * <p>Each argument, when present, is treated as a properties file; all files are
     * merged (later entries win) into the {@code InstallOptions}. With no arguments the
     * options are built from the distribution alone. Exits with status 1 on any failure.</p>
     */
    public static void main ( String [ ] args ) {
        LogConfig . initMinimal ( ) ;
        try {
            Distribution dist = new ClassLoaderDistribution ( ) ;
            InstallOptions opts = null ;
            if ( args . length == 0 ) {
                opts = new InstallOptions ( dist ) ;
            } else {
                // Merge every properties file given on the command line.
                Map < String , String > props = new HashMap < String , String > ( ) ;
                for ( String file : args ) {
                    props . putAll ( FileUtils . loadMap ( new File ( file ) ) ) ;
                }
                opts = new InstallOptions ( dist , props ) ;
            }
            // set fedora.home so downstream code can locate the installation
            System . setProperty ( "fedora.home" , opts . getValue ( InstallOptions . FEDORA_HOME ) ) ;
            new Installer ( dist , opts ) . install ( ) ;
        } catch ( Exception e ) {
            printException ( e ) ;
            System . exit ( 1 ) ;
        }
    }
}
public class HierarchyPathMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param hierarchyPath the object to marshall; must not be null
     * @param protocolMarshaller the marshaller the five level fields are written to
     * @throws SdkClientException if the argument is null or any field fails to marshall
     */
    public void marshall ( HierarchyPath hierarchyPath , ProtocolMarshaller protocolMarshaller ) {
        if ( hierarchyPath == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            // Marshall the five hierarchy levels in order, each against its binding.
            protocolMarshaller . marshall ( hierarchyPath . getLevelOne ( ) , LEVELONE_BINDING ) ;
            protocolMarshaller . marshall ( hierarchyPath . getLevelTwo ( ) , LEVELTWO_BINDING ) ;
            protocolMarshaller . marshall ( hierarchyPath . getLevelThree ( ) , LEVELTHREE_BINDING ) ;
            protocolMarshaller . marshall ( hierarchyPath . getLevelFour ( ) , LEVELFOUR_BINDING ) ;
            protocolMarshaller . marshall ( hierarchyPath . getLevelFive ( ) , LEVELFIVE_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class DescribeJobFlowsRequest { /** * Return only job flows whose state is contained in this list .
* @ param jobFlowStates
* Return only job flows whose state is contained in this list .
* @ return Returns a reference to this object so that method calls can be chained together .
* @ see JobFlowExecutionState */
public DescribeJobFlowsRequest withJobFlowStates ( JobFlowExecutionState ... jobFlowStates ) { } } | com . amazonaws . internal . SdkInternalList < String > jobFlowStatesCopy = new com . amazonaws . internal . SdkInternalList < String > ( jobFlowStates . length ) ; for ( JobFlowExecutionState value : jobFlowStates ) { jobFlowStatesCopy . add ( value . toString ( ) ) ; } if ( getJobFlowStates ( ) == null ) { setJobFlowStates ( jobFlowStatesCopy ) ; } else { getJobFlowStates ( ) . addAll ( jobFlowStatesCopy ) ; } return this ; |
public class DefaultFaceletContext {
    /**
     * {@inheritDoc}
     *
     * <p>Delegates directly to the wrapped context {@code _ctx}.</p>
     */
    @ Override
    @ SuppressWarnings ( "unchecked" )
    public void putContext ( Class key , Object contextObject ) {
        _ctx . putContext ( key , contextObject ) ;
    }
}
public class SignalManager {
    /**
     * Create a signal for the specified constraints.
     *
     * @param viewConstraints The constraints to create a signal for.
     * @throws DatasetException if the signal could not be created.
     */
    public void signalReady ( Constraints viewConstraints ) {
        // Make sure the signal directory exists (mkdirs is a no-op if it already does).
        try {
            rootFileSystem . mkdirs ( signalDirectory ) ;
        } catch ( IOException e ) {
            throw new DatasetIOException ( "Unable to create signal manager directory: " + signalDirectory , e ) ;
        }
        // The signal file is named after the normalized constraints so equal
        // constraints always resolve to the same path.
        String normalizedConstraints = getNormalizedConstraints ( viewConstraints ) ;
        Path signalPath = new Path ( signalDirectory , normalizedConstraints ) ;
        try {
            // create the output stream to overwrite the current contents; if the directory or file
            // exists it will be overwritten to get a new timestamp
            FSDataOutputStream os = rootFileSystem . create ( signalPath , true ) ;
            os . close ( ) ;
        } catch ( IOException e ) {
            throw new DatasetIOException ( "Could not access signal path: " + signalPath , e ) ;
        }
    }
}
public class StorePackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getRemoteServiceUpdate ( ) { } } | if ( remoteServiceUpdateEClass == null ) { remoteServiceUpdateEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( StorePackage . eNS_URI ) . getEClassifiers ( ) . get ( 84 ) ; } return remoteServiceUpdateEClass ; |
public class Graphics {
    /**
     * Draw a line with a colour gradient interpolated between the two end points.
     *
     * @param x1 The starting x position to draw the line
     * @param y1 The starting y position to draw the line
     * @param red1 The starting position's shade of red
     * @param green1 The starting position's shade of green
     * @param blue1 The starting position's shade of blue
     * @param alpha1 The starting position's alpha value
     * @param x2 The ending x position to draw the line
     * @param y2 The ending y position to draw the line
     * @param red2 The ending position's shade of red
     * @param green2 The ending position's shade of green
     * @param blue2 The ending position's shade of blue
     * @param alpha2 The ending position's alpha value
     */
    public void drawGradientLine ( float x1 , float y1 , float red1 , float green1 , float blue1 , float alpha1 , float x2 , float y2 , float red2 , float green2 , float blue2 , float alpha2 ) {
        predraw ( ) ;
        // Unbind any texture so the line is drawn with pure vertex colours.
        TextureImpl . bindNone ( ) ;
        // Immediate-mode GL: colour each vertex; the driver interpolates between them.
        GL . glBegin ( SGL . GL_LINES ) ;
        GL . glColor4f ( red1 , green1 , blue1 , alpha1 ) ;
        GL . glVertex2f ( x1 , y1 ) ;
        GL . glColor4f ( red2 , green2 , blue2 , alpha2 ) ;
        GL . glVertex2f ( x2 , y2 ) ;
        GL . glEnd ( ) ;
        postdraw ( ) ;
    }
}
public class AbstractUseCase { /** * Notifies the listener that the use case has failed to execute . < br / >
* You can override this method for a custom notification when your listeners may be listening to multiple use cases
* at a time .
* @ param listener The listener to notify . */
@ RestrictTo ( LIBRARY ) public void notifyFailedUseCase ( AbstractException exception , UseCaseListener listener ) { } } | try { LOGGER . debug ( "Notifying " + getClass ( ) . getSimpleName ( ) + " finish failed to listener " + listener . getClass ( ) . getSimpleName ( ) ) ; listener . onFinishFailedUseCase ( exception ) ; } catch ( Exception e ) { AbstractException abstractException = wrapException ( e ) ; logHandledException ( abstractException ) ; } |
public class SQLiteViewStore {
    /**
     * Generates and runs the SQL SELECT statement for a view query, calling the onRow callback
     * for each result row. Ported from CBL_SQLiteViewStorage.m:
     * - (CBLStatus) _runQueryWithOptions: (const CBLQueryOptions*) options onRow: (QueryRowBlock) onRow
     *
     * @param options query options; a default instance is used when null
     * @param block callback invoked per row; a non-error code <= 0 stops iteration early
     * @return the status of the query (OK unless the callback reported an error)
     */
    private Status runQuery ( QueryOptions options , QueryRowBlock block ) {
        if ( options == null )
            options = new QueryOptions ( ) ;
        // OPT: It would be faster to use separate tables for raw-or-ascii-collated views so that
        // they could be indexed with the right collation, instead of having to specify it here.
        String collationStr = "" ;
        if ( collation == View . TDViewCollation . TDViewCollationASCII )
            collationStr += " COLLATE JSON_ASCII" ;
        else if ( collation == View . TDViewCollation . TDViewCollationRaw )
            collationStr += " COLLATE JSON_RAW" ;
        // Build up the SELECT; all user-supplied values go through bind parameters in argsList.
        StringBuilder sql = new StringBuilder ( "SELECT key, value, docid, revs.sequence" ) ;
        if ( options . isIncludeDocs ( ) ) {
            sql . append ( ", revid, json" ) ;
        }
        sql . append ( String . format ( Locale . ENGLISH , " FROM 'maps_%s', revs, docs" , mapTableName ( ) ) ) ;
        sql . append ( " WHERE 1" ) ;
        List < String > argsList = new ArrayList < String > ( ) ;
        // Explicit key list takes the form "key in (?, ?, ...)".
        if ( options . getKeys ( ) != null && options . getKeys ( ) . size ( ) > 0 ) {
            sql . append ( " AND key in (" ) ;
            String item = "?" ;
            for ( Object key : options . getKeys ( ) ) {
                // null key should be ignored
                if ( key != null ) {
                    sql . append ( item ) ;
                    item = ", ?" ;
                    argsList . add ( toJSONString ( key ) ) ;
                }
            }
            sql . append ( ')' ) ;
        }
        Object minKey = options . getStartKey ( ) ;
        Object maxKey = options . getEndKey ( ) ;
        String minKeyDocId = options . getStartKeyDocId ( ) ;
        String maxKeyDocId = options . getEndKeyDocId ( ) ;
        boolean inclusiveMin = options . isInclusiveStart ( ) ;
        boolean inclusiveMax = options . isInclusiveEnd ( ) ;
        if ( options . isDescending ( ) ) {
            // Descending queries swap the bounds so the SQL range is still expressed min..max.
            Object min = minKey ;
            minKey = maxKey ;
            maxKey = min ;
            inclusiveMin = inclusiveMax ;
            // NOTE(review): inclusiveMax is forced to true rather than taking the old
            // inclusiveMin; this mirrors the upstream Couchbase Lite port — confirm intended.
            inclusiveMax = true ;
            minKeyDocId = options . getEndKeyDocId ( ) ;
            maxKeyDocId = options . getStartKeyDocId ( ) ;
        }
        if ( minKey != null ) {
            String minKeyJSON = toJSONString ( minKey ) ;
            sql . append ( inclusiveMin ? " AND key >= ?" : " AND key > ?" ) ;
            sql . append ( collationStr ) ;
            argsList . add ( minKeyJSON ) ;
            if ( minKeyDocId != null && inclusiveMin ) {
                // OPT: This calls the JSON collator a 2nd time unnecessarily.
                sql . append ( String . format ( Locale . ENGLISH , " AND (key > ? %s OR docid >= ?)" , collationStr ) ) ;
                argsList . add ( minKeyJSON ) ;
                argsList . add ( minKeyDocId ) ;
            }
        }
        if ( maxKey != null ) {
            // Apply prefix-match truncation to the end key when requested.
            maxKey = View . keyForPrefixMatch ( maxKey , options . getPrefixMatchLevel ( ) ) ;
            String maxKeyJSON = toJSONString ( maxKey ) ;
            sql . append ( inclusiveMax ? " AND key <= ?" : " AND key < ?" ) ;
            sql . append ( collationStr ) ;
            argsList . add ( maxKeyJSON ) ;
            if ( maxKeyDocId != null && inclusiveMax ) {
                sql . append ( String . format ( Locale . ENGLISH , " AND (key < ? %s OR docid <= ?)" , collationStr ) ) ;
                argsList . add ( maxKeyJSON ) ;
                argsList . add ( maxKeyDocId ) ;
            }
        }
        // Join conditions, ordering (key, then docid), limit and offset.
        sql . append ( String . format ( Locale . ENGLISH , " AND revs.sequence = 'maps_%s'.sequence AND docs.doc_id = revs.doc_id ORDER BY key" , mapTableName ( ) ) ) ;
        sql . append ( collationStr ) ;
        if ( options . isDescending ( ) ) {
            sql . append ( " DESC" ) ;
        }
        sql . append ( options . isDescending ( ) ? ", docid DESC" : ", docid" ) ;
        sql . append ( " LIMIT ? OFFSET ?" ) ;
        argsList . add ( Integer . toString ( options . getLimit ( ) ) ) ;
        argsList . add ( Integer . toString ( options . getSkip ( ) ) ) ;
        Log . v ( Log . TAG_VIEW , "Query %s: %s | args: %s" , name , sql . toString ( ) , argsList ) ;
        Status status = new Status ( Status . OK ) ;
        Cursor cursor = null ;
        try {
            cursor = store . getStorageEngine ( ) . rawQuery ( sql . toString ( ) , argsList . toArray ( new String [ argsList . size ( ) ] ) ) ; // regular query
            cursor . moveToNext ( ) ;
            while ( ! cursor . isAfterLast ( ) ) {
                // Call the block!
                byte [ ] keyData = cursor . getBlob ( 0 ) ;
                byte [ ] valueData = cursor . getBlob ( 1 ) ;
                String docID = cursor . getString ( 2 ) ;
                status = block . onRow ( keyData , valueData , docID , cursor ) ;
                if ( status . isError ( ) )
                    break ;
                else if ( status . getCode ( ) <= 0 ) {
                    // Non-positive, non-error code: the callback requested early termination.
                    status = new Status ( Status . OK ) ;
                    break ;
                }
                cursor . moveToNext ( ) ;
            }
        } finally {
            if ( cursor != null )
                cursor . close ( ) ;
        }
        return status ;
    }
}
public class LocalScanUploadMonitor {
    /**
     * Checks whether any completed tasks returned before scanning the entire range. If so then the
     * unscanned ranges are resplit and new tasks are created from them.
     *
     * @param status the current scan status
     * @return the (possibly reloaded) scan status reflecting any resplit tasks
     */
    private ScanStatus resplitPartiallyCompleteTasks ( ScanStatus status ) {
        boolean anyUpdated = false ;
        int nextTaskId = - 1 ;
        for ( ScanRangeStatus complete : status . getCompleteScanRanges ( ) ) {
            if ( complete . getResplitRange ( ) . isPresent ( ) ) {
                // This task only partially completed; there are still more data to scan.
                // Allocate task ids lazily, only once we know at least one resplit is needed.
                if ( nextTaskId == - 1 ) {
                    nextTaskId = getNextTaskId ( status ) ;
                }
                ScanRange resplitRange = complete . getResplitRange ( ) . get ( ) ;
                // Resplit the un-scanned portion into new ranges
                List < ScanRange > subRanges = resplit ( complete . getPlacement ( ) , resplitRange , status . getOptions ( ) . getRangeScanSplitSize ( ) ) ;
                // Create new tasks for each subrange that are immediately available for being queued.
                List < ScanRangeStatus > resplitStatuses = Lists . newArrayListWithCapacity ( subRanges . size ( ) ) ;
                for ( ScanRange subRange : subRanges ) {
                    // New tasks inherit the completed task's placement, batch and concurrency ids.
                    resplitStatuses . add ( new ScanRangeStatus ( nextTaskId ++ , complete . getPlacement ( ) , subRange , complete . getBatchId ( ) , complete . getBlockedByBatchId ( ) , complete . getConcurrencyId ( ) ) ) ;
                }
                _scanStatusDAO . resplitScanRangeTask ( status . getScanId ( ) , complete . getTaskId ( ) , resplitStatuses ) ;
                anyUpdated = true ;
            }
        }
        if ( ! anyUpdated ) {
            return status ;
        }
        // Slightly inefficient to reload but less risky than trying to keep the DAO and in-memory object in sync
        return _scanStatusDAO . getScanStatus ( status . getScanId ( ) ) ;
    }
}
public class ContainerDefinition {
    /**
     * A list of DNS servers that are presented to the container. This parameter maps to <code>Dns</code> in the <a
     * href="https://docs.docker.com/engine/api/v1.35/#operation/ContainerCreate">Create a container</a> section of the
     * <a href="https://docs.docker.com/engine/api/v1.35/">Docker Remote API</a> and the <code>--dns</code> option to <a
     * href="https://docs.docker.com/engine/reference/run/">docker run</a>.
     * <note>
     * This parameter is not supported for Windows containers.
     * </note>
     *
     * @return A list of DNS servers that are presented to the container. This parameter maps to <code>Dns</code> in the
     *         <a href="https://docs.docker.com/engine/api/v1.35/#operation/ContainerCreate">Create a container</a>
     *         section of the <a href="https://docs.docker.com/engine/api/v1.35/">Docker Remote API</a> and the
     *         <code>--dns</code> option to <a href="https://docs.docker.com/engine/reference/run/">docker run</a>.</p>
     *         <note>
     *         This parameter is not supported for Windows containers.
     *         </note>
     */
    public java . util . List < String > getDnsServers ( ) {
        // Lazily initialize so callers always receive a non-null list.
        if ( dnsServers == null ) {
            dnsServers = new com . amazonaws . internal . SdkInternalList < String > ( ) ;
        }
        return dnsServers ;
    }
}
public class TimeParametersImpl {
    /**
     * Sets the contained TransferTime parameter, detaching the old value and attaching
     * the new one as an EMF containment feature, then fires the change notification.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setTransferTime ( Parameter newTransferTime ) {
        if ( newTransferTime != transferTime ) {
            NotificationChain msgs = null ;
            // Detach the previous contained value, if any.
            if ( transferTime != null )
                msgs = ( ( InternalEObject ) transferTime ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - BpsimPackage . TIME_PARAMETERS__TRANSFER_TIME , null , msgs ) ;
            // Attach the new contained value, if any.
            if ( newTransferTime != null )
                msgs = ( ( InternalEObject ) newTransferTime ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - BpsimPackage . TIME_PARAMETERS__TRANSFER_TIME , null , msgs ) ;
            msgs = basicSetTransferTime ( newTransferTime , msgs ) ;
            if ( msgs != null )
                msgs . dispatch ( ) ;
        }
        // Same value set again: still notify listeners if notification is required.
        else if ( eNotificationRequired ( ) )
            eNotify ( new ENotificationImpl ( this , Notification . SET , BpsimPackage . TIME_PARAMETERS__TRANSFER_TIME , newTransferTime , newTransferTime ) ) ;
    }
}
public class SpiceServiceListenerNotifier { /** * Inform the observers of a request . The observers can optionally observe
* the new request if required .
* @ param request the request that couldn ' t be aggregated to another request . */
public void notifyObserversOfRequestNotFound ( CachedSpiceRequest < ? > request ) { } } | RequestProcessingContext requestProcessingContext = new RequestProcessingContext ( ) ; requestProcessingContext . setExecutionThread ( Thread . currentThread ( ) ) ; post ( new RequestNotFoundNotifier ( request , spiceServiceListenerList , requestProcessingContext ) ) ; |
public class RoleDefinitionsInner { /** * Get all role definitions that are applicable at scope and above .
* @ param scope The scope of the role definition .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the PagedList & lt ; RoleDefinitionInner & gt ; object if successful . */
public PagedList < RoleDefinitionInner > list ( final String scope ) { } } | ServiceResponse < Page < RoleDefinitionInner > > response = listSinglePageAsync ( scope ) . toBlocking ( ) . single ( ) ; return new PagedList < RoleDefinitionInner > ( response . body ( ) ) { @ Override public Page < RoleDefinitionInner > nextPage ( String nextPageLink ) { return listNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ; |
public class State { /** * Map both the result and the final state to a new result and final state .
* @ param fn the mapping function
* @ param < B > the potentially new final state type
* @ return the mapped { @ link State } */
public < B > State < S , B > mapState ( Fn1 < ? super Tuple2 < A , S > , ? extends Product2 < B , S > > fn ) { } } | return state ( s -> fn . apply ( run ( s ) ) ) ; |
public class DefaultStatusBar { /** * Sets the error message text to be displayed on the status bar .
* Error messages are shown over the standard message , and in a red color .
* @ param errorMessage
* the error message to be set , if < code > null < / code > , the error message is cleared , and the standard
* message is shown again */
public void setErrorMessage ( String errorMessage ) { } } | if ( errorMessage == null ) { logger . debug ( "Resetting the status bar message color to normal" ) ; messageLabel . setForeground ( SystemColor . controlText ) ; this . errorMessage = null ; setMessage ( message ) ; } else { logger . debug ( "Setting the status bar messsage color to red" ) ; messageLabel . setForeground ( Color . RED ) ; logger . debug ( "Setting status bar error message to \"" + errorMessage + "\"" ) ; this . errorMessage = errorMessage ; messageLabel . setText ( this . errorMessage ) ; } |
public class MasterWorkerInfo {
    /**
     * Marks the worker as registered, while updating all of its metadata.
     *
     * @param globalStorageTierAssoc global mapping between storage aliases and ordinal position
     * @param storageTierAliases list of storage tier aliases in order of their position in the
     *        hierarchy
     * @param totalBytesOnTiers mapping from storage tier alias to total bytes
     * @param usedBytesOnTiers mapping from storage tier alias to used bytes
     * @param blocks set of block ids on this worker
     * @return A Set of blocks removed (or lost) from this worker
     * @throws IllegalArgumentException if the tier aliases are not strictly ordered or the
     *         byte maps do not cover exactly the given tiers
     */
    public Set < Long > register ( final StorageTierAssoc globalStorageTierAssoc , final List < String > storageTierAliases , final Map < String , Long > totalBytesOnTiers , final Map < String , Long > usedBytesOnTiers , final Set < Long > blocks ) {
        // If the storage aliases do not have strictly increasing ordinal value based on the total
        // ordering, throw an error
        for ( int i = 0 ; i < storageTierAliases . size ( ) - 1 ; i ++ ) {
            if ( globalStorageTierAssoc . getOrdinal ( storageTierAliases . get ( i ) ) >= globalStorageTierAssoc . getOrdinal ( storageTierAliases . get ( i + 1 ) ) ) {
                throw new IllegalArgumentException ( "Worker cannot place storage tier " + storageTierAliases . get ( i ) + " above " + storageTierAliases . get ( i + 1 ) + " in the hierarchy" ) ;
            }
        }
        mStorageTierAssoc = new WorkerStorageTierAssoc ( storageTierAliases ) ;
        // validate the number of tiers
        if ( mStorageTierAssoc . size ( ) != totalBytesOnTiers . size ( ) || mStorageTierAssoc . size ( ) != usedBytesOnTiers . size ( ) ) {
            throw new IllegalArgumentException ( "totalBytesOnTiers and usedBytesOnTiers should have the same number of tiers as " + "storageTierAliases, but storageTierAliases has " + mStorageTierAssoc . size ( ) + " tiers, while totalBytesOnTiers has " + totalBytesOnTiers . size ( ) + " tiers and usedBytesOnTiers has " + usedBytesOnTiers . size ( ) + " tiers" ) ;
        }
        // defensive copy
        mTotalBytesOnTiers = new HashMap < > ( totalBytesOnTiers ) ;
        mUsedBytesOnTiers = new HashMap < > ( usedBytesOnTiers ) ;
        // Recompute the aggregate capacity and usage from the per-tier maps.
        mCapacityBytes = 0 ;
        for ( long bytes : mTotalBytesOnTiers . values ( ) ) {
            mCapacityBytes += bytes ;
        }
        mUsedBytes = 0 ;
        for ( long bytes : mUsedBytesOnTiers . values ( ) ) {
            mUsedBytes += bytes ;
        }
        Set < Long > removedBlocks ;
        if ( mIsRegistered ) {
            // This is a re-register of an existing worker. Assume the new block ownership data is more
            // up-to-date and update the existing block information.
            LOG . info ( "re-registering an existing workerId: {}" , mId ) ;
            // Compute the difference between the existing block data, and the new data.
            removedBlocks = Sets . difference ( mBlocks , blocks ) ;
        } else {
            removedBlocks = Collections . emptySet ( ) ;
        }
        // Set the new block information.
        mBlocks = new HashSet < > ( blocks ) ;
        mIsRegistered = true ;
        return removedBlocks ;
    }
}
public class ServicesInner {
    /**
     * Stop service.
     * The services resource is the top-level resource that represents the Data Migration Service.
     * This action stops the service and the service cannot be used for data migration. The service
     * owner won't be billed when the service is stopped.
     *
     * @param groupName Name of the resource group
     * @param serviceName Name of the service
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture < Void > stopAsync ( String groupName , String serviceName , final ServiceCallback < Void > serviceCallback ) {
        // Delegate to the response-level async variant and adapt it to a ServiceFuture,
        // wiring the callback for completion/failure.
        return ServiceFuture . fromResponse ( stopWithServiceResponseAsync ( groupName , serviceName ) , serviceCallback ) ;
    }
}
public class AmazonPinpointClient { /** * Get a Voice Channel
* @ param getVoiceChannelRequest
* @ return Result of the GetVoiceChannel operation returned by the service .
* @ throws BadRequestException
* 400 response
* @ throws InternalServerErrorException
* 500 response
* @ throws ForbiddenException
* 403 response
* @ throws NotFoundException
* 404 response
* @ throws MethodNotAllowedException
* 405 response
* @ throws TooManyRequestsException
* 429 response
* @ sample AmazonPinpoint . GetVoiceChannel
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / pinpoint - 2016-12-01 / GetVoiceChannel " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public GetVoiceChannelResult getVoiceChannel ( GetVoiceChannelRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeGetVoiceChannel ( request ) ; |
public class DistributionSetTypeSpecification {
    /**
     * {@link Specification} for retrieving {@link DistributionSetType}s by its
     * DELETED attribute.
     *
     * @param isDeleted
     *            TRUE/FALSE are compared to the attribute DELETED. If NULL the
     *            attribute is ignored
     * @return the {@link DistributionSetType} {@link Specification}
     */
    public static Specification < JpaDistributionSetType > isDeleted ( final Boolean isDeleted ) {
        // NOTE(review): the javadoc says a null argument means "attribute ignored", but the
        // predicate below always compares (cb.equal against a null value matches nothing in
        // SQL) — confirm callers never pass null, or restore the documented null handling.
        return ( targetRoot , query , cb ) -> cb . equal ( targetRoot . < Boolean > get ( JpaDistributionSetType_ . deleted ) , isDeleted ) ;
    }
}
public class FileIoUtil {
    /**
     * Tries to find the line termination character(s) of the given file by scanning it one
     * character at a time until the first terminator is seen.
     * Only useful for files with text content!
     *
     * @param _file path of the file to inspect
     * @return never null, will return system default line terminator on any error
     *         (empty path, unreadable file, I/O failure) or when no terminator is found
     */
    public static String guessLineTerminatorOfFile ( String _file ) {
        if ( StringUtil . isEmpty ( _file ) ) {
            return SystemUtil . LINE_SEPARATOR ;
        }
        File file = new File ( _file ) ;
        if ( ! file . exists ( ) || ! file . canRead ( ) ) {
            return SystemUtil . LINE_SEPARATOR ;
        }
        // NOTE(review): FileReader uses the platform default charset; multi-byte encodings
        // could in theory confuse the scan — acceptable for the '\r'/'\n' bytes targeted here.
        try ( BufferedReader reader = new BufferedReader ( new FileReader ( file ) ) ) {
            boolean carriageReturn = false ;  // saw '\r'
            boolean lineFeed = false ;        // saw '\n'
            boolean nextIteration = false ;   // a '\r' was seen and we peeked one more char
            char [ ] buf = new char [ 1 ] ;
            while ( reader . read ( buf ) != - 1 ) {
                if ( buf [ 0 ] == '\r' ) {
                    carriageReturn = true ;
                } else if ( buf [ 0 ] == '\n' ) {
                    lineFeed = true ;
                }
                // found both, must be DOS / windows like separators
                if ( carriageReturn && lineFeed ) {
                    return "\r\n" ;
                }
                // found only carriage return, check next character as well
                if ( carriageReturn && ! nextIteration ) {
                    nextIteration = true ;
                    continue ;
                }
                if ( lineFeed ) {
                    // we have a line-feed and no carriage return before
                    return "\n" ;
                } else if ( carriageReturn ) {
                    // only carriage return found before, seems to be macOS 9 line ending
                    return "\r" ;
                }
            }
        } catch ( IOException _ex ) {
            return SystemUtil . LINE_SEPARATOR ;
        }
        // No terminator found at all (e.g. single-line file): fall back to the platform default.
        return SystemUtil . LINE_SEPARATOR ;
    }
}
public class DatabaseUtils { /** * Reads a Integer out of a field in a Cursor and writes it to a Map .
* @ param cursor The cursor to read from
* @ param field The INTEGER field to read
* @ param values The { @ link ContentValues } to put the value into , with the field as the key
* @ param key The key to store the value with in the map */
public static void cursorIntToContentValues ( Cursor cursor , String field , ContentValues values , String key ) { } } | int colIndex = cursor . getColumnIndex ( field ) ; if ( ! cursor . isNull ( colIndex ) ) { values . put ( key , cursor . getInt ( colIndex ) ) ; } else { values . put ( key , ( Integer ) null ) ; } |
/**
 * Appends a "sorted" operation to the provided stream.
 *
 * @param <T> the type of both input and output elements
 * @param upstream a reference stream with element type T
 * @param comparator the comparator to order elements by
 * @return a new stream stage that sorts the upstream elements with the comparator
 */
static <T> Stream<T> makeRef(AbstractPipeline<?, T, ?> upstream, Comparator<? super T> comparator) {
    // Delegate to the reference-typed sorting stage implementation.
    return new OfRef<>(upstream, comparator);
}
/**
 * Creates a fresh {@link CancelIndicator} for the given resource set.
 * For an {@link XtextResourceSet} the indicator snapshots the current
 * modification stamp and reports cancellation once the set is outdated or its
 * stamp changes (and cancellation is allowed); for any other resource set the
 * null implementation (never cancelled) is returned.
 *
 * @param rs the resource set the indicator observes
 * @return a cancel indicator bound to {@code rs}; never null
 */
public CancelIndicator newCancelIndicator(final ResourceSet rs) {
    CancelIndicator _xifexpression = null;
    if ((rs instanceof XtextResourceSet)) {
        // Capture the cancellation policy and the stamp at creation time;
        // the lambda compares against these snapshots on every query.
        final boolean cancelationAllowed = (this.cancelationAllowed.get()).booleanValue();
        final int current = ((XtextResourceSet) rs).getModificationStamp();
        final CancelIndicator _function = () -> {
            return (cancelationAllowed && (((XtextResourceSet) rs).isOutdated() || (current != ((XtextResourceSet) rs).getModificationStamp())));
        };
        return _function;
    } else {
        _xifexpression = CancelIndicator.NullImpl;
    }
    return _xifexpression;
}
/**
 * Parses SQL-92 exact numeric data types:
 * {@code NUMERIC | DECIMAL | DEC [ ( precision [ , scale ] ) ] | INTEGER | INT | SMALLINT}.
 *
 * @param tokens the DDL token stream positioned at a potential numeric type
 * @return the parsed {@link DataType}, or null if the stream does not start
 *         with one of the recognized type keywords
 * @throws ParsingException if the token stream cannot be consumed as expected
 */
protected DataType parseExactNumericType(DdlTokenStream tokens) throws ParsingException {
    DataType dataType = null;
    String typeName = null;
    if (tokens.matchesAnyOf("INTEGER", "INT", "SMALLINT")) {
        // Integer family: no precision/scale clause.
        dataType = new DataType();
        typeName = consume(tokens, dataType, false);
        dataType.setName(typeName);
    } else if (tokens.matchesAnyOf("NUMERIC", "DECIMAL", "DEC")) {
        dataType = new DataType();
        typeName = consume(tokens, dataType, false);
        dataType.setName(typeName);
        int precision = 0;
        int scale = 0;
        if (tokens.matches(L_PAREN)) {
            // Optional "( precision [, scale] )" clause.
            consume(tokens, dataType, false, L_PAREN);
            precision = (int) parseLong(tokens, dataType);
            if (canConsume(tokens, dataType, false, COMMA)) {
                scale = (int) parseLong(tokens, dataType);
            } else {
                scale = getDefaultScale();
            }
            consume(tokens, dataType, false, R_PAREN);
        } else {
            // No clause at all: both values fall back to defaults.
            precision = getDefaultPrecision();
            scale = getDefaultScale();
        }
        dataType.setPrecision(precision);
        dataType.setScale(scale);
    }
    return dataType;
}
public class HldSeSelEntityProcNms { /** * < p > Get processor name for retrieve to edit / delete . < / p >
* @ param pClass a Class
* @ param pAction Action
* @ return a thing */
protected final String getForRetrieveForEditDelete ( final Class < ? > pClass , final String pAction ) { } } | if ( this . sharedEntities . contains ( pClass ) ) { return null ; } else if ( IHasSeSeller . class . isAssignableFrom ( pClass ) ) { return PrcEntityRetrieve . class . getSimpleName ( ) ; } return null ; |
public class JSONHTTPResponseHandler { /** * This function converts the HTTP response content to the specific object .
* @ param httpResponse
* The HTTP response
* @ return The object */
@ Override protected JSONObject convertToObject ( HTTPResponse httpResponse ) { } } | // get response text
JSONObject jsonObject = null ; String content = httpResponse . getContent ( ) ; if ( content != null ) { try { // convert to JSON
jsonObject = new JSONObject ( content ) ; } catch ( JSONException exception ) { throw new FaxException ( "Unable to parse HTTP response text as JSON." , exception ) ; } } return jsonObject ; |
/**
 * Retrieves the number of health checks that are associated with the current AWS account.
 *
 * @param request a request for the number of health checks associated with the
 *        current AWS account
 * @return result of the GetHealthCheckCount operation returned by the service
 * @sample AmazonRoute53.GetHealthCheckCount
 */
@Override
public GetHealthCheckCountResult getHealthCheckCount(GetHealthCheckCountRequest request) {
    // Apply client-side request handlers/mutations before dispatching.
    request = beforeClientExecution(request);
    return executeGetHealthCheckCount(request);
}
/**
 * Goes through the {@code @RequestMapping} registrations known to the handler
 * mapping and returns a list of the external entry points with their URLs,
 * HTTP methods, declaring type, return type and (filtered) parameters.
 *
 * @return list with all entry points and parameters; never null
 */
public List<EntryPoint> retrieveAllExternalEntryPoints() {
    final List<EntryPoint> entryPoints = new ArrayList<EntryPoint>();
    final Map<RequestMappingInfo, HandlerMethod> allUrls = handlerMapping.getHandlerMethods();
    for (RequestMappingInfo mappingInfo : allUrls.keySet()) {
        final HandlerMethod handlerMethod = allUrls.get(mappingInfo);
        // If no pattern is defined, we do not add to the list; the method must
        // also be recognized as an entry point by the helper.
        if (mappingInfo.getPatternsCondition() != null && ExternalEntryPointHelper.isAnEntryPoint(handlerMethod.getMethod(), scanEntryPointAnnotation)) {
            final EntryPoint entryPoint = new EntryPoint();
            entryPoint.setParameters(new ArrayList<EntryPointParameter>());
            // All the URLs this method can handle.
            entryPoint.setUrls(mappingInfo.getPatternsCondition().getPatterns());
            // If there are specified HTTP methods, add them to the returned object.
            if (mappingInfo.getMethodsCondition() != null) {
                entryPoint.setRequestMethods(mappingInfo.getMethodsCondition().getMethods());
            }
            final Method method = handlerMethod.getMethod();
            entryPoint.setMethodName(method.getName());
            entryPoint.setMethodDecorationName(ExternalEntryPointHelper.getEntryPointDecoratedName(method, scanEntryPointAnnotation));
            entryPoint.setType(method.getDeclaringClass());
            entryPoint.setResponseType(method.getReturnType());
            // Get the declared parameter names; they might be overwritten by a @RequestParam.
            final String[] parameterNames = nameDiscover.getParameterNames(method);
            // Consolidated blacklists; the method's @ExternalEntryPoint may add details.
            final Set<Class<?>> consolidatedTypeBlacklist = ExternalEntryPointHelper.getConsolidatedTypeBlacklist(paramTypesBlacklist, method, scanEntryPointAnnotation);
            final Set<String> consolidatedNameBlacklist = ExternalEntryPointHelper.getConsolidatedNameBlacklist(paramNamesBlacklist, method, scanEntryPointAnnotation);
            // Time to retrieve all the parameters.
            for (MethodParameter methodParameter : handlerMethod.getMethodParameters()) {
                computeInputParatemeter(entryPoint, parameterNames, consolidatedTypeBlacklist, consolidatedNameBlacklist, methodParameter);
            }
            entryPoints.add(entryPoint);
        }
    }
    return entryPoints;
}
public class nslimitselector { /** * Use this API to update nslimitselector resources . */
public static base_responses update ( nitro_service client , nslimitselector resources [ ] ) throws Exception { } } | base_responses result = null ; if ( resources != null && resources . length > 0 ) { nslimitselector updateresources [ ] = new nslimitselector [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { updateresources [ i ] = new nslimitselector ( ) ; updateresources [ i ] . selectorname = resources [ i ] . selectorname ; updateresources [ i ] . rule = resources [ i ] . rule ; } result = update_bulk_request ( client , updateresources ) ; } return result ; |
public class DatastreamsPane { /** * Gets the index of the pane containing the
* datastream with the given id .
* @ return index , or - 1 if index is not found */
private int getDatastreamPaneIndex ( String id ) { } } | int index = - 1 ; for ( int i = 0 ; i < m_datastreamPanes . length ; i ++ ) { if ( m_datastreamPanes [ i ] . getItemId ( ) . equals ( id ) ) { index = i ; break ; } } return index ; |
public class Gant { /** * Set the target to be achieved .
* @ param t The target to achieve . */
public void setTarget ( final String t ) { } } | final GantTarget gt = new GantTarget ( ) ; gt . setValue ( t ) ; targets . add ( gt ) ; |
/**
 * An array of facial attributes that you want to be returned. This can be the
 * default list of attributes or all attributes. If you don't specify a value
 * for <code>Attributes</code> or if you specify <code>["DEFAULT"]</code>, the
 * API returns the following subset of facial attributes:
 * <code>BoundingBox</code>, <code>Confidence</code>, <code>Pose</code>,
 * <code>Quality</code>, and <code>Landmarks</code>. If you provide
 * <code>["ALL"]</code>, all facial attributes are returned, but the operation
 * takes longer to complete. If you provide both, <code>["ALL", "DEFAULT"]</code>,
 * the service uses a logical AND operator to determine which attributes to
 * return (in this case, all attributes).
 *
 * @param detectionAttributes the facial attributes to be returned
 * @return returns a reference to this object so that method calls can be chained together
 * @see Attribute
 */
public IndexFacesRequest withDetectionAttributes(Attribute... detectionAttributes) {
    // Convert enum values to their wire-format strings.
    java.util.ArrayList<String> detectionAttributesCopy = new java.util.ArrayList<String>(detectionAttributes.length);
    for (Attribute value : detectionAttributes) {
        detectionAttributesCopy.add(value.toString());
    }
    // Initialize the list on first call, otherwise append.
    if (getDetectionAttributes() == null) {
        setDetectionAttributes(detectionAttributesCopy);
    } else {
        getDetectionAttributes().addAll(detectionAttributesCopy);
    }
    return this;
}
/**
 * Lazily initializes {@code meta} from the underlying result set's metadata.
 * No-op when it is already populated.
 * NOTE(review): an {@link SQLException} is only printed, not rethrown, leaving
 * {@code meta} null — callers must tolerate that; consider proper logging.
 */
private void ensureMeta() {
    if (meta != null) return;
    try {
        meta = sqlRes.getMetaData();
    } catch (SQLException e) {
        e.printStackTrace();
    }
}
public class AbstractFungalRADeployer { /** * Does the URL represent a . rar archive
* @ param url The URL
* @ return < code > true < / code > if . rar archive , otherwise < code > false < / code > */
protected boolean isRarArchive ( URL url ) { } } | if ( url == null ) return false ; return isRarFile ( url ) || isRarDirectory ( url ) ; |
/**
 * Returns the next day.
 *
 * @return the day that follows this one; in case of {@link #PH} {@literal null}
 *         is returned, and the day just before {@code PH} wraps to {@code MON}
 * @throws IllegalStateException if no day with the successor id exists in {@code ALL}
 */
public DayOfTheWeek next() {
    if (this == PH) {
        // The public-holiday pseudo day has no successor.
        return null;
    }
    final int nextId = id + 1;
    if (nextId == PH.id) {
        // Wrap around: the last real weekday is followed by Monday again.
        return MON;
    }
    for (final DayOfTheWeek dow : ALL) {
        if (dow.id == nextId) {
            return dow;
        }
    }
    throw new IllegalStateException("Wasn't able to find next day for: " + this);
}
public class AccessibilityNodeInfoUtils { /** * Returns a fresh copy of { @ code node } with properties that are
* less likely to be stale . Returns { @ code null } if the node can ' t be
* found anymore . */
public static AccessibilityNodeInfoCompat refreshNode ( AccessibilityNodeInfoCompat node ) { } } | if ( node == null ) { return null ; } AccessibilityNodeInfoCompat result = refreshFromChild ( node ) ; if ( result == null ) { result = refreshFromParent ( node ) ; } return result ; |
/**
 * Stores the current {@link ReferrerURLCookieHandler}, refreshing the cached
 * global web-app security config alongside it. Only updates when the handler
 * instance actually changed.
 * In case the ReferrerURLCookieHandler becomes dynamic per request, this will
 * need to move into OidcClientRequest instead of being static.
 *
 * @param referrerURLCookieHandler the handler to publish to the static references
 */
public static void setReferrerURLCookieHandler(ReferrerURLCookieHandler referrerURLCookieHandler) {
    // in case the webAppSecurityConfig is dynamically changed
    if (getReferrerURLCookieHandler() != referrerURLCookieHandler) {
        if (tc.isDebugEnabled()) {
            Tr.debug(tc, "Old and new CookieHandler", getReferrerURLCookieHandler(), referrerURLCookieHandler);
        }
        // Refresh the cached security config together with the handler.
        webAppSecurityConfigRef.set(WebAppSecurityCollaboratorImpl.getGlobalWebAppSecurityConfig());
        referrerURLCookieHandlerRef.set(referrerURLCookieHandler);
    }
}
public class StorageDirView { /** * Gets a filtered list of block metadata , for blocks that are neither pinned or being blocked .
* @ return a list of metadata for all evictable blocks */
public List < BlockMeta > getEvictableBlocks ( ) { } } | List < BlockMeta > filteredList = new ArrayList < > ( ) ; for ( BlockMeta blockMeta : mDir . getBlocks ( ) ) { long blockId = blockMeta . getBlockId ( ) ; if ( mManagerView . isBlockEvictable ( blockId ) ) { filteredList . add ( blockMeta ) ; } } return filteredList ; |
/**
 * Create an instance of {@link JAXBElement}{@code <}{@link CmisExtensionType}{@code >}
 * for the "extension" element scoped to {@link GetTypeDescendants}.
 *
 * @param value the extension payload to wrap
 * @return the wrapped element
 */
@XmlElementDecl(namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/", name = "extension", scope = GetTypeDescendants.class)
public JAXBElement<CmisExtensionType> createGetTypeDescendantsExtension(CmisExtensionType value) {
    // NOTE(review): reuses _GetPropertiesExtension_QNAME — presumably all
    // "extension" elements in this namespace share the same QName; confirm the
    // constant's value matches this declaration rather than being a copy-paste slip.
    return new JAXBElement<CmisExtensionType>(_GetPropertiesExtension_QNAME, CmisExtensionType.class, GetTypeDescendants.class, value);
}
public class transformpolicylabel_stats { /** * Use this API to fetch the statistics of all transformpolicylabel _ stats resources that are configured on netscaler . */
public static transformpolicylabel_stats [ ] get ( nitro_service service ) throws Exception { } } | transformpolicylabel_stats obj = new transformpolicylabel_stats ( ) ; transformpolicylabel_stats [ ] response = ( transformpolicylabel_stats [ ] ) obj . stat_resources ( service ) ; return response ; |
/**
 * Builds a Schema for the FieldPartitioner using the given Schema to determine
 * types not fixed by the FieldPartitioner.
 *
 * @param fp a FieldPartitioner
 * @param schema an entity Schema that will be partitioned
 * @return a Schema for the field partitioner
 * @throws ValidationException if the partition type is not int, long or String
 */
public static Schema partitionFieldSchema(FieldPartitioner<?, ?> fp, Schema schema) {
    if (fp instanceof IdentityFieldPartitioner) {
        // copy the schema directly from the entity to preserve annotations
        return fieldSchema(schema, fp.getSourceName());
    } else {
        // Map the partitioner's Java type onto the corresponding Avro primitive.
        Class<?> fieldType = getPartitionType(fp, schema);
        if (fieldType == Integer.class) {
            return Schema.create(Schema.Type.INT);
        } else if (fieldType == Long.class) {
            return Schema.create(Schema.Type.LONG);
        } else if (fieldType == String.class) {
            return Schema.create(Schema.Type.STRING);
        } else {
            throw new ValidationException("Cannot encode partition " + fp.getName() + " with type " + fp.getSourceType());
        }
    }
}
/**
 * Creates an uncoordinated (local) transaction on this connection and records
 * it so it can be cleaned up when the connection closes.
 *
 * @param allowSubordinateResources whether subordinate resources may enlist;
 *        note the flag is passed negated to the transaction manager
 * @return the new local transaction
 * @throws SIConnectionUnavailableException if the connection is closed
 * @throws SIConnectionDroppedException if the connection has been dropped
 * @throws SIErrorException on an internal error
 * @see com.ibm.wsspi.sib.core.SICoreConnection#createUncoordinatedTransaction(boolean)
 */
@Override
public SIUncoordinatedTransaction createUncoordinatedTransaction(boolean allowSubordinateResources) throws SIConnectionUnavailableException, SIConnectionDroppedException, SIErrorException {
    if (TraceComponent.isAnyTracingEnabled() && CoreSPIConnection.tc.isEntryEnabled())
        SibTr.entry(CoreSPIConnection.tc, "createUncoordinatedTransaction", new Object[] { this, Boolean.valueOf(allowSubordinateResources) });
    LocalTransaction tran = null;
    // Synchronize on the close object, we don't want the connection closing
    // while we try to add the transaction.
    synchronized (this) {
        // See if this connection has been closed.
        checkNotClosed();
        checkMPStarted();
        // Get a LocalTransaction from the message store (via the MP).
        tran = _txManager.createLocalTransaction(!allowSubordinateResources);
        synchronized (_ownedTransactions) {
            // Add this transaction to the connection's list in case we need to
            // clean them up at close time.
            _ownedTransactions.add(tran);
            tran.registerCallback(this);
        }
    }
    if (TraceComponent.isAnyTracingEnabled() && CoreSPIConnection.tc.isEntryEnabled())
        SibTr.exit(CoreSPIConnection.tc, "createUncoordinatedTransaction", tran);
    return tran;
}
/**
 * Registers a listener with the paths specified by the registration object.
 * See {@link ArtifactNotifier#registerForNotifications}.
 * The registration object must be linked to the root container of this
 * notifier; otherwise an <code>IllegalArgumentException</code> is thrown.
 *
 * @param newListenerPaths a notification carrying the paths to register the
 *        listener for
 * @param newListener the listener to be registered to the specified paths
 * @return true or false, telling if the registration was accepted; this
 *         implementation always answers true
 */
@Override
public boolean registerForNotifications(ArtifactNotification newListenerPaths, ArtifactListener newListener) {
    if (newListenerPaths.getContainer().getRoot() != rootContainer) {
        throw new IllegalArgumentException();
    }
    synchronized (listenersLock) {
        boolean addedUncoveredPaths = false;
        for (String newListenerPath : newListenerPaths.getPaths()) {
            // Handle non-recursive listener registration ("!"-prefixed paths)
            // as recursive registration.
            if (newListenerPath.startsWith("!")) {
                newListenerPath = newListenerPath.substring(1);
            }
            ArtifactListenerSelector artifactSelectorCallback = new ArtifactListenerSelector(newListener);
            if (registerListener(newListenerPath, artifactSelectorCallback)) {
                addedUncoveredPaths = true;
            }
        }
        // Only refresh the monitor when a previously uncovered path was added.
        if (addedUncoveredPaths) {
            updateMonitor();
        }
    }
    return true;
}
/**
 * Adds a field binding to the form, using the builder's default label attributes.
 *
 * @param binding the binding of the field
 * @param wrappedControl the optional wrapped component; if null the component
 *        of the binding is used. This parameter should be used if the
 *        component of the binding is being wrapped inside this component
 * @param attributes optional layout attributes for the label; if null no
 *        layout attributes will be applied. See {@link TableLayoutBuilder}
 *        for syntax details
 * @return an array containing the label, the component of the field binding
 *         and the wrapped component
 */
public JComponent[] addBinding(Binding binding, JComponent wrappedControl, String attributes) {
    // Delegate to the full overload with the default label attributes.
    return addBinding(binding, wrappedControl, attributes, getLabelAttributes());
}
/**
 * EMF reflective getter: resolves feature values by feature id, delegating any
 * unknown id to the superclass.
 *
 * @param featureID the structural feature id to read
 * @param resolve whether to resolve proxies (handled by the superclass)
 * @param coreType whether to return the core type (handled by the superclass)
 * @return the feature value
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
        case AfplibPackage.CAT__CAT_DATA:
            return getCATData();
    }
    return super.eGet(featureID, resolve, coreType);
}
/**
 * Return response as JSON.
 * <pre>
 * public JsonResponse&lt;SeaResult&gt; index() {
 *     return asJson(result);
 * }
 * </pre>
 *
 * @param <RESULT> the type of result serialized to JSON
 * @param result the result object converted to JSON string (NotNull)
 * @return the new-created response for JSON (NotNull)
 */
protected <RESULT> JsonResponse<RESULT> asJson(RESULT result) {
    // Fail fast on null before building the response.
    assertArgumentNotNull("result", result);
    return newJsonResponse(result);
}
/**
 * Marshalls the given request object field-by-field into the protocol
 * marshaller, wrapping any failure in an {@link SdkClientException}.
 *
 * @param createTaskSetRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving each field binding
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(CreateTaskSetRequest createTaskSetRequest, ProtocolMarshaller protocolMarshaller) {
    if (createTaskSetRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(createTaskSetRequest.getService(), SERVICE_BINDING);
        protocolMarshaller.marshall(createTaskSetRequest.getCluster(), CLUSTER_BINDING);
        protocolMarshaller.marshall(createTaskSetRequest.getExternalId(), EXTERNALID_BINDING);
        protocolMarshaller.marshall(createTaskSetRequest.getTaskDefinition(), TASKDEFINITION_BINDING);
        protocolMarshaller.marshall(createTaskSetRequest.getNetworkConfiguration(), NETWORKCONFIGURATION_BINDING);
        protocolMarshaller.marshall(createTaskSetRequest.getLoadBalancers(), LOADBALANCERS_BINDING);
        protocolMarshaller.marshall(createTaskSetRequest.getServiceRegistries(), SERVICEREGISTRIES_BINDING);
        protocolMarshaller.marshall(createTaskSetRequest.getLaunchType(), LAUNCHTYPE_BINDING);
        protocolMarshaller.marshall(createTaskSetRequest.getPlatformVersion(), PLATFORMVERSION_BINDING);
        protocolMarshaller.marshall(createTaskSetRequest.getScale(), SCALE_BINDING);
        protocolMarshaller.marshall(createTaskSetRequest.getClientToken(), CLIENTTOKEN_BINDING);
    } catch (Exception e) {
        // Preserve the original cause while surfacing a client-side error.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
/**
 * Move the view to see a given sector.
 *
 * @param sector the sector to go to; a null sector only stops current view
 *        animations/movement and returns
 * @param animate if <code>true</code>, it animates to the position; otherwise
 *        the orbit view's center and zoom are set immediately
 */
public void goTo(Sector sector, boolean animate) {
    View view = getWwd().getView();
    // Halt any in-flight view changes before repositioning.
    view.stopAnimations();
    view.stopMovement();
    if (sector == null) {
        return;
    }
    // Create a bounding box for the specified sector in order to estimate
    // its size in model coordinates.
    Box extent = Sector.computeBoundingBox(getWwd().getModel().getGlobe(), getWwd().getSceneController().getVerticalExaggeration(), sector);
    // Estimate the distance between the center position and the eye position
    // that is necessary to cause the sector to fill a viewport with the
    // specified field of view. Note that we change the distance between the
    // center and eye position here, and leave the field of view constant.
    Angle fov = view.getFieldOfView();
    double zoom = extent.getRadius() / fov.cosHalfAngle() / fov.tanHalfAngle();
    // Configure OrbitView to look at the center of the sector from our
    // estimated distance. Animating causes OrbitView to move over several
    // seconds; the non-animated branch applies the change immediately.
    if (animate) {
        view.goTo(new Position(sector.getCentroid(), 0d), zoom);
    } else {
        ((OrbitView) wwd.getView()).setCenterPosition(new Position(sector.getCentroid(), 0d));
        ((OrbitView) wwd.getView()).setZoom(zoom);
    }
}
/**
 * Start offering shared dbserver sessions: hooks this manager into the
 * DeviceFinder, requests the dbserver port of every currently known player,
 * and spawns a daemon-style loop that closes idle clients every 500 ms while
 * running. No-op if already running.
 *
 * @throws SocketException if there is a problem opening connections
 */
public synchronized void start() throws SocketException {
    if (!isRunning()) {
        DeviceFinder.getInstance().addLifecycleListener(lifecycleListener);
        DeviceFinder.getInstance().addDeviceAnnouncementListener(announcementListener);
        DeviceFinder.getInstance().start();
        // Ask every device already on the network for its dbserver port.
        for (DeviceAnnouncement device : DeviceFinder.getInstance().getCurrentDevices()) {
            requestPlayerDBServerPort(device);
        }
        // Background loop: periodically close idle clients until stopped.
        // An interrupt during sleep is logged and the loop continues.
        new Thread(null, new Runnable() {
            @Override
            public void run() {
                while (isRunning()) {
                    try {
                        Thread.sleep(500);
                    } catch (InterruptedException e) {
                        logger.warn("Interrupted sleeping to close idle dbserver clients");
                    }
                    closeIdleClients();
                }
                logger.info("Idle dbserver client closer shutting down.");
            }
        }, "Idle dbserver client closer").start();
        running.set(true);
        deliverLifecycleAnnouncement(logger, true);
    }
}
public class SessionController { /** * Creates a chat session bound to the specified session id .
* @ param sessionId The chat session id .
* @ param originator If true , this user is originating the chat session .
* @ return The controller for the chat session . */
protected static SessionController create ( String sessionId , boolean originator ) { } } | Map < String , Object > args = new HashMap < > ( ) ; args . put ( "id" , sessionId ) ; args . put ( "title" , StrUtil . formatMessage ( "@cwf.chat.session.title" ) ) ; args . put ( "originator" , originator ? true : null ) ; Window dlg = PopupDialog . show ( DIALOG , args , true , true , false , null ) ; return ( SessionController ) FrameworkController . getController ( dlg ) ; |
public class ExpandablePanelView { /** * Checks if children number is correct and logs an error if it is not */
private void checkChildrenCount ( ) { } } | if ( getChildCount ( ) != 2 ) Log . e ( getResources ( ) . getString ( R . string . tag ) , getResources ( ) . getString ( R . string . wrong_number_children_error ) ) ; |
public class StatsEstimate { /** * Gets the totalCost value for this StatsEstimate .
* @ return totalCost * The estimated total cost . */
public com . google . api . ads . adwords . axis . v201809 . cm . Money getTotalCost ( ) { } } | return totalCost ; |
/**
 * Add the given component as a child of this component. The tag is used to
 * identify the child in this component's velocity template.
 *
 * @param component the component to add
 * @param tag the tag used to identify the component
 * @deprecated Use {@link WTemplate} instead
 */
@Deprecated
@Override
public void add(final WComponent component, final String tag) {
    // Pure delegation; kept only for backwards compatibility.
    super.add(component, tag);
}
public class CukedoctorMojo { /** * documentation is saved under $ { buildDir } / cukedoctor folder */
String getDocumentationDir ( ) { } } | String baseDir = project . getBuild ( ) . getDirectory ( ) ; if ( baseDir == null || new File ( baseDir ) . exists ( ) ) { baseDir = project . getBasedir ( ) . getAbsolutePath ( ) ; baseDir = baseDir + "/target" ; if ( ! new File ( baseDir ) . exists ( ) ) { // maven plugin declared in child module
baseDir = project . getBasedir ( ) . getAbsolutePath ( ) ; } } if ( ! baseDir . endsWith ( "/" ) ) { baseDir = baseDir + "/" ; } if ( outputDir == null ) { outputDir = "cukedoctor/" ; } if ( ! outputDir . endsWith ( "/" ) ) { outputDir = outputDir + "/" ; } return baseDir + outputDir ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.