signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DeviceInfo {
    /**
     * Returns a {@link FrameInterval} object containing information about the
     * supported frame intervals for capture at the given resolution and image
     * format. <b>Note that the returned {@link FrameInterval} object could have
     * its type set to {@link FrameInterval.Type#UNSUPPORTED} if the driver does
     * not support frame interval enumeration OR if the device is currently being
     * used by another application and frame intervals cannot be enumerated at
     * this time.</b><br>
     * Frame interval information can also be obtained through
     * {@link ResolutionInfo} objects, attached to each {@link ImageFormat}.
     * See {@link #getFormatList()}.
     *
     * @param imf    the capture image format for which the frame intervals should be enumerated
     * @param width  the capture width for which the frame intervals should be enumerated
     * @param height the capture height for which the frame intervals should be enumerated
     * @return a {@link FrameInterval} object containing information about the supported frame intervals
     * @throws StateException if the associated VideoDevice has been released
     */
    public synchronized FrameInterval listIntervals(ImageFormat imf, int width, int height) {
        // Fail fast if the associated VideoDevice has already been released.
        checkRelease();
        // Delegate to the enumeration helper, passing the format's driver index.
        return doListIntervals(object, imf.getIndex(), width, height);
    }
}
public class AmazonECSClient {
    /**
     * Describes a specified task or tasks.
     *
     * @param request the DescribeTasks request
     * @return Result of the DescribeTasks operation returned by the service.
     * @throws ServerException
     *         These errors are usually caused by a server issue.
     * @throws ClientException
     *         These errors are usually caused by a client action, such as using an action or resource on behalf of a
     *         user that doesn't have permissions to use the action or resource, or specifying an identifier that is
     *         not valid.
     * @throws InvalidParameterException
     *         The specified parameter is invalid. Review the available parameters for the API request.
     * @throws ClusterNotFoundException
     *         The specified cluster could not be found. You can view your available clusters with
     *         <a>ListClusters</a>. Amazon ECS clusters are Region-specific.
     * @sample AmazonECS.DescribeTasks
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ecs-2014-11-13/DescribeTasks" target="_top">AWS API
     *      Documentation</a>
     */
    @Override
    public DescribeTasksResult describeTasks(DescribeTasksRequest request) {
        // Run registered request handlers/pre-processing before dispatching.
        request = beforeClientExecution(request);
        return executeDescribeTasks(request);
    }
}
public class KeyVaultClientBaseImpl {
    /**
     * Updates the policy for a certificate.
     * Set specified members in the certificate policy. Leave others as null.
     * This operation requires the certificates/update permission.
     *
     * @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
     * @param certificateName The name of the certificate in the given vault.
     * @param certificatePolicy The policy for the certificate.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws KeyVaultErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the CertificatePolicy object if successful.
     */
    public CertificatePolicy updateCertificatePolicy(String vaultBaseUrl, String certificateName, CertificatePolicy certificatePolicy) {
        // Synchronous facade: block on the async call and unwrap the single response body.
        return updateCertificatePolicyWithServiceResponseAsync(vaultBaseUrl, certificateName, certificatePolicy).toBlocking().single().body();
    }
}
public class Event {
    /**
     * Add an attribute to the event.
     *
     * @param key   key of attribute
     * @param value value of attribute; a null value is stored as the string "null"
     * @return this object, to allow event attribute chains
     */
    public Event addAttribute(String key, Object value) {
        // Map values cannot usefully be null here, so substitute the literal string "null".
        eventAttributes.put(key, (value == null ? "null" : value));
        return this;
    }
}
public class MMOImpl {
    /**
     * Sets the value of the EMF structural feature identified by {@code featureID}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.MMO__RG_LENGTH:
                setRGLength((Integer) newValue);
                return;
            case AfplibPackage.MMO__RG:
                // Replace the whole list contents rather than appending.
                getRg().clear();
                getRg().addAll((Collection<? extends MMORG>) newValue);
                return;
        }
        // Unknown feature IDs are handled by the superclass.
        super.eSet(featureID, newValue);
    }
}
public class ConfigClient {
    /**
     * Updates a sink. This method replaces the following fields in the existing sink with values
     * from the new sink: `destination`, and `filter`. The updated sink might also have a new
     * `writer_identity`; see the `unique_writer_identity` field.
     *
     * <p>Sample code:
     * <pre><code>
     * try (ConfigClient configClient = ConfigClient.create()) {
     *   SinkName sinkName = ProjectSinkName.of("[PROJECT]", "[SINK]");
     *   LogSink sink = LogSink.newBuilder().build();
     *   FieldMask updateMask = FieldMask.newBuilder().build();
     *   LogSink response = configClient.updateSink(sinkName.toString(), sink, updateMask);
     * }
     * </code></pre>
     *
     * @param sinkName Required. The full resource name of the sink to update, including the parent
     *     resource and the sink identifier:
     *     <p>"projects/[PROJECT_ID]/sinks/[SINK_ID]"
     *     "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
     *     "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
     *     "folders/[FOLDER_ID]/sinks/[SINK_ID]"
     *     <p>Example: `"projects/my-project-id/sinks/my-sink-id"`.
     * @param sink Required. The updated sink, whose name is the same identifier that appears as
     *     part of `sink_name`.
     * @param updateMask Optional. Field mask that specifies the fields in `sink` that need an
     *     update. A sink field will be overwritten if, and only if, it is in the update mask.
     *     `name` and output only fields cannot be updated.
     *     <p>An empty updateMask is temporarily treated as using the following mask for backwards
     *     compatibility purposes: destination, filter, includeChildren. At some point in the
     *     future, this behavior will be removed and specifying an empty updateMask will be an
     *     error.
     *     <p>For a detailed `FieldMask` definition, see
     *     https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask
     *     <p>Example: `updateMask=filter`.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    public final LogSink updateSink(String sinkName, LogSink sink, FieldMask updateMask) {
        // Build the request message and delegate to the request-based overload.
        UpdateSinkRequest request = UpdateSinkRequest.newBuilder().setSinkName(sinkName).setSink(sink).setUpdateMask(updateMask).build();
        return updateSink(request);
    }
}
public class SeaGlassInternalFrameTitlePane {
    /**
     * Create the iconify, maximize and close buttons and add their action
     * listeners, installing tooltips where a tooltip string is configured.
     */
    private void createButtons() {
        iconButton = new NoFocusButton("InternalFrameTitlePane.iconifyButtonAccessibleName");
        iconButton.addActionListener(iconifyAction);
        if (iconButtonToolTip != null && iconButtonToolTip.length() != 0) {
            iconButton.setToolTipText(iconButtonToolTip);
        }
        // NOTE(review): unlike the iconify and close buttons, no tooltip is set
        // on maxButton here — confirm whether maxButtonToolTip handling was
        // intentionally omitted or is applied elsewhere (e.g. setButtonTooltips).
        maxButton = new NoFocusButton("InternalFrameTitlePane.maximizeButtonAccessibleName");
        maxButton.addActionListener(maximizeAction);
        closeButton = new NoFocusButton("InternalFrameTitlePane.closeButtonAccessibleName");
        closeButton.addActionListener(closeAction);
        if (closeButtonToolTip != null && closeButtonToolTip.length() != 0) {
            closeButton.setToolTipText(closeButtonToolTip);
        }
        setButtonTooltips();
    }
}
public class BaseRedisQueueFactory {
    /**
     * Destroy method. Tears down the superclass state first, then — if this
     * factory owns its Redis connector — destroys the connector, logging (not
     * propagating) any failure so cleanup always completes.
     *
     * @since 0.6.2.6
     */
    public void destroy() {
        try {
            super.destroy();
        } finally {
            // Only destroy the connector when this instance created it (myOwnRedis).
            if (defaultJedisConnector != null && myOwnRedis) {
                try {
                    defaultJedisConnector.destroy();
                } catch (Exception e) {
                    // Best-effort cleanup: log and continue rather than masking
                    // an exception from super.destroy().
                    LOGGER.warn(e.getMessage(), e);
                } finally {
                    defaultJedisConnector = null;
                }
            }
        }
    }
}
public class AOStream {
    /**
     * Helper method. Unlocks STORE_NEVER items synchronously.
     * Called from within a synchronized (this) block.
     *
     * For each AOValue in the list: cancels its timeout entry, looks up the
     * corresponding message in the consumer dispatcher, and unlocks it if found.
     * Any exception is FFDC'd and traced but not rethrown.
     *
     * @param unlockList A list of AOValue objects
     */
    private final void unlockNonPersistentMessages(java.util.ArrayList unlockList) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "unlockNonPersistentMessages", unlockList);
        try {
            int length = unlockList.size();
            for (int i = 0; i < length; i++) {
                AOValue value = (AOValue) unlockList.get(i);
                // Cancel the pending expiry for this tick before unlocking.
                dem.removeTimeoutEntry(value);
                SIMPMessage msgItem = consumerDispatcher.getMessageByValue(value);
                // PK67067 We may not find a message in the store for this tick, because
                // it may have been removed using the SIBQueuePoint MBean
                if (msgItem != null)
                    msgItem.unlockMsg(msgItem.getLockID(), null, true);
            }
        } catch (Exception e) {
            // Record the failure for serviceability but do not propagate:
            // unlocking is best-effort.
            FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.AOStream.unlockNonPersistentMessages", "1:2846:1.80.3.24", this);
            SibTr.exception(tc, e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "unlockNonPersistentMessages");
    }
}
public class KinesisFirehoseInputDescriptionMarshaller {
    /**
     * Marshall the given parameter object, writing its ResourceARN and RoleARN
     * fields through the supplied protocol marshaller.
     *
     * @param kinesisFirehoseInputDescription the object to marshall; must not be null
     * @param protocolMarshaller the marshaller to write fields to
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(KinesisFirehoseInputDescription kinesisFirehoseInputDescription, ProtocolMarshaller protocolMarshaller) {
        if (kinesisFirehoseInputDescription == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(kinesisFirehoseInputDescription.getResourceARN(), RESOURCEARN_BINDING);
            protocolMarshaller.marshall(kinesisFirehoseInputDescription.getRoleARN(), ROLEARN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Force {
    /**
     * Updatable implementation: advances the horizontal and vertical force
     * components toward their destinations, snapping each component to its
     * destination once marked as arrived.
     *
     * @param extrp the extrapolation factor for this update step
     */
    @Override
    public void update(double extrp) {
        // Remember the previous component values before stepping.
        fhOld = fh;
        fvOld = fv;
        updateLastForce();
        // Not arrived horizontally: keep stepping; otherwise pin to destination.
        if (!arrivedH) {
            updateNotArrivedH(extrp);
        } else {
            fh = fhDest;
        }
        // Same handling for the vertical component.
        if (!arrivedV) {
            updateNotArrivedV(extrp);
        } else {
            fv = fvDest;
        }
        fixForce();
    }
}
public class OrchestrationShardingSchemaGroup {
    /**
     * Get data source names.
     *
     * @param shardingSchemaName sharding schema name
     * @return data source names for the schema, or an empty list when the
     *         schema is not registered in this group
     */
    public Collection<String> getDataSourceNames(final String shardingSchemaName) {
        // Missing schemas yield an immutable empty list rather than null.
        return schemaGroup.containsKey(shardingSchemaName) ? schemaGroup.get(shardingSchemaName) : Collections.<String>emptyList();
    }
}
public class CacheManagerImpl {
    /**
     * Schedules the named cache file for deletion after the configured delete
     * delay. Deletion failures are logged at WARNING level, never thrown.
     *
     * @param fname the file name (relative to the cache directory) to delete
     * @see com.ibm.jaggr.service.cache.ICacheManager#deleteFileDelayed(java.lang.String)
     */
    public void deleteFileDelayed(final String fname) {
        // Defer the delete so readers still holding the file have time to finish.
        _aggregator.getExecutors().getFileDeleteExecutor().schedule(new Runnable() {
            public void run() {
                File file = new File(_directory, fname);
                try {
                    if (!file.delete()) {
                        // delete() returning false is reported, not fatal.
                        if (log.isLoggable(Level.WARNING)) {
                            log.warning(MessageFormat.format(Messages.CacheManagerImpl_8, new Object[] { file.getAbsolutePath() }));
                        }
                    }
                } catch (Exception e) {
                    if (log.isLoggable(Level.WARNING)) {
                        log.log(Level.WARNING, e.getMessage(), e);
                    }
                }
            }
        }, _aggregator.getOptions().getDeleteDelay(), TimeUnit.SECONDS);
    }
}
public class AbstractBuilderProcessor {
    /**
     * Generates POJO classes for the given buildables. For each interface or
     * annotation type, applies the POJO transformation, registers the result
     * (and any ADDITIONAL_BUILDABLES / ADDITIONAL_TYPES attributes) with the
     * definition and buildable repositories, emits source files, and finally
     * generates builders for all newly registered buildables.
     *
     * @param builderContext The builder context.
     * @param buildables The set of buildables.
     * @throws RuntimeException wrapping any IOException raised while writing sources
     */
    public void generatePojos(BuilderContext builderContext, Set<TypeDef> buildables) {
        Set<TypeDef> additonalBuildables = new HashSet<>();
        Set<TypeDef> additionalTypes = new HashSet<>();
        for (TypeDef typeDef : buildables) {
            try {
                // Only interfaces and annotation types are turned into POJOs.
                if (typeDef.isInterface() || typeDef.isAnnotation()) {
                    typeDef = ClazzAs.POJO.apply(typeDef);
                    builderContext.getDefinitionRepository().register(typeDef);
                    builderContext.getBuildableRepository().register(typeDef);
                    generateFromResources(typeDef, Constants.DEFAULT_SOURCEFILE_TEMPLATE_LOCATION);
                    additonalBuildables.add(typeDef);
                    // Extra buildables declared by the type are registered in
                    // both repositories and get builders generated.
                    if (typeDef.hasAttribute(ADDITIONAL_BUILDABLES)) {
                        for (TypeDef also : typeDef.getAttribute(ADDITIONAL_BUILDABLES)) {
                            builderContext.getDefinitionRepository().register(also);
                            builderContext.getBuildableRepository().register(also);
                            generateFromResources(also, Constants.DEFAULT_SOURCEFILE_TEMPLATE_LOCATION);
                            additonalBuildables.add(also);
                        }
                    }
                    // Extra plain types are only registered as definitions —
                    // they do not get builders.
                    if (typeDef.hasAttribute(ADDITIONAL_TYPES)) {
                        for (TypeDef also : typeDef.getAttribute(ADDITIONAL_TYPES)) {
                            builderContext.getDefinitionRepository().register(also);
                            generateFromResources(also, Constants.DEFAULT_SOURCEFILE_TEMPLATE_LOCATION);
                            additionalTypes.add(also);
                        }
                    }
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
        generateBuildables(builderContext, additonalBuildables);
    }
}
public class MemoryPersistenceManagerImpl {
    /**
     * Returns the job executions for the given instance, ordered in sequence
     * from most-recent to least-recent. The container keeps its own order and
     * does not depend on execution id or creation time to order these.
     *
     * @param jobInstanceId the id of the job instance whose executions to list
     * @return the executions, most-recent first
     * @throws NoSuchJobInstanceException if no instance exists for the id
     */
    @Override
    public List<JobExecutionEntity> getJobExecutionsFromJobInstanceId(long jobInstanceId) throws NoSuchJobInstanceException {
        JobInstanceEntity instance = getJobInstance(jobInstanceId);
        // Copy before sorting so the instance's own collection is not mutated.
        List<JobExecutionEntity> executions = new ArrayList<JobExecutionEntity>(instance.getJobExecutions());
        // sorting could be optimized a bit but is it necessary for this impl?
        Collections.sort(executions, ReverseJobExecutionSequenceForOneInstanceComparator.INSTANCE);
        return executions;
    }
}
public class Gauge {
    /**
     * Defines if the LED will be drawn (if available).
     *
     * @param VISIBLE true to draw the LED, false to hide it
     */
    public void setLedVisible(final boolean VISIBLE) {
        // Lazy-property pattern: before the JavaFX property is created, store the
        // value in the plain field and fire the event manually; afterwards the
        // property itself handles notification.
        if (null == ledVisible) {
            _ledVisible = VISIBLE;
            fireUpdateEvent(VISIBILITY_EVENT);
        } else {
            ledVisible.set(VISIBLE);
        }
    }
}
public class ReportDefinition {
    /**
     * A list of manifests that you want Amazon Web Services to create for this report.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setAdditionalArtifacts(java.util.Collection)} or
     * {@link #withAdditionalArtifacts(java.util.Collection)} if you want to override
     * the existing values.
     *
     * @param additionalArtifacts
     *        A list of manifests that you want Amazon Web Services to create for this report.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see AdditionalArtifact
     */
    public ReportDefinition withAdditionalArtifacts(String... additionalArtifacts) {
        // Lazily create the backing list, presized to the varargs length.
        if (this.additionalArtifacts == null) {
            setAdditionalArtifacts(new java.util.ArrayList<String>(additionalArtifacts.length));
        }
        for (String ele : additionalArtifacts) {
            this.additionalArtifacts.add(ele);
        }
        return this;
    }
}
public class SystemUtil { /** * Parse a string representation of a time duration with an optional suffix of ' s ' , ' ms ' , ' us ' , or ' ns ' to * indicate seconds , milliseconds , microseconds , or nanoseconds respectively . * If the resulting duration is greater than { @ link Long # MAX _ VALUE } then { @ link Long # MAX _ VALUE } is used . * @ param propertyName associated with the duration value . * @ param propertyValue to be parsed . * @ return the long value . * @ throws NumberFormatException if the value is negative or malformed . */ public static long parseDuration ( final String propertyName , final String propertyValue ) { } }
final char lastCharacter = propertyValue . charAt ( propertyValue . length ( ) - 1 ) ; if ( Character . isDigit ( lastCharacter ) ) { return Long . valueOf ( propertyValue ) ; } if ( lastCharacter != 's' && lastCharacter != 'S' ) { throw new NumberFormatException ( propertyName + ": " + propertyValue + " should end with: s, ms, us, or ns." ) ; } final char secondLastCharacter = propertyValue . charAt ( propertyValue . length ( ) - 2 ) ; if ( Character . isDigit ( secondLastCharacter ) ) { final long value = AsciiEncoding . parseLongAscii ( propertyValue , 0 , propertyValue . length ( ) - 1 ) ; return TimeUnit . SECONDS . toNanos ( value ) ; } final long value = AsciiEncoding . parseLongAscii ( propertyValue , 0 , propertyValue . length ( ) - 2 ) ; switch ( secondLastCharacter ) { case 'n' : case 'N' : return value ; case 'u' : case 'U' : return TimeUnit . MICROSECONDS . toNanos ( value ) ; case 'm' : case 'M' : return TimeUnit . MILLISECONDS . toNanos ( value ) ; default : throw new NumberFormatException ( propertyName + ": " + propertyValue + " should end with: s, ms, us, or ns." ) ; }
public class RPC { /** * Get a proxy connection to a remote server * @ param protocol protocol class * @ param clientVersion client version * @ param addr remote address * @ param conf configuration to use * @ param rpcTimeout timeout for each RPC * @ param timeout time in milliseconds before giving up * @ return the proxy * @ throws IOException if the far end through a RemoteException */ public static < T extends VersionedProtocol > ProtocolProxy < T > waitForProtocolProxy ( Class < T > protocol , long clientVersion , InetSocketAddress addr , Configuration conf , long timeout , int rpcTimeout ) throws IOException { } }
long startTime = System . currentTimeMillis ( ) ; UserGroupInformation ugi = null ; try { ugi = UserGroupInformation . login ( conf ) ; } catch ( LoginException le ) { throw new RuntimeException ( "Couldn't login!" ) ; } IOException ioe ; while ( true ) { try { return getProtocolProxy ( protocol , clientVersion , addr , ugi , conf , NetUtils . getDefaultSocketFactory ( conf ) , rpcTimeout ) ; } catch ( ConnectException se ) { // namenode has not been started LOG . info ( "Server at " + addr + " not available yet, Zzzzz..." ) ; ioe = se ; } catch ( SocketTimeoutException te ) { // namenode is busy LOG . info ( "Problem connecting to server: " + addr ) ; ioe = te ; } // check if timed out if ( System . currentTimeMillis ( ) - timeout >= startTime ) { throw ioe ; } // wait for retry try { Thread . sleep ( 1000 ) ; } catch ( InterruptedException ie ) { // IGNORE } }
public class DeviceManufacturerTargeting {
    /**
     * Gets the deviceManufacturers value for this DeviceManufacturerTargeting.
     *
     * @return deviceManufacturers — device manufacturers that are being targeted
     *         or excluded by the {@link LineItem}.
     */
    public com.google.api.ads.admanager.axis.v201808.Technology[] getDeviceManufacturers() {
        // Plain accessor; returns the internal array reference directly.
        return deviceManufacturers;
    }
}
public class AmazonWorkLinkClient {
    /**
     * Provides information about the domain.
     *
     * @param request the DescribeDomain request
     * @return Result of the DescribeDomain operation returned by the service.
     * @throws UnauthorizedException
     *         You are not authorized to perform this action.
     * @throws InternalServerErrorException
     *         The service is temporarily unavailable.
     * @throws InvalidRequestException
     *         The request is not valid.
     * @throws ResourceNotFoundException
     *         The requested resource was not found.
     * @throws TooManyRequestsException
     *         The number of requests exceeds the limit.
     * @sample AmazonWorkLink.DescribeDomain
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/worklink-2018-09-25/DescribeDomain" target="_top">AWS API
     *      Documentation</a>
     */
    @Override
    public DescribeDomainResult describeDomain(DescribeDomainRequest request) {
        // Run registered request handlers/pre-processing before dispatching.
        request = beforeClientExecution(request);
        return executeDescribeDomain(request);
    }
}
public class CollationRootElements {
    /**
     * Returns the first secondary &amp; tertiary weights for p where index = findPrimary(p) + 1.
     */
    private long getFirstSecTerForPrimary(int index) {
        long secTer = elements[index];
        if ((secTer & SEC_TER_DELTA_FLAG) == 0) {
            // No sec/ter delta.
            return Collation.COMMON_SEC_AND_TER_CE;
        }
        secTer &= ~SEC_TER_DELTA_FLAG;
        if (secTer > Collation.COMMON_SEC_AND_TER_CE) {
            // Implied sec/ter.
            return Collation.COMMON_SEC_AND_TER_CE;
        }
        // Explicit sec/ter below common/common.
        return secTer;
    }
}
public class AnnotationUtils { /** * Verifies if an annotation is present at a class type hierarchy . * @ param < A > The annotation type . * @ param clazz The class . * @ param annotationType The annotation . * @ return Whether the annotation is present at the class hierarchy or not . * @ since 0.3.0 */ public static < A extends Annotation > boolean isAnnotationPresent ( Class < ? > clazz , Class < A > annotationType ) { } }
if ( findAnnotation ( clazz , annotationType ) != null ) return true ; return false ;
public class LibertyTransactionService {
    /**
     * Called by DS (Declarative Services) to set the service reference.
     *
     * @param ref the reference from DS
     */
    @Reference(name = "transactionManager", service = TransactionManager.class, policy = ReferencePolicy.DYNAMIC)
    protected void setTransactionManager(ServiceReference<TransactionManager> ref) {
        // Store the reference in the atomic holder; resolution happens on use.
        this.transactionManager.setReference(ref);
    }
}
public class JSONObject {
    /**
     * Convert this object into a stream of JSON text, specifying verbosity.
     *
     * @param writer  The writer which to serialize the JSON text to.
     * @param verbose true for pretty-printed output, false for compact output.
     * @throws IOException Thrown on IO errors during serialization.
     */
    public void serialize(Writer writer, boolean verbose) throws IOException {
        Serializer serializer;
        // Try to avoid double-buffering or buffering in-memory writers.
        Class writerClass = writer.getClass();
        if (!StringWriter.class.isAssignableFrom(writerClass) && !CharArrayWriter.class.isAssignableFrom(writerClass) && !BufferedWriter.class.isAssignableFrom(writerClass)) {
            writer = new BufferedWriter(writer);
        }
        // Pick the serializer implementation matching the requested verbosity.
        if (verbose) {
            serializer = new SerializerVerbose(writer);
        } else {
            serializer = new Serializer(writer);
        }
        serializer.writeObject(this).flush();
    }
}
public class PreauthorizationService {
    /**
     * Authorizes the given amount with the given {@link Payment}.
     * <strong>Works only for credit cards. Direct debit not supported.</strong>
     *
     * @param payment
     *            The {@link Payment} itself (only creditcard-object)
     * @param amount
     *            Amount (in cents) which will be charged.
     * @param currency
     *            ISO 4217 formatted currency code.
     * @param description
     *            A short description for the preauthorization; ignored when blank.
     * @return {@link Transaction} object with the {@link Preauthorization} as sub object.
     */
    public Preauthorization createWithPayment(final Payment payment, final Integer amount, final String currency, final String description) {
        // Validate all required inputs before building the request.
        ValidationUtils.validatesPayment(payment);
        ValidationUtils.validatesAmount(amount);
        ValidationUtils.validatesCurrency(currency);
        ParameterMap<String, String> params = new ParameterMap<String, String>();
        params.add("payment", payment.getId());
        params.add("amount", String.valueOf(amount));
        params.add("currency", currency);
        // Identify the calling client library to the API.
        params.add("source", String.format("%s-%s", PaymillContext.getProjectName(), PaymillContext.getProjectVersion()));
        if (StringUtils.isNotBlank(description))
            params.add("description", description);
        return RestfulUtils.create(PreauthorizationService.PATH, params, Preauthorization.class, super.httpClient);
    }
}
public class DictTerm {
    /**
     * Setter for annotType — sets the string value of the annotType feature.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setAnnotType(String v) {
        // Guard (generated): verify the feature exists in the type system before writing.
        if (DictTerm_Type.featOkTst && ((DictTerm_Type) jcasType).casFeat_annotType == null)
            jcasType.jcas.throwFeatMissing("annotType", "ch.epfl.bbp.uima.types.DictTerm");
        jcasType.ll_cas.ll_setStringValue(addr, ((DictTerm_Type) jcasType).casFeatCode_annotType, v);
    }
}
public class Parseable {
    /**
     * Creates a Parseable backed by the given reader using the given parse
     * options. The reader is wrapped via doNotClose, so parsing does not close
     * the caller's reader.
     *
     * @param reader  the reader to parse from
     * @param options the parse options to apply
     * @return a Parseable over the reader
     */
    public static Parseable newReader(Reader reader, ConfigParseOptions options) {
        return new ParseableReader(doNotClose(reader), options);
    }
}
public class StringifierFactory {
    /**
     * Register a stringifier for null values.
     *
     * @param nullValue the fixed string to render for null objects
     * @return the stringifier that was registered
     */
    protected final Stringifier registerNullStringifier(final String nullValue) {
        // Anonymous stringifier that always renders the configured constant.
        Stringifier nullStringifier = new Stringifier() {
            public String toString(Object object) {
                return nullValue;
            }
        };
        compositeStringifier.setNullStringifier(nullStringifier);
        return nullStringifier;
    }
}
public class DropDown {
    /**
     * Returns the currently selected option of this dropdown.
     *
     * @return the selected option's value
     * @throws WidgetException if reading the selection fails
     * @see qc.automation.framework.widget.IEditableField#getValue(java.lang.String)
     */
    @Override
    public String getValue() throws WidgetException {
        try {
            return getSelectedOption();
        } catch (Exception e) {
            // Wrap any underlying failure with the widget's locator for context.
            throw new WidgetException("Error while getting dropdown value", getByLocator(), e);
        }
    }
}
public class ServletRESTRequestWithParams { /** * ( non - Javadoc ) * @ see com . ibm . wsspi . rest . handler . RESTRequest # getURI ( ) */ @ Override public String getURI ( ) { } }
ServletRESTRequestImpl ret = castRequest ( ) ; if ( ret != null ) return ret . getURI ( ) ; return null ;
public class HttpClient {
    /**
     * Enables auto-redirect support if the passed
     * {@link java.util.function.BiPredicate} matches.
     * Note: the passed {@link HttpClientRequest} and {@link HttpClientResponse}
     * should be considered read-only and the implementation SHOULD NOT consume or
     * write the request/response in this predicate.
     *
     * @param predicate that returns true to enable auto-redirect support.
     * @return a new {@link HttpClient}
     */
    public final HttpClient followRedirect(BiPredicate<HttpClientRequest, HttpClientResponse> predicate) {
        Objects.requireNonNull(predicate, "predicate");
        // Immutable-builder style: returns a new client with the predicate
        // installed in the underlying TCP bootstrap configuration.
        return tcpConfiguration(tcp -> tcp.bootstrap(b -> HttpClientConfiguration.followRedirectPredicate(b, predicate)));
    }
}
public class HttpUtil {
    /**
     * Reads the content from a stream.<br>
     * First attempts to decode the content with the given charset (defaulting to
     * UTF-8 when empty); if isGetCharsetFromContent is true, the charset declared
     * inside the content body (found via regex) is used to re-decode it.
     *
     * @param in input stream
     * @param charset character set to decode with
     * @param isGetCharsetFromContent whether to detect the charset from the returned content
     * @return the decoded content
     * @throws IOException on I/O failure
     */
    public static String getString(InputStream in, Charset charset, boolean isGetCharsetFromContent) throws IOException {
        // Read the full stream once, then delegate decoding to the byte[] overload.
        final byte[] contentBytes = IoUtil.readBytes(in);
        return getString(contentBytes, charset, isGetCharsetFromContent);
    }
}
public class CmsContainerPageElementPanel {
    /**
     * Puts a highlighting border around the element.<p>
     *
     * Creates the border widget on first use (blue for new elements or
     * create-new model-page elements, red otherwise) and repositions the
     * existing border on subsequent calls.
     */
    public void highlightElement() {
        CmsPositionBean position = CmsPositionBean.getBoundingClientRect(getElement());
        if (m_highlighting == null) {
            // First highlight: choose the border color and attach it to the root panel.
            m_highlighting = new CmsHighlightingBorder(position, isNew() || (CmsContainerpageController.get().getData().isModelPage() && isCreateNew()) ? CmsHighlightingBorder.BorderColor.blue : CmsHighlightingBorder.BorderColor.red);
            RootPanel.get().add(m_highlighting);
        } else {
            // Border already exists: just move it to the element's current position.
            m_highlighting.setPosition(position);
        }
    }
}
public class BlackLittermanModel {
    /**
     * Builds the diagonal matrix of view return variances, scaled by the
     * configured confidence (tau / weight on views). When confidence equals
     * one the raw variances are used; otherwise each variance is divided by
     * the confidence value.
     */
    protected PrimitiveMatrix getViewVariances() {
        final int tmpDim = myViews.size();
        final PrimitiveMatrix.DenseReceiver retVal = MATRIX_FACTORY.makeDense(tmpDim, tmpDim);
        if (myConfidence.compareTo(BigMath.ONE) == 0) {
            // Confidence == 1: no scaling needed.
            for (int ij = 0; ij < tmpDim; ij++) {
                retVal.set(ij, ij, myViews.get(ij).getReturnVariance());
            }
        } else {
            // Scale each view variance by the confidence factor.
            final double tmpScale = myConfidence.doubleValue();
            double tmpVar;
            for (int ij = 0; ij < tmpDim; ij++) {
                tmpVar = myViews.get(ij).getReturnVariance();
                retVal.set(ij, ij, PrimitiveMath.DIVIDE.invoke(tmpVar, tmpScale));
            }
        }
        return retVal.build();
    }
}
public class Gram { /** * Deactivates a callback handler for a given credential . * @ param cred the credential of the callback handler . * @ return the callback handler that was deactivated . Null , * if no callback handler is associated with the credential */ public static CallbackHandler deactivateCallbackHandler ( GSSCredential cred ) { } }
if ( cred == null ) { return null ; } CallbackHandler handler = ( CallbackHandler ) callbackHandlers . remove ( cred ) ; if ( handler == null ) { return null ; } handler . shutdown ( ) ; return handler ;
public class LogRecordBrowser {
    /**
     * Continues the list of the records in the process, filtered with
     * <code>recFilter</code>, starting after the given repository position.
     *
     * @param after the position to resume from; must belong to this repository
     * @param max maximum number of records to return
     * @param recFilter filter applied to each record
     * @return the record list positioned after {@code after}
     * @throws IllegalArgumentException if {@code after} is not a pointer from this repository
     */
    private OnePidRecordListImpl restartRecordsInProcess(final RepositoryPointer after, long max, final IInternalRecordFilter recFilter) {
        // Only pointers produced by this implementation carry the needed location data.
        if (!(after instanceof RepositoryPointerImpl)) {
            throw new IllegalArgumentException("Specified location does not belong to this repository.");
        }
        return new OnePidRecordListLocationImpl((RepositoryPointerImpl) after, max, recFilter);
    }
}
public class SpringLoadedPreProcessor {
    /**
     * Cache the Method objects that will be injected. Looks up each
     * ReflectiveInterceptor entry point exactly once, guarded by the
     * {@code prepared} flag so repeated calls are cheap.
     */
    private void ensurePreparedForInjection() {
        if (!prepared) {
            try {
                Class<ReflectiveInterceptor> clazz = ReflectiveInterceptor.class;
                // java.lang.Class interception points:
                method_jlcgdfs = clazz.getDeclaredMethod("jlClassGetDeclaredFields", Class.class);
                method_jlcgdf = clazz.getDeclaredMethod("jlClassGetDeclaredField", Class.class, String.class);
                method_jlcgf = clazz.getDeclaredMethod("jlClassGetField", Class.class, String.class);
                method_jlcgdms = clazz.getDeclaredMethod("jlClassGetDeclaredMethods", Class.class);
                method_jlcgdm = clazz.getDeclaredMethod("jlClassGetDeclaredMethod", Class.class, String.class, EMPTY_CLASS_ARRAY_CLAZZ);
                method_jlcgm = clazz.getDeclaredMethod("jlClassGetMethod", Class.class, String.class, EMPTY_CLASS_ARRAY_CLAZZ);
                method_jlcgdc = clazz.getDeclaredMethod("jlClassGetDeclaredConstructor", Class.class, EMPTY_CLASS_ARRAY_CLAZZ);
                method_jlcgc = clazz.getDeclaredMethod("jlClassGetConstructor", Class.class, EMPTY_CLASS_ARRAY_CLAZZ);
                method_jlcgmods = clazz.getDeclaredMethod("jlClassGetModifiers", Class.class);
                method_jlcgms = clazz.getDeclaredMethod("jlClassGetMethods", Class.class);
                method_jlcgdcs = clazz.getDeclaredMethod("jlClassGetDeclaredConstructors", Class.class);
                // java.lang.reflect.Field / Method interception points:
                method_jlrfg = clazz.getDeclaredMethod("jlrFieldGet", Field.class, Object.class);
                method_jlrfgl = clazz.getDeclaredMethod("jlrFieldGetLong", Field.class, Object.class);
                method_jlrmi = clazz.getDeclaredMethod("jlrMethodInvoke", Method.class, Object.class, Object[].class);
                // Serialization support:
                method_jloObjectStream_hasInitializerMethod = clazz.getDeclaredMethod("jlosHasStaticInitializer", Class.class);
            } catch (NoSuchMethodException nsme) {
                // All looked-up methods are declared on ReflectiveInterceptor in
                // this codebase, so this should be unreachable.
                throw new Impossible(nsme);
            }
            prepared = true;
        }
    }
}
public class StringValueUtils {
    /**
     * Converts the given <code>StringValue</code> into a lower case variant,
     * mutating its backing character array in place.
     *
     * @param string The string to convert to lower case.
     */
    public static void toLowerCase(StringValue string) {
        final char[] chars = string.getCharArray();
        final int len = string.length();
        // In-place per-char lowering; only the first len chars are valid data.
        for (int i = 0; i < len; i++) {
            chars[i] = Character.toLowerCase(chars[i]);
        }
    }
}
public class SCheckBox { /** * Set this control ' s value as it was submitted by the HTML post operation . * @ return The value the field was set to . */ public String getSFieldProperty ( String strFieldName ) { } }
String strValue = super . getSFieldProperty ( strFieldName ) ; if ( strValue == null ) if ( this . getParentScreen ( ) != null ) if ( DBConstants . SUBMIT . equalsIgnoreCase ( this . getParentScreen ( ) . getProperty ( DBParams . COMMAND ) ) ) strValue = DBConstants . NO ; // If you submit a checkbox that is off , the param is not passed ( null ) so I need a No . return strValue ;
public class EurekaUtils { /** * check to see if the instanceInfo record is of a server that is deployed within EC2 ( best effort check based * on assumptions of underlying id ) . This check could be for the local server or a remote server . * @ param instanceInfo * @ return true if the record contains an EC2 style " i - * " id */ public static boolean isInEc2 ( InstanceInfo instanceInfo ) { } }
if ( instanceInfo . getDataCenterInfo ( ) instanceof AmazonInfo ) { String instanceId = ( ( AmazonInfo ) instanceInfo . getDataCenterInfo ( ) ) . getId ( ) ; if ( instanceId != null && instanceId . startsWith ( "i-" ) ) { return true ; } } return false ;
public class CProductPersistenceImpl { /** * Removes the c product with the primary key from the database . Also notifies the appropriate model listeners . * @ param primaryKey the primary key of the c product * @ return the c product that was removed * @ throws NoSuchCProductException if a c product with the primary key could not be found */ @ Override public CProduct remove ( Serializable primaryKey ) throws NoSuchCProductException { } }
// Look the entity up in its own session; a miss raises NoSuchCProductException
// (debug-logged first), which is rethrown untouched by the catch chain.
// Any other failure is funneled through processException; the session is always
// closed in the finally block. The actual delete is delegated to remove(CProduct).
Session session = null ; try { session = openSession ( ) ; CProduct cProduct = ( CProduct ) session . get ( CProductImpl . class , primaryKey ) ; if ( cProduct == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchCProductException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return remove ( cProduct ) ; } catch ( NoSuchCProductException nsee ) { throw nsee ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; }
public class Subscription { /** * Specifies how many protections of a given type you can create . * @ param limits * Specifies how many protections of a given type you can create . */ public void setLimits ( java . util . Collection < Limit > limits ) { } }
if ( limits == null ) { this . limits = null ; return ; } this . limits = new java . util . ArrayList < Limit > ( limits ) ;
public class MetadataWriter { /** * Returns the modifiers for a specified field , including internal ones . * All method modifiers are defined in the JVM specification , table 4.4. */ private static int getFieldModifiers ( VariableElement var ) { } }
int modifiers = ElementUtil . fromModifierSet ( var . getModifiers ( ) ) ; if ( ElementUtil . isSynthetic ( var ) ) { modifiers |= ElementUtil . ACC_SYNTHETIC ; } if ( ElementUtil . isEnumConstant ( var ) ) { modifiers |= ElementUtil . ACC_ENUM ; } return modifiers ;
public class DataBucket { /** * { @ inheritDoc } */ @ Override public HashCode secureHash ( ) { } }
final Hasher code = StandardSettings . HASHFUNC . newHasher ( ) . putLong ( mBucketKey ) . putLong ( mLastBucketKey ) ; for ( int i = 0 ; i < mDatas . length ; i ++ ) { if ( mDatas [ i ] != null ) { code . putObject ( mDatas [ i ] , mDatas [ i ] . getFunnel ( ) ) ; } } return code . hash ( ) ;
public class NetworkClient { /** * Adds a peering to the specified network . * < p > Sample code : * < pre > < code > * try ( NetworkClient networkClient = NetworkClient . create ( ) ) { * ProjectGlobalNetworkName network = ProjectGlobalNetworkName . of ( " [ PROJECT ] " , " [ NETWORK ] " ) ; * NetworksAddPeeringRequest networksAddPeeringRequestResource = NetworksAddPeeringRequest . newBuilder ( ) . build ( ) ; * Operation response = networkClient . addPeeringNetwork ( network , networksAddPeeringRequestResource ) ; * < / code > < / pre > * @ param network Name of the network resource to add peering to . * @ param networksAddPeeringRequestResource * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation addPeeringNetwork ( ProjectGlobalNetworkName network , NetworksAddPeeringRequest networksAddPeeringRequestResource ) { } }
AddPeeringNetworkHttpRequest request = AddPeeringNetworkHttpRequest . newBuilder ( ) . setNetwork ( network == null ? null : network . toString ( ) ) . setNetworksAddPeeringRequestResource ( networksAddPeeringRequestResource ) . build ( ) ; return addPeeringNetwork ( request ) ;
public class AbstractCloud { /** * Gets first { @ link DockerSlaveTemplate } that has the matching { @ link Label } . */ @ CheckForNull public DockerSlaveTemplate getTemplate ( Label label ) { } }
List < DockerSlaveTemplate > labelTemplates = getTemplates ( label ) ; if ( ! labelTemplates . isEmpty ( ) ) { return labelTemplates . get ( 0 ) ; } return null ;
public class BatchKernelImpl { /** * @ return a new JobInstance for the given appName and JSL file . * Note : Inline JSL takes precedence over JSL within . war */ @ Override public WSJobInstance createJobInstance ( String appName , String jobXMLName , String submitter , String jsl , String correlationId ) { } }
JobInstanceEntity retMe = null ; retMe = getPersistenceManagerService ( ) . createJobInstance ( appName , jobXMLName , jsl , submitter , new Date ( ) ) ; publishEvent ( retMe , BatchEventsPublisher . TOPIC_INSTANCE_SUBMITTED , correlationId ) ; return retMe ;
public class RESTClient { /** * Old REST client uses new REST service */ public void useNewRESTServiceWithOldClient ( ) throws Exception { } }
// Demo flow: build a JAX-RS proxy for the *new* CustomerService from a model file,
// install transform interceptors so old-format payloads are converted in both
// directions, then exercise update + lookup with an old-schema Customer.
List < Object > providers = createJAXRSProviders ( ) ; com . example . customerservice . CustomerService customerService = JAXRSClientFactory . createFromModel ( "http://localhost:" + port + "/examples/direct/new-rest" , com . example . customerservice . CustomerService . class , "classpath:/model/CustomerService-jaxrs.xml" , providers , null ) ; // The outgoing old Customer data needs to be transformed for // the new service to understand it and the response from the new service // needs to be transformed for this old client to understand it . ClientConfiguration config = WebClient . getConfig ( customerService ) ; addTransformInterceptors ( config . getInInterceptors ( ) , config . getOutInterceptors ( ) , false ) ; System . out . println ( "Using new RESTful CustomerService with old Client" ) ; customer . v1 . Customer customer = createOldCustomer ( "Smith Old to New REST" ) ; customerService . updateCustomer ( customer ) ; customer = customerService . getCustomerByName ( "Smith Old to New REST" ) ; printOldCustomerDetails ( customer ) ;
public class TimeNode { /** * We return same reference value if matches or next one if does not match . * Then we start applying shifts . * This way we ensure same value is returned if no shift is requested . * @ param reference - reference value * @ param shiftsToApply - shifts to apply * @ return NearestValue instance , never null . Holds information on nearest ( forward ) value and shifts performed . */ @ VisibleForTesting NearestValue getNearestForwardValue ( final int reference , int shiftsToApply ) { } }
// Step 1: locate the starting index. If 'reference' is present, start there; otherwise
// use the first value greater than it (consuming one shift, since that already moved
// forward one position). If nothing is greater, record a wrap via 'shift' and start at
// index 0. Step 2: apply the remaining shifts one position at a time through
// getValueFromList, which also accumulates wrap-arounds into 'shift'.
// NOTE(review): order of operations here is load-bearing (shiftsToApply-- vs. the
// later loop); do not restructure without the cron-field tests.
final List < Integer > temporaryValues = new ArrayList < > ( this . values ) ; int index = 0 ; boolean foundGreater = false ; final AtomicInteger shift = new AtomicInteger ( 0 ) ; if ( ! temporaryValues . contains ( reference ) ) { for ( final Integer value : temporaryValues ) { if ( value > reference ) { index = temporaryValues . indexOf ( value ) ; shiftsToApply -- ; // we just moved a position ! foundGreater = true ; break ; } } if ( ! foundGreater ) { shift . incrementAndGet ( ) ; } } else { index = temporaryValues . indexOf ( reference ) ; } int value = temporaryValues . get ( index ) ; for ( int j = 0 ; j < shiftsToApply ; j ++ ) { value = getValueFromList ( temporaryValues , index + 1 , shift ) ; index = temporaryValues . indexOf ( value ) ; } return new NearestValue ( value , shift . get ( ) ) ;
public class A_CmsAjaxGallery { /** * Initializes the gallery dialog before redirecting . < p > * @ param wp the workplace object */ public static void initGallery ( CmsDialog wp ) { } }
// Derives the gallery type name from the current folder path (admin-tool link for
// new-style workplaces, request URI otherwise), strips a trailing '/', and stores it
// in the user settings — except for the generic "commons" folder, which is skipped.
// 1 . get " gallerytypename " by reading the folderpath galleryTypeName = null ; if ( wp . useNewStyle ( ) ) { galleryTypeName = CmsResource . getName ( CmsResource . getFolderPath ( wp . getAdminTool ( ) . getHandler ( ) . getLink ( ) ) ) ; } else { galleryTypeName = CmsResource . getName ( CmsResource . getFolderPath ( wp . getJsp ( ) . getRequestContext ( ) . getUri ( ) ) ) ; } if ( galleryTypeName . endsWith ( "/" ) ) { galleryTypeName = galleryTypeName . substring ( 0 , galleryTypeName . length ( ) - 1 ) ; } if ( ! galleryTypeName . equals ( "commons" ) ) { // 2 . set in user settings wp . getSettings ( ) . setGalleryType ( galleryTypeName ) ; }
public class ContractJavaFileManager { /** * Sets the list of paths associated with { @ code location } . * @ param location the affected location * @ param path a list of paths , or { @ code null } to reset to default */ @ Requires ( "location != null" ) public void setLocation ( Location location , List < ? extends File > path ) throws IOException { } }
// Pure delegation to the wrapped StandardJavaFileManager; no local state is touched.
fileManager . setLocation ( location , path ) ;
public class PowerMockito { /** * Enable static mocking for all methods of a class . * @ param type the class to enable static mocking */ public static synchronized void mockStatic ( Class < ? > type , Class < ? > ... types ) { } }
DefaultMockCreator . mock ( type , true , false , null , null , ( Method [ ] ) null ) ; if ( types != null && types . length > 0 ) { for ( Class < ? > aClass : types ) { DefaultMockCreator . mock ( aClass , true , false , null , null , ( Method [ ] ) null ) ; } }
public class EasyGcm {
    /**
     * Registers the application defined by a context activity to GCM in case the
     * registration has not been done already. The method can be called anytime, but
     * typically at app startup. The registration itself is guaranteed to only run once.
     *
     * @param context Activity belonging to the app being registered
     */
    private void onCreate(Context context) {
        // The check fails when: no network connection, the app is already registered,
        // or GooglePlayServices is unavailable.
        if (!GcmUtils.checkCanAndShouldRegister(context)) {
            return;
        }
        // Start a background service to register in a background thread.
        context.startService(GcmRegistrationService.createGcmRegistrationIntent(context));
    }
}
public class AddKeywordsUsingIncrementalBatchJob { /** * Runs the example . * @ param adWordsServices the services factory . * @ param session the session . * @ param adGroupId the ID of the ad group where keywords will be added . * @ throws BatchJobException if uploading operations or downloading results failed . * @ throws ApiException if the API request failed with one or more service errors . * @ throws RemoteException if the API request failed due to other errors . * @ throws InterruptedException if the thread was interrupted while sleeping between retries . * @ throws TimeoutException if the job did not complete after job status was polled { @ link * # MAX _ POLL _ ATTEMPTS } times . */ public static void runExample ( AdWordsServicesInterface adWordsServices , AdWordsSession session , Long adGroupId ) throws RemoteException , BatchJobException , InterruptedException , TimeoutException { } }
// Phase 1: create the batch job and an upload helper, then build keyword ADD
// operations (every 10th keyword is made deliberately invalid to demonstrate error
// handling) and upload them incrementally in KEYWORDS_PER_UPLOAD-sized chunks,
// carrying the BatchJobUploadStatus forward between chunks.
// Get the BatchJobService . BatchJobServiceInterface batchJobService = adWordsServices . get ( session , BatchJobServiceInterface . class ) ; BatchJobOperation addOp = new BatchJobOperation ( ) ; addOp . setOperator ( Operator . ADD ) ; addOp . setOperand ( new BatchJob ( ) ) ; BatchJob batchJob = batchJobService . mutate ( new BatchJobOperation [ ] { addOp } ) . getValue ( 0 ) ; System . out . printf ( "Created BatchJob with ID %d, status '%s' and upload URL %s.%n" , batchJob . getId ( ) , batchJob . getStatus ( ) , batchJob . getUploadUrl ( ) . getUrl ( ) ) ; // Create a BatchJobHelper for uploading operations . BatchJobHelper batchJobHelper = adWordsServices . getUtility ( session , BatchJobHelper . class ) ; BatchJobUploadStatus batchJobUploadStatus = new BatchJobUploadStatus ( 0 , URI . create ( batchJob . getUploadUrl ( ) . getUrl ( ) ) ) ; List < AdGroupCriterionOperation > operations = new ArrayList < > ( ) ; // Create AdGroupCriterionOperations to add keywords , and upload every 10 operations // incrementally . for ( int i = 0 ; i < NUMBER_OF_KEYWORDS_TO_ADD ; i ++ ) { // Create Keyword . String text = String . format ( "mars%d" , i ) ; // Make 10 % of keywords invalid to demonstrate error handling . if ( i % 10 == 0 ) { text = text + "!!!" ; } Keyword keyword = new Keyword ( ) ; keyword . setText ( text ) ; keyword . setMatchType ( KeywordMatchType . BROAD ) ; // Create BiddableAdGroupCriterion . BiddableAdGroupCriterion bagc = new BiddableAdGroupCriterion ( ) ; bagc . setAdGroupId ( adGroupId ) ; bagc . setCriterion ( keyword ) ; // Create AdGroupCriterionOperation . AdGroupCriterionOperation agco = new AdGroupCriterionOperation ( ) ; agco . setOperand ( bagc ) ; agco . setOperator ( Operator . ADD ) ; // Add to the list of operations . operations . add ( agco ) ; // If the current list of operations has reached KEYWORDS _ PER _ UPLOAD or this is the last // operation , upload the current list of operations . 
boolean isLastOperation = i == NUMBER_OF_KEYWORDS_TO_ADD - 1 ; if ( operations . size ( ) == KEYWORDS_PER_UPLOAD || isLastOperation ) { BatchJobUploadResponse uploadResponse = batchJobHelper . uploadIncrementalBatchJobOperations ( operations , isLastOperation , batchJobUploadStatus ) ; System . out . printf ( "Uploaded %d operations for batch job with ID %d.%n" , operations . size ( ) , batchJob . getId ( ) ) ; // Set the batch job upload status and clear the operations list in preparation for the // next upload . batchJobUploadStatus = uploadResponse . getBatchJobUploadStatus ( ) ; operations . clear ( ) ; } } // Poll for completion of the batch job using an exponential back off . int pollAttempts = 0 ; boolean isPending ; boolean wasCancelRequested = false ; Selector selector = new SelectorBuilder ( ) . fields ( BatchJobField . Id , BatchJobField . Status , BatchJobField . DownloadUrl , BatchJobField . ProcessingErrors , BatchJobField . ProgressStats ) . equalsId ( batchJob . getId ( ) ) . build ( ) ; do { long sleepSeconds = ( long ) Math . scalb ( 30 , pollAttempts ) ; System . out . printf ( "Sleeping %d seconds...%n" , sleepSeconds ) ; Thread . sleep ( sleepSeconds * 1000 ) ; batchJob = batchJobService . get ( selector ) . getEntries ( 0 ) ; System . out . printf ( "Batch job ID %d has status '%s'.%n" , batchJob . getId ( ) , batchJob . getStatus ( ) ) ; pollAttempts ++ ; isPending = PENDING_STATUSES . contains ( batchJob . getStatus ( ) ) ; // Optional : Cancel the job if it has not completed after polling MAX _ POLL _ ATTEMPTS // times . if ( isPending && ! wasCancelRequested && pollAttempts == MAX_POLL_ATTEMPTS ) { batchJob . setStatus ( BatchJobStatus . CANCELING ) ; BatchJobOperation batchJobSetOperation = new BatchJobOperation ( ) ; batchJobSetOperation . setOperand ( batchJob ) ; batchJobSetOperation . setOperator ( Operator . SET ) ; // Only request cancellation once per job . wasCancelRequested = true ; try { batchJob = batchJobService . 
mutate ( new BatchJobOperation [ ] { batchJobSetOperation } ) . getValue ( 0 ) ; System . out . printf ( "Requested cancellation of batch job with ID %d.%n" , batchJob . getId ( ) ) ; } catch ( ApiException e ) { if ( e . getErrors ( ) != null && e . getErrors ( ) . length > 0 && e . getErrors ( 0 ) instanceof BatchJobError ) { BatchJobError batchJobError = ( BatchJobError ) e . getErrors ( 0 ) ; if ( BatchJobErrorReason . INVALID_STATE_CHANGE . equals ( batchJobError . getReason ( ) ) ) { System . out . printf ( "Attempt to cancel batch job with ID %d was rejected because the job already " + "completed or was canceled." , batchJob . getId ( ) ) ; continue ; } } throw e ; } finally { // Reset the poll attempt counter to wait for cancellation . pollAttempts = 0 ; } } } while ( isPending && pollAttempts < MAX_POLL_ATTEMPTS ) ; if ( isPending ) { throw new TimeoutException ( "Job is still in pending state after polling " + MAX_POLL_ATTEMPTS + " times." ) ; } if ( batchJob . getProcessingErrors ( ) != null ) { int errorIndex = 0 ; for ( BatchJobProcessingError processingError : batchJob . getProcessingErrors ( ) ) { System . out . printf ( " Processing error [%d]: errorType=%s, trigger=%s, errorString=%s, fieldPath=%s" + ", reason=%s%n" , errorIndex ++ , processingError . getApiErrorType ( ) , processingError . getTrigger ( ) , processingError . getErrorString ( ) , processingError . getFieldPath ( ) , processingError . getReason ( ) ) ; } } else { System . out . println ( "No processing errors found." ) ; } if ( batchJob . getDownloadUrl ( ) != null && batchJob . getDownloadUrl ( ) . getUrl ( ) != null ) { BatchJobMutateResponse mutateResponse = batchJobHelper . downloadBatchJobMutateResponse ( batchJob . getDownloadUrl ( ) . getUrl ( ) ) ; System . out . printf ( "Downloaded results from %s:%n" , batchJob . getDownloadUrl ( ) . getUrl ( ) ) ; for ( MutateResult mutateResult : mutateResponse . getMutateResults ( ) ) { String outcome = mutateResult . 
getErrorList ( ) == null ? "SUCCESS" : "FAILURE" ; System . out . printf ( " Operation [%d] - %s%n" , mutateResult . getIndex ( ) , outcome ) ; } } else { System . out . println ( "No results available for download." ) ; }
public class TaskSchedulerFactory { /** * Gets an instance of the { @ link TaskScheduler } with the specified type and ensures that it is started . If * the type is unknown an instance of the default { @ link TaskScheduler } will be returned . * @ param type the type of the { @ link TaskScheduler } * @ param name the name associated threads created by the { @ link TaskScheduler } * @ param < K > the type of the key for the { @ link ScheduledTask } * @ param < T > the type of the { @ link ScheduledTask } * @ return an instance of { @ link TaskScheduler } */ public static < K , T extends ScheduledTask < K > > TaskScheduler < K , T > get ( String type , Optional < String > name ) { } }
TaskSchedulerType taskSchedulerType = TaskSchedulerType . parse ( type ) ; return get ( taskSchedulerType , name ) ;
public class Director { /** * Installs the features found in the inputed featureIds collection * @ param featureIds the feature ids * @ param fromDir where the features are located * @ param toExtension location of a product extension * @ param acceptLicense if license is accepted * @ param offlineOnly if features should be installed from local source only * @ throws InstallException */ public void installFeature ( Collection < String > featureIds , File fromDir , String toExtension , boolean acceptLicense , boolean offlineOnly ) throws InstallException { } }
// Resolve the requested features against the local directory first; features that
// could not be resolved locally fall back to installFeatures(...) when online is
// allowed. Resolved assets (plus any triggered auto-features) are queued on
// this.installAssets. If nothing at all was queued, everything was already
// installed, so ALREADY_EXISTS is raised.
// NOTE(review): the final ALREADY_EXISTS check ignores whether the online
// installFeatures(...) fallback did work — presumably intentional; confirm.
// fireProgressEvent ( InstallProgressEvent . CHECK , 1 , Messages . INSTALL _ KERNEL _ MESSAGES . getLogMessage ( " STATE _ CHECKING " ) ) ; this . installAssets = new ArrayList < List < InstallAsset > > ( ) ; ArrayList < InstallAsset > installAssets = new ArrayList < InstallAsset > ( ) ; ArrayList < String > unresolvedFeatures = new ArrayList < String > ( ) ; Collection < ESAAsset > autoFeatures = getResolveDirector ( ) . getAutoFeature ( fromDir , toExtension ) ; getResolveDirector ( ) . resolve ( featureIds , fromDir , toExtension , offlineOnly , installAssets , unresolvedFeatures ) ; if ( ! offlineOnly && ! unresolvedFeatures . isEmpty ( ) ) { log ( Level . FINEST , "installFeature() determined unresolved features: " + unresolvedFeatures . toString ( ) + " from " + fromDir . getAbsolutePath ( ) ) ; installFeatures ( unresolvedFeatures , toExtension , acceptLicense , null , null , 5 ) ; } if ( ! installAssets . isEmpty ( ) ) { getResolveDirector ( ) . resolveAutoFeatures ( autoFeatures , installAssets ) ; this . installAssets . add ( installAssets ) ; } if ( this . installAssets . isEmpty ( ) ) { throw ExceptionUtils . createByKey ( InstallException . ALREADY_EXISTS , "ALREADY_INSTALLED" , featureIds . toString ( ) ) ; }
public class JaasSubject { /** * A convenience method , calls * < code > JaasSubject . getJaasSubject ( ) . runAs ( ) < / code > . */ public static Object doAs ( Subject subject , PrivilegedAction action ) { } }
// Static convenience wrapper: fetch the singleton JaasSubject and delegate.
return JaasSubject . getJaasSubject ( ) . runAs ( subject , action ) ;
public class RegisterWebAppVisitorWC { /** * Creates a default context that will be used for all following * registrations , sets the context params and registers a resource for root * of war . * @ throws NullArgumentException if web app is null * @ see WebAppVisitor # visit ( org . ops4j . pax . web . extender . war . internal . model . WebApp ) */ public void visit ( final WebApp webApp ) { } }
// Registration sequence (order matters): build a resource-delegating class loader
// over the bundle and its wired bundles, create the http context, then register —
// context params, login configs, session timeout/cookie config, servlet container
// initializers, connectors/virtual hosts, jetty-web.xml, welcome files, the "/"
// default resource, JSP support, and finally the JSP config descriptor. Most
// individual registrations swallow-and-log failures so one bad item does not
// abort the whole web app deployment.
// NOTE(review): bundleClassLoader is assigned twice; the first BundleClassLoader
// assignment is immediately overwritten — presumably leftover code; confirm.
if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "visiting webapp: {}" , webApp ) ; } NullArgumentException . validateNotNull ( webApp , "Web app" ) ; bundleClassLoader = new BundleClassLoader ( webApp . getBundle ( ) ) ; Set < Bundle > wiredBundles = ClassPathUtil . getBundlesInClassSpace ( webApp . getBundle ( ) , new LinkedHashSet < > ( ) ) ; ArrayList < Bundle > bundles = new ArrayList < > ( ) ; bundles . add ( webApp . getBundle ( ) ) ; bundles . addAll ( wiredBundles ) ; bundleClassLoader = new ResourceDelegatingBundleClassLoader ( bundles ) ; httpContext = new WebAppWebContainerContext ( webContainer . createDefaultHttpContext ( ) , webApp . getRootPath ( ) , webApp . getBundle ( ) , webApp . getMimeMappings ( ) ) ; webApp . setHttpContext ( httpContext ) ; // CHECKSTYLE : OFF try { webContainer . setContextParam ( RegisterWebAppVisitorHS . convertInitParams ( webApp . getContextParams ( ) ) , httpContext ) ; } catch ( Exception ignore ) { LOG . error ( REGISTRATION_EXCEPTION_SKIPPING , ignore ) ; } // CHECKSTYLE : ON // set login Config PAXWEB - 210 if ( webApp . getLoginConfigs ( ) != null ) { for ( WebAppLoginConfig loginConfig : webApp . getLoginConfigs ( ) ) { visit ( loginConfig ) ; // TODO : what about more than one login // config ? shouldn ' t it be just one ? } } // CHECKSTYLE : OFF // set session timeout if ( webApp . getSessionTimeout ( ) != null ) { try { webContainer . setSessionTimeout ( Integer . parseInt ( webApp . getSessionTimeout ( ) ) , httpContext ) ; } catch ( Exception ignore ) { LOG . error ( REGISTRATION_EXCEPTION_SKIPPING , ignore ) ; } } // CHECKSTYLE : ON WebAppCookieConfig scc = webApp . getSessionCookieConfig ( ) ; if ( scc != null ) { webContainer . setSessionCookieConfig ( scc . getDomain ( ) , scc . getName ( ) , scc . getHttpOnly ( ) , scc . getSecure ( ) , scc . getPath ( ) , scc . getMaxAge ( ) , httpContext ) ; } for ( WebAppServletContainerInitializer servletContainerInitializer : webApp . 
getServletContainerInitializers ( ) ) { webContainer . registerServletContainerInitializer ( servletContainerInitializer . getServletContainerInitializer ( ) , servletContainerInitializer . getClasses ( ) , httpContext ) ; } ServletContainerInitializer initializer = dependencyHolder . getServletContainerInitializer ( ) ; if ( initializer != null ) { webContainer . registerServletContainerInitializer ( initializer , null , httpContext ) ; } webContainer . setConnectorsAndVirtualHosts ( webApp . getConnectorList ( ) , webApp . getVirtualHostList ( ) , httpContext ) ; if ( webApp . getJettyWebXmlURL ( ) != null ) { webContainer . registerJettyWebXml ( webApp . getJettyWebXmlURL ( ) , httpContext ) ; } LOG . debug ( "webcontainer begin!" ) ; webContainer . begin ( httpContext ) ; // CHECKSTYLE : OFF LOG . debug ( "registering welcome files" ) ; // register welcome files try { final String [ ] welcomeFiles = webApp . getWelcomeFiles ( ) ; if ( welcomeFiles != null && welcomeFiles . length > 0 ) { webContainer . registerWelcomeFiles ( welcomeFiles , true , // redirect httpContext ) ; } } catch ( Exception ignore ) { LOG . error ( REGISTRATION_EXCEPTION_SKIPPING , ignore ) ; } LOG . debug ( "registering default resources" ) ; // register resource jspServlet try { webContainer . registerResources ( "/" , "default" , httpContext ) ; } catch ( Exception ignore ) { LOG . error ( REGISTRATION_EXCEPTION_SKIPPING , ignore ) ; } LOG . debug ( "registering jsps" ) ; // register JSP support try { webContainer . registerJsps ( // Fix for PAXWEB - 208 new String [ ] { "*.jsp" , "*.jspx" , "*.jspf" , "*.xsp" , "*.JSP" , "*.JSPX" , "*.JSPF" , "*.XSP" } , httpContext ) ; } catch ( UnsupportedOperationException ignore ) { LOG . warn ( ignore . getMessage ( ) ) ; } catch ( Exception ignore ) { LOG . error ( REGISTRATION_EXCEPTION_SKIPPING , ignore ) ; } // CHECKSTYLE : ON WebAppJspConfig jspConfigDescriptor = webApp . 
getJspConfigDescriptor ( ) ; if ( jspConfigDescriptor != null ) { for ( WebAppTagLib webAppTagLib : jspConfigDescriptor . getTagLibConfigs ( ) ) { webContainer . registerJspConfigTagLibs ( webAppTagLib . getTagLibLocation ( ) , webAppTagLib . getTagLibUri ( ) , httpContext ) ; } for ( WebAppJspPropertyGroup webAppJspPropertyGroup : jspConfigDescriptor . getJspPropertyGroups ( ) ) { Boolean elIgnored = webAppJspPropertyGroup . getElIgnored ( ) ; List < String > includeCodes = webAppJspPropertyGroup . getIncludeCodes ( ) ; List < String > includePreludes = webAppJspPropertyGroup . getIncludePreludes ( ) ; Boolean isXml = webAppJspPropertyGroup . getIsXml ( ) ; Boolean scriptingInvalid = webAppJspPropertyGroup . getScriptingInvalid ( ) ; List < String > urlPatterns = webAppJspPropertyGroup . getUrlPatterns ( ) ; webContainer . registerJspConfigPropertyGroup ( includeCodes , includePreludes , urlPatterns , elIgnored , scriptingInvalid , isXml , httpContext ) ; } }
public class FilterProcessor { /** * This method determines whether the supplied endpoint should be * excluded . * @ param endpoint The endpoint to check * @ return Whether the supplied endpoint should be excluded */ public boolean isExcluded ( String endpoint ) { } }
for ( int i = 0 ; i < exclusions . size ( ) ; i ++ ) { if ( exclusions . get ( i ) . test ( endpoint ) ) { return true ; } } return false ;
public class DepTreeNode { /** * Method called when this object is de - serialized * @ param in The { @ link ObjectInputStream } to read from * @ throws IOException * @ throws ClassNotFoundException */ private void readObject ( ObjectInputStream in ) throws IOException , ClassNotFoundException { } }
// Call the default implementation to de - serialize our object in . defaultReadObject ( ) ; parent = new WeakReference < DepTreeNode > ( null ) ; // restore parent reference on all children if ( children != null ) { for ( Entry < String , DepTreeNode > entry : children . entrySet ( ) ) { entry . getValue ( ) . setParent ( this ) ; } }
public class Continuation { /** * Do not use - - for internal use only . */ public void reset ( ) { } }
firstPointer = null ; nextLoadPointer = null ; nextUnloadPointer = null ; firstCutpointPointer = null ; mode = MODE_NORMAL ;
public class OpenBitSet { /** * returns 1 if the bit is set , 0 if not . * The index should be less than the OpenBitSet size */ public int getBit ( int index ) { } }
assert index >= 0 && index < numBits ; int i = index >> 6 ; // div 64 int bit = index & 0x3f ; // mod 64 return ( ( int ) ( bits [ i ] >>> bit ) ) & 0x01 ;
public class SQSSession { /** * Creates a < code > QueueReceiver < / code > for the specified queue . Does not * support messageSelector . It will drop anything in messageSelector . * @ param queue * a queue destination * @ param messageSelector * @ return new message receiver * @ throws JMSException * If session is closed */ @ Override public QueueReceiver createReceiver ( Queue queue , String messageSelector ) throws JMSException { } }
// messageSelector is intentionally ignored (unsupported, per the contract above);
// delegate to the selector-less overload.
return createReceiver ( queue ) ;
public class MapperImplSourceGenerator { /** * Apprends the constructor of the MapperImpl class for a dedicated class of * { @ link fr . javatronic . damapping . processor . model . InstantiationType # CONSTRUCTOR } which constructor has at least one * parameter . */ private void appendConstructor ( DAClassWriter < DAFileWriter > classWriter , DASourceClass sourceClass , DAMethod dedicatedClassConstructor ) throws IOException { } }
// No-op for a no-arg constructor (the generated class's implicit constructor suffices).
// Otherwise emit a public constructor mirroring the source constructor's parameters,
// whose single statement news up the dedicated class and stores it in the
// DEDICATED_CLASS_INSTANCE_PROPERTY_NAME field. The writer chain is order-sensitive:
// start() ... end() pairs must stay balanced.
if ( dedicatedClassConstructor . getParameters ( ) . isEmpty ( ) ) { return ; } // constructor with the same parameters as the source class constructor DAConstructorWriter < ? > constructorWriter = classWriter . newConstructor ( ) . withAnnotations ( computeConstructorAnnotations ( sourceClass ) ) . withModifiers ( DAModifier . PUBLIC ) . withParams ( dedicatedClassConstructor . getParameters ( ) ) . start ( ) ; constructorWriter . newStatement ( ) . start ( ) . append ( "this." ) . append ( DEDICATED_CLASS_INSTANCE_PROPERTY_NAME ) . append ( " = " ) . append ( "new " ) . append ( sourceClass . getType ( ) . getSimpleName ( ) ) . appendParamValues ( dedicatedClassConstructor . getParameters ( ) ) . end ( ) ; constructorWriter . end ( ) ;
public class Converter { /** * TODO ( kak ) : Make this method final */ public Converter < B , A > reverse ( ) { } }
// Racy single-check lazy init: under contention the ReverseConverter may be built
// more than once, but construction appears side-effect-free so any copy is valid
// (the Guava @LazyInit idiom — presumably the 'reverse' field is so annotated;
// confirm before changing). Do NOT "fix" this with synchronization.
Converter < B , A > result = reverse ; return ( result == null ) ? reverse = new ReverseConverter < A , B > ( this ) : result ;
public class MetaClassHelper {
    /**
     * Converts a String into a standard JavaBeans property name.
     *
     * <p>Names starting with a digit are returned unchanged; everything else is
     * decapitalized per the JavaBeans convention via
     * {@link java.beans.Introspector#decapitalize(String)} (which leaves names with
     * two leading capitals, e.g. {@code "URL"}, intact).
     *
     * @param prop the original name
     * @return the converted name; {@code null} or the empty string are returned as-is
     */
    public static String convertPropertyName(String prop) {
        // Guard: the original charAt(0) call threw StringIndexOutOfBoundsException
        // for an empty name (and NPE for null); neither has anything to convert.
        if (prop == null || prop.isEmpty()) {
            return prop;
        }
        if (Character.isDigit(prop.charAt(0))) {
            return prop;
        }
        return java.beans.Introspector.decapitalize(prop);
    }
}
public class CPFriendlyURLEntryPersistenceImpl { /** * Returns the last cp friendly url entry in the ordered set where groupId = & # 63 ; and classNameId = & # 63 ; and classPK = & # 63 ; and main = & # 63 ; . * @ param groupId the group ID * @ param classNameId the class name ID * @ param classPK the class pk * @ param main the main * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp friendly url entry * @ throws NoSuchCPFriendlyURLEntryException if a matching cp friendly url entry could not be found */ @ Override public CPFriendlyURLEntry findByG_C_C_M_Last ( long groupId , long classNameId , long classPK , boolean main , OrderByComparator < CPFriendlyURLEntry > orderByComparator ) throws NoSuchCPFriendlyURLEntryException { } }
// Delegate to the non-throwing fetch variant; a null result is converted into a
// NoSuchCPFriendlyURLEntryException whose message lists all four key parts.
CPFriendlyURLEntry cpFriendlyURLEntry = fetchByG_C_C_M_Last ( groupId , classNameId , classPK , main , orderByComparator ) ; if ( cpFriendlyURLEntry != null ) { return cpFriendlyURLEntry ; } StringBundler msg = new StringBundler ( 10 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", classNameId=" ) ; msg . append ( classNameId ) ; msg . append ( ", classPK=" ) ; msg . append ( classPK ) ; msg . append ( ", main=" ) ; msg . append ( main ) ; msg . append ( "}" ) ; throw new NoSuchCPFriendlyURLEntryException ( msg . toString ( ) ) ;
public class ContentElement { /** * Flush the tables . * @ param util an XML util * @ param writer destination * @ throws IOException if the tables were not flushed */ public void flushTables ( final XMLUtil util , final ZipUTF8Writer writer ) throws IOException { } }
// Resumes writing from this.flushPosition: the partially-flushed table gets its
// remaining rows (starting after the last flushed row index), then every later
// table is written in full. No-op when there are no tables.
this . ensureContentBegin ( util , writer ) ; final int lastTableIndex = this . tables . size ( ) - 1 ; if ( lastTableIndex < 0 ) return ; int tableIndex = this . flushPosition . getTableIndex ( ) ; Table table = this . tables . get ( tableIndex ) ; table . flushRemainingRowsFrom ( util , writer , this . flushPosition . getLastRowIndex ( ) + 1 ) ; tableIndex ++ ; while ( tableIndex <= lastTableIndex ) { table = this . tables . get ( tableIndex ) ; table . appendXMLToContentEntry ( util , writer ) ; tableIndex ++ ; }
public class DkimAttributes { /** * A set of unique strings that you use to create a set of CNAME records that you add to the DNS configuration for * your domain . When Amazon Pinpoint detects these records in the DNS configuration for your domain , the DKIM * authentication process is complete . Amazon Pinpoint usually detects these records within about 72 hours of adding * them to the DNS configuration for your domain . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setTokens ( java . util . Collection ) } or { @ link # withTokens ( java . util . Collection ) } if you want to override the * existing values . * @ param tokens * A set of unique strings that you use to create a set of CNAME records that you add to the DNS * configuration for your domain . When Amazon Pinpoint detects these records in the DNS configuration for * your domain , the DKIM authentication process is complete . Amazon Pinpoint usually detects these records * within about 72 hours of adding them to the DNS configuration for your domain . * @ return Returns a reference to this object so that method calls can be chained together . */ public DkimAttributes withTokens ( String ... tokens ) { } }
if ( this . tokens == null ) { setTokens ( new java . util . ArrayList < String > ( tokens . length ) ) ; } for ( String ele : tokens ) { this . tokens . add ( ele ) ; } return this ;
public class DataSetProvider { /** * Private methods */ private Format inferFormat ( String dataFileName ) { } }
Format format = Format . inferFromFile ( dataFileName ) ; if ( Format . UNSUPPORTED . equals ( format ) ) { throw new UnsupportedDataFormatException ( "File " + dataFileName + " is not supported as data set format." ) ; } return format ;
public class SimpleChannelPool { /** * Adds the channel back to the pool only if the channel is healthy . * @ param channel the channel to put back to the pool * @ param promise offer operation promise . * @ param future the future that contains information fif channel is healthy or not . * @ throws Exception in case when failed to notify handler about release operation . */ private void releaseAndOfferIfHealthy ( Channel channel , Promise < Void > promise , Future < Boolean > future ) throws Exception { } }
if ( future . getNow ( ) ) { // channel turns out to be healthy , offering and releasing it . releaseAndOffer ( channel , promise ) ; } else { // channel not healthy , just releasing it . handler . channelReleased ( channel ) ; promise . setSuccess ( null ) ; }
public class DefaultPDUReader { /** * / * ( non - Javadoc ) * @ see org . jsmpp . PDUReader # readPDUHeader ( java . io . DataInputStream ) */ public Command readPDUHeader ( DataInputStream in ) throws InvalidCommandLengthException , IOException { } }
Command header = new Command ( ) ; header . setCommandLength ( in . readInt ( ) ) ; if ( header . getCommandLength ( ) < 16 ) { // command length too short , read the left dump anyway byte [ ] dump = new byte [ header . getCommandLength ( ) ] ; if ( header . getCommandLength ( ) >= 4 ) { in . read ( dump , 4 , header . getCommandLength ( ) - 4 ) ; } throw new InvalidCommandLengthException ( "Command length " + header . getCommandLength ( ) + " is too short" ) ; } header . setCommandId ( in . readInt ( ) ) ; header . setCommandStatus ( in . readInt ( ) ) ; header . setSequenceNumber ( in . readInt ( ) ) ; return header ;
public class ProxyBranchImpl { /** * Stop the C Timer . */ public void cancelTimer ( ) { } }
synchronized ( cTimerLock ) { if ( proxyTimeoutTask != null && proxyBranchTimerStarted ) { proxyTimeoutTask . cancel ( ) ; proxyTimeoutTask = null ; proxyBranchTimerStarted = false ; } }
public class ContextTools {

    /**
     * Get a plain text context that uses {@code ^} characters in a new line as a marker of the
     * given string region. Ignores {@link #setEscapeHtml(boolean)}.
     *
     * @param fromPos start offset (inclusive) of the region to mark in {@code contents}
     * @param toPos end offset of the region to mark
     * @param contents the text to take the context from
     * @return the context snippet, a newline, then an aligned row of {@code ^} markers
     * @since 2.3
     */
    public String getPlainTextContext(int fromPos, int toPos, String contents) {
        // Flatten all line breaks and tabs to spaces so the marker row below
        // lines up column-for-column with the text row.
        String text = contents.replace('\n', ' ').replace('\r', ' ').replace('\t', ' ');
        // calculate context region:
        int startContent = fromPos - contextSize;
        String prefix = "...";
        String postfix = "...";
        String markerPrefix = " ";
        if (startContent < 0) {
            // Region starts close to the beginning: no leading ellipsis needed.
            prefix = "";
            markerPrefix = "";
            startContent = 0;
        }
        int endContent = toPos + contextSize;
        if (endContent > text.length()) {
            // Region ends close to the end: no trailing ellipsis needed.
            postfix = "";
            endContent = text.length();
        }
        // Marker row sized to text plus prefix so the same substring bounds
        // used for the text can be applied to the marker below.
        StringBuilder marker = getMarker(fromPos, toPos, text.length() + prefix.length());
        // now build context string plus marker:
        return prefix + text.substring(startContent, endContent) + postfix + '\n'
            + markerPrefix + marker.substring(startContent, endContent);
    }
}
public class Buffer {

    /**
     * Advances the end and returns the character at this position.
     *
     * @return character or Scanner.EOF
     * @throws IOException if reading from the underlying source fails
     */
    public int read() throws IOException {
        if (end == endFilled) {
            // The current page is exhausted; ask the page store for more data.
            // pages.read() result codes (per the switch below): -1 = source
            // drained, 0 = more data in the current page, 1 = advance page.
            switch (pages.read(endPageIdx, endFilled)) {
            case -1:
                // Underlying source is drained: remember EOF and report it.
                eof = true;
                return Scanner.EOF;
            case 0:
                // More data arrived in the same page; refresh the fill mark.
                endFilled = pages.getFilled(endPageIdx);
                break;
            case 1:
                // Current page is full; move on to the next page.
                endPageIdx++;
                end = 0;
                endPage = pages.get(endPageIdx);
                endFilled = pages.getFilled(endPageIdx);
                break;
            default:
                // pages.read() must only return -1, 0 or 1.
                throw new RuntimeException();
            }
        }
        return endPage[end++];
    }
}
public class CommonOps_DDF3 { /** * Performs an element by element scalar multiplication . < br > * < br > * b < sub > i < / sub > = & alpha ; * a < sub > i < / sub > * @ param alpha the amount each element is multiplied by . * @ param a The vector that is to be scaled . Not modified . * @ param b Where the scaled matrix is stored . Modified . */ public static void scale ( double alpha , DMatrix3 a , DMatrix3 b ) { } }
b . a1 = a . a1 * alpha ; b . a2 = a . a2 * alpha ; b . a3 = a . a3 * alpha ;
public class Assignment { /** * get workers for every supervisorId ( node ) * @ param supervisorId supervisor * @ return Map [ Integer , WorkerSlot ] */ public Map < Integer , ResourceWorkerSlot > getTaskToNodePortbyNode ( String supervisorId ) { } }
Map < Integer , ResourceWorkerSlot > result = new HashMap < > ( ) ; for ( ResourceWorkerSlot worker : workers ) { if ( worker . getNodeId ( ) . equals ( supervisorId ) ) { result . put ( worker . getPort ( ) , worker ) ; } } return result ;
public class LspGetq { /** * lsp _ stability - check stability of lsp coefficients */ public static void lsp_stability ( float buf [ ] /* in / out : LSP parameters */ ) { } }
int j ; float diff , tmp ; for ( j = 0 ; j < LD8KConstants . M - 1 ; j ++ ) { diff = buf [ j + 1 ] - buf [ j ] ; if ( diff < ( float ) 0. ) { tmp = buf [ j + 1 ] ; buf [ j + 1 ] = buf [ j ] ; buf [ j ] = tmp ; } } if ( buf [ 0 ] < LD8KConstants . L_LIMIT ) { buf [ 0 ] = LD8KConstants . L_LIMIT ; System . out . println ( "warning LSP Low \n" ) ; } for ( j = 0 ; j < LD8KConstants . M - 1 ; j ++ ) { diff = buf [ j + 1 ] - buf [ j ] ; if ( diff < LD8KConstants . GAP3 ) { buf [ j + 1 ] = buf [ j ] + LD8KConstants . GAP3 ; } } if ( buf [ LD8KConstants . M - 1 ] > LD8KConstants . M_LIMIT ) { buf [ LD8KConstants . M - 1 ] = LD8KConstants . M_LIMIT ; System . out . println ( "warning LSP High \n" ) ; } return ;
public class EnableOnValidHandler {

    /**
     * Set the field or file that owns this listener.
     *
     * @param owner My owner.
     */
    public void setOwner(ListenerOwner owner) {
        super.setOwner(owner);
        if (owner == null)
            return;
        // Resolve the target field by name now that an owner is available.
        if (fieldName != null)
            m_fldTarget = this.getOwner().getField(fieldName);
        if (m_fldTarget != null)
            if (m_fldTarget.getRecord() != this.getOwner())    // If field is not in this file, remember to remove it
                m_fldTarget.addListener(new FieldRemoveBOnCloseHandler(this));
        // Apply the initial enabled state based on the owner's current edit mode.
        if ((this.getOwner().getEditMode() == DBConstants.EDIT_CURRENT)
                || (this.getOwner().getEditMode() == DBConstants.EDIT_IN_PROGRESS))
            this.setEnabled(m_bEnableOnValid);
        if (this.getOwner().getEditMode() == DBConstants.EDIT_ADD)
            this.setEnabled(m_bEnableOnNew);
    }
}
public class LogMetadata { /** * Extend a metadata marker with another key - value pair . * @ param key the key to add * @ param value the value for the key */ public LogMetadata and ( String key , Object value ) { } }
metadata . put ( key , value ) ; return this ;
public class Coercions { /** * Coerces a value to a Boolean */ public static Boolean coerceToBoolean ( Object pValue , Logger pLogger ) throws ELException { } }
if ( pValue == null || "" . equals ( pValue ) ) { return Boolean . FALSE ; } else if ( pValue instanceof Boolean ) { return ( Boolean ) pValue ; } else if ( pValue instanceof String ) { String str = ( String ) pValue ; try { return Boolean . valueOf ( str ) ; } catch ( Exception exc ) { if ( pLogger . isLoggingError ( ) ) { pLogger . logError ( Constants . STRING_TO_BOOLEAN , exc , ( String ) pValue ) ; } return Boolean . FALSE ; } } else { if ( pLogger . isLoggingError ( ) ) { pLogger . logError ( Constants . COERCE_TO_BOOLEAN , pValue . getClass ( ) . getName ( ) ) ; } return Boolean . TRUE ; }
public class BootiqueSarlcMain { /** * Create the compiler runtime . * @ param args the command line arguments . * @ return the runtime . */ @ SuppressWarnings ( "static-method" ) protected BQRuntime createRuntime ( String ... args ) { } }
SARLStandaloneSetup . doPreSetup ( ) ; final BQRuntime runtime = Bootique . app ( args ) . autoLoadModules ( ) . createRuntime ( ) ; SARLStandaloneSetup . doPostSetup ( runtime . getInstance ( Injector . class ) ) ; return runtime ;
public class IntDoubleSortedMap { /** * / * ( non - Javadoc ) * @ see edu . jhu . util . vector . IntDoubleMap # get ( int ) */ @ Override public double get ( int idx ) { } }
int i = Arrays . binarySearch ( indices , 0 , used , idx ) ; if ( i < 0 ) { throw new IllegalArgumentException ( "This map does not contain the key: " + idx ) ; } return values [ i ] ;
public class LabelDictionary { /** * Adds a new label to the dictionary if it has not been added yet . Returns the ID of the new * label in the dictionary . * @ param label add this label to the dictionary if it does not exist yet * @ return ID of label in the dictionary */ public int store ( String label ) { } }
if ( StrInt . containsKey ( label ) ) { return ( StrInt . get ( label ) . intValue ( ) ) ; } else if ( ! newLabelsAllowed ) { return KEY_DUMMY_LABEL ; } else { // store label Integer intKey = new Integer ( count ++ ) ; StrInt . put ( label , intKey ) ; IntStr . put ( intKey , label ) ; return intKey . intValue ( ) ; }
public class CmsGallerySearch {

    /**
     * Searches by structure id.<p>
     *
     * @param path the resource path
     * @param locale the locale for which the search result should be returned
     * @return the search result
     * @throws CmsException if something goes wrong
     */
    public CmsGallerySearchResult searchByPath(String path, Locale locale) throws CmsException {
        // Try to locate the resource in the search index first.
        I_CmsSearchDocument sDoc = m_index.getDocument(CmsSearchField.FIELD_PATH, path);
        CmsGallerySearchResult result = null;
        if ((sDoc != null) && (sDoc.getDocument() != null)) {
            // Found in the index: build the result from the index document.
            result = new CmsGallerySearchResult(sDoc, m_cms, 100, locale);
        } else {
            // Not indexed (yet): fall back to reading the resource directly,
            // ignoring expiration so released/expired resources are still found.
            CmsResource res = m_cms.readResource(path, CmsResourceFilter.IGNORE_EXPIRATION);
            result = new CmsGallerySearchResult(m_cms, res);
        }
        return result;
    }
}
public class Service { /** * Get the value of the parameter with the given name belonging to this service as an * integer . If the parameter is not found , the given default value is returned . If the * parameter is found but cannot be converted to an integer , an * IllegalArgumentException is thrown . * @ param paramName Name of parameter to find . * @ param defaultValue Value to return if parameter is not defined . * @ return Defined or default value . */ public int getParamInt ( String paramName , int defaultValue ) { } }
Object paramValue = getParam ( paramName ) ; if ( paramValue == null ) { return defaultValue ; } try { return Integer . parseInt ( paramValue . toString ( ) ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Value for parameter '" + paramName + "' must be an integer: " + paramValue ) ; }
public class DataSet { /** * Emits a DataSet using an { @ link OutputFormat } . This method adds a data sink to the program . * Programs may have multiple data sinks . A DataSet may also have multiple consumers ( data sinks * or transformations ) at the same time . * @ param outputFormat The OutputFormat to process the DataSet . * @ return The DataSink that processes the DataSet . * @ see OutputFormat * @ see DataSink */ public DataSink < T > output ( OutputFormat < T > outputFormat ) { } }
Validate . notNull ( outputFormat ) ; // configure the type if needed if ( outputFormat instanceof InputTypeConfigurable ) { ( ( InputTypeConfigurable ) outputFormat ) . setInputType ( this . type ) ; } DataSink < T > sink = new DataSink < T > ( this , outputFormat , this . type ) ; this . context . registerDataSink ( sink ) ; return sink ;
public class BaseMessageItemStream { /** * Set the default limits for this itemstream */ public synchronized void setDefaultDestLimits ( ) throws MessageStoreException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "setDefaultDestLimits" ) ; // Defaults are based on those defined to the ME , the low is 80 % of the high // Use setDestLimits ( ) to set the initial limits / watermarks ( 510343) long destHighMsgs = mp . getHighMessageThreshold ( ) ; setDestLimits ( destHighMsgs , ( destHighMsgs * 8 ) / 10 ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "setDefaultDestLimits" ) ;
public class TioWebsocketMsgHandler {

    /**
     * receive bytes
     *
     * @param wsRequest wsRequest
     * @param bytes bytes
     * @param channelContext channelContext
     * @return anyObject (always null here; dispatch happens via reflection)
     * @throws Exception e
     */
    @Override
    public Object onBytes(WsRequest wsRequest, byte[] bytes, ChannelContext channelContext)
            throws Exception {
        log.debug("onBytes");
        TioWebsocketMethodMapper onBytes = methods.getOnBytes();
        if (onBytes != null) {
            // A dedicated onBytes handler is registered: invoke it reflectively.
            // NOTE(review): arguments are passed as (channelContext, bytes) -
            // the mapped method must declare its parameters in that order.
            onBytes.getMethod().invoke(onBytes.getInstance(), channelContext, bytes);
        } else {
            // No direct handler: fall back to the onBeforeBytes mapper, which
            // converts the raw bytes into a TioWebsocketRequest for dispatch.
            TioWebsocketMethodMapper onBeforeBytes = methods.getOnBeforeBytes();
            if (onBeforeBytes != null) {
                TioWebsocketRequest invoke = (TioWebsocketRequest) onBeforeBytes.getMethod()
                    .invoke(onBeforeBytes.getInstance(), channelContext, bytes);
                onMapEvent(invoke, channelContext);
            }
        }
        return null;
    }
}
public class GPUOptions {

    /**
     * <pre>
     * A comma-separated list of GPU ids that determines the 'visible'
     * to 'virtual' mapping of GPU devices. For example, if TensorFlow
     * can see 8 GPU devices in the process, and one wanted to map
     * visible GPU devices 5 and 3 as "/gpu:0", and "/gpu:1", then one
     * would specify this field as "5,3". This field is similar in
     * spirit to the CUDA_VISIBLE_DEVICES environment variable, except
     * it applies to the visible GPU devices in the process.
     * NOTE: The GPU driver provides the process with the visible GPUs
     * in an order which is not guaranteed to have any correlation to
     * the *physical* GPU id in the machine. This field is used for
     * remapping "visible" to "virtual", which means this operates only
     * after the process starts. Users are required to use vendor
     * specific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the
     * physical to visible device mapping prior to invoking TensorFlow.
     * </pre>
     *
     * <code>optional string visible_device_list = 5;</code>
     */
    public com.google.protobuf.ByteString getVisibleDeviceListBytes() {
        java.lang.Object ref = visibleDeviceList_;
        if (ref instanceof java.lang.String) {
            // Field still holds its String form: convert to UTF-8 bytes and
            // cache the ByteString back into the field. This is protobuf's
            // standard lazy conversion for string fields (generated code).
            com.google.protobuf.ByteString b =
                com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
            visibleDeviceList_ = b;
            return b;
        } else {
            // Already converted (or originally set as bytes): return cached value.
            return (com.google.protobuf.ByteString) ref;
        }
    }
}
public class MultipleSelect { /** * Sets the character displayed in the button that separates * selected options . < br > * < br > * Defaults to < code > , < / code > . * @ param multipleSeparator */ public void setMultipleSeparator ( final String multipleSeparator ) { } }
if ( multipleSeparator != null ) attrMixin . setAttribute ( MULTIPLE_SEPARATOR , multipleSeparator ) ; else attrMixin . removeAttribute ( MULTIPLE_SEPARATOR ) ;
public class BranchUniversalObject {

    /**
     * ----- Share sheet -----
     *
     * Shows the share sheet for this object. Convenience overload that
     * delegates to the extended variant with a null final argument.
     *
     * @param activity the current activity hosting the share sheet
     * @param linkProperties link properties for the generated share link
     * @param style the visual style for the share sheet
     * @param callback optional listener for link-share events
     */
    public void showShareSheet(@NonNull Activity activity,
            @NonNull LinkProperties linkProperties, @NonNull ShareSheetStyle style,
            @Nullable Branch.BranchLinkShareListener callback) {
        // Delegate to the full overload; the extra parameter is left null here.
        showShareSheet(activity, linkProperties, style, callback, null);
    }
}
public class DropdownAutocompleteTextFieldPanel {

    /**
     * Callback method that can be overwritten to provide an additional action
     * when root choice has updated.
     *
     * @param target
     *            the current request handler
     */
    protected void onRootChoiceUpdate(final AjaxRequestTarget target) {
        // Sync the child text field's model with the child option selected for
        // the newly chosen root, then repaint the child field via AJAX.
        childTextField.setModelObject(getModelObject().getSelectedChildOption());
        target.add(DropdownAutocompleteTextFieldPanel.this.childTextField);
    }
}
public class Debug {

    /**
     * After detecting and extracting a table, this method enables us to save the table metadata
     * file locally for later performance evaluation.
     *
     * @param outputDirPath
     *            the directory path where the middle-stage results will go to
     * @param pdfFile
     *            the PDF file being processed (only its name is used for the output file name)
     * @param meta
     *            the table metadata to be printed
     */
    public static void printTableMeta(String outputDirPath, File pdfFile, String meta) {
        File middleDir = new File(outputDirPath, "metadata");
        if (!middleDir.exists()) {
            middleDir.mkdirs();
        }
        File tableMetaFile = new File(middleDir, pdfFile.getName() + ".metadata");
        // FIX: try-with-resources guarantees the writer is closed even when
        // write() throws; the original leaked the stream on failure.
        try (BufferedWriter bw0 = new BufferedWriter(new FileWriter(tableMetaFile))) {
            bw0.write(meta);
        } catch (IOException e) {
            System.out.printf("[Debug Error] IOException\n");
        }
    }
}
public class BasePanel { /** * Set the properties . * @ param strProperties The properties to set . */ public void setProperties ( Map < String , Object > properties ) { } }
if ( this . getTask ( ) != null ) this . getTask ( ) . setProperties ( properties ) ;
public class PropertiesCallbackHandler {

    /**
     * CallbackHandler Methods.
     *
     * Handles SASL/authentication callbacks against a properties-file user
     * store in two passes: pass one validates the callback types and captures
     * the user name; pass two answers the callbacks that need a response
     * (authorization, password, credential, evidence verification).
     *
     * @param callbacks the callbacks to service
     * @throws IOException if a user is not found or another I/O failure occurs
     * @throws UnsupportedCallbackException if an unrecognised callback type is supplied
     */
    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
        List<Callback> toRespondTo = new LinkedList<Callback>();

        String userName = null;
        boolean userFound = false;
        Properties users = getProperties();

        // A single pass may be sufficient but by using a two pass approach the Callbackhandler will not
        // fail if an unexpected order is encountered.

        // First Pass - is to double check no unsupported callbacks and to retrieve
        // information from the callbacks passing in information.
        for (Callback current : callbacks) {
            if (current instanceof AuthorizeCallback) {
                toRespondTo.add(current);
            } else if (current instanceof NameCallback) {
                // Capture the user name and check it against the user store.
                NameCallback nameCallback = (NameCallback) current;
                userName = nameCallback.getDefaultName();
                userFound = users.containsKey(userName);
            } else if (current instanceof PasswordCallback && plainText) {
                // Plain-text passwords can only be returned when stored in clear.
                toRespondTo.add(current);
            } else if (current instanceof CredentialCallback) {
                toRespondTo.add(current);
            } else if (current instanceof EvidenceVerifyCallback
                    && ((EvidenceVerifyCallback) current).getEvidence() instanceof PasswordGuessEvidence) {
                toRespondTo.add(current);
            } else if (current instanceof RealmCallback) {
                // The realm must match the one this handler was configured with.
                String realm = ((RealmCallback) current).getDefaultText();
                if (this.realm.equals(realm) == false) {
                    throw DomainManagementLogger.ROOT_LOGGER.invalidRealm(realm, this.realm);
                }
            } else {
                throw new UnsupportedCallbackException(current);
            }
        }

        // Second Pass - Now iterate the Callback(s) requiring a response.
        for (Callback current : toRespondTo) {
            if (current instanceof AuthorizeCallback) {
                // Authorization is granted only when the authentication and
                // authorization identities are the same.
                AuthorizeCallback acb = (AuthorizeCallback) current;
                boolean authorized = acb.getAuthenticationID().equals(acb.getAuthorizationID());
                if (authorized == false) {
                    SECURITY_LOGGER.tracef(
                        "Checking 'AuthorizeCallback', authorized=false, authenticationID=%s, authorizationID=%s.",
                        acb.getAuthenticationID(), acb.getAuthorizationID());
                }
                acb.setAuthorized(authorized);
            } else if (current instanceof PasswordCallback) {
                if (userFound == false) {
                    SECURITY_LOGGER.tracef("User '%s' not found in properties file.", userName);
                    throw new UserNotFoundException(userName);
                }
                // Plain-text store: hand the stored password back directly.
                String password = users.get(userName).toString();
                ((PasswordCallback) current).setPassword(password.toCharArray());
            } else if (current instanceof CredentialCallback) {
                if (userFound == false) {
                    SECURITY_LOGGER.tracef("User '%s' not found in properties file.", userName);
                    throw new UserNotFoundException(userName);
                }
                CredentialCallback cc = (CredentialCallback) current;
                if (PasswordCredential.class.isAssignableFrom(cc.getCredentialType())) {
                    String algorithmName = cc.getAlgorithm();
                    final Password password;
                    if ((algorithmName == null || ALGORITHM_CLEAR.equals(algorithmName)) && plainText) {
                        // Clear-text store and clear algorithm requested.
                        password = ClearPassword.createRaw(ALGORITHM_CLEAR,
                            ((String) users.get(userName)).toCharArray());
                    } else if ((algorithmName == null || ALGORITHM_DIGEST_MD5.equals(algorithmName))
                            && plainText == false) {
                        // Pre-hashed store: the stored value is the hex-encoded
                        // DIGEST-MD5 pre-digest; decode it back to raw bytes.
                        byte[] hashed = ByteIterator
                            .ofBytes(((String) users.get(userName)).getBytes(StandardCharsets.UTF_8))
                            .asUtf8String().hexDecode().drain();
                        password = DigestPassword.createRaw(ALGORITHM_DIGEST_MD5, userName, realm, hashed);
                    } else {
                        // Requested algorithm does not match the store format.
                        continue;
                    }
                    cc.setCredential(cc.getCredentialType().cast(new PasswordCredential(password)));
                }
            } else if (current instanceof EvidenceVerifyCallback) {
                if (userFound == false) {
                    SECURITY_LOGGER.tracef("User '%s' not found in properties file.", userName);
                    throw new UserNotFoundException(userName);
                }
                EvidenceVerifyCallback evc = (EvidenceVerifyCallback) current;
                PasswordGuessEvidence evidence = (PasswordGuessEvidence) evc.getEvidence();
                char[] guess = evidence.getGuess();
                if (plainText) {
                    // Clear-text store: compare the guess to the stored password.
                    String password = users.get(userName).toString();
                    boolean verified = password.equals(new String(guess));
                    if (verified == false) {
                        SECURITY_LOGGER.tracef("Password verification failed for user '%s'", userName);
                    }
                    evc.setVerified(verified);
                } else {
                    // Hashed store: hash the guess the same way and compare.
                    // The hash util is shared, so serialize access to it.
                    UsernamePasswordHashUtil hashUtil = getHashUtil();
                    String hash;
                    synchronized (hashUtil) {
                        hash = hashUtil.generateHashedHexURP(userName, realm, guess);
                    }
                    String expected = users.get(userName).toString();
                    boolean verified = expected.equals(hash);
                    if (verified == false) {
                        SECURITY_LOGGER.tracef("Digest verification failed for user '%s'", userName);
                    }
                    evc.setVerified(verified);
                }
            }
        }
    }
}
public class OptionGroupOption { /** * The options that conflict with this option . * @ param optionsConflictsWith * The options that conflict with this option . */ public void setOptionsConflictsWith ( java . util . Collection < String > optionsConflictsWith ) { } }
if ( optionsConflictsWith == null ) { this . optionsConflictsWith = null ; return ; } this . optionsConflictsWith = new com . amazonaws . internal . SdkInternalList < String > ( optionsConflictsWith ) ;
public class DataNode { /** * Check if there is no space in disk * @ param e that caused this checkDiskError call */ protected void checkDiskError ( Exception e ) throws IOException { } }
if ( e instanceof ClosedByInterruptException || e instanceof java . io . InterruptedIOException ) { return ; } LOG . warn ( "checkDiskError: exception: " , e ) ; if ( e . getMessage ( ) != null && e . getMessage ( ) . startsWith ( "No space left on device" ) ) { throw new DiskOutOfSpaceException ( "No space left on device" ) ; } else { checkDiskError ( ) ; }
public class RelationalDbDataSourceBackend { /** * Configures which Java database API to use ( { @ link java . sql . DriverManager } * or { @ link javax . sql . DataSource } . If < code > null < / code > , the default is * assigned ( { @ link DatabaseAPI . DRIVERMANAGER } ) . * @ param databaseAPI a { @ link DatabaseAPI } . */ public void setDatabaseAPI ( DatabaseAPI databaseAPI ) { } }
this . sqlGenerator = null ; if ( databaseAPI == null ) { databaseAPI = DatabaseAPI . DRIVERMANAGER ; } this . databaseAPI = databaseAPI ;