signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ClusteringServiceConfigurationBuilder { /** * Creates a new builder connecting to the given cluster .
* @ param servers the non - { @ code null } iterable of servers in the cluster
* @ param clusterTierManager the non - { @ code null } cluster tier manager identifier
* @ return a clustering service configuration builder */
public static ClusteringServiceConfigurationBuilder cluster ( Iterable < InetSocketAddress > servers , String clusterTierManager ) { } } | return new ClusteringServiceConfigurationBuilder ( new ConnectionSource . ServerList ( servers , clusterTierManager ) , TimeoutsBuilder . timeouts ( ) . build ( ) , DEFAULT_AUTOCREATE ) ; |
public class ApiOvhDomain { /** * Delete the glue record
* REST : DELETE / domain / { serviceName } / glueRecord / { host }
* @ param serviceName [ required ] The internal name of your domain
* @ param host [ required ] Host of the glue record */
public net . minidev . ovh . api . domain . OvhTask serviceName_glueRecord_host_DELETE ( String serviceName , String host ) throws IOException { } } | String qPath = "/domain/{serviceName}/glueRecord/{host}" ; StringBuilder sb = path ( qPath , serviceName , host ) ; String resp = exec ( qPath , "DELETE" , sb . toString ( ) , null ) ; return convertTo ( resp , net . minidev . ovh . api . domain . OvhTask . class ) ; |
public class StandardRoadNetwork { /** * Fire the addition event .
* @ param segment is the removed segment */
protected void fireSegmentRemoved ( RoadSegment segment ) { } } | if ( this . listeners != null && isEventFirable ( ) ) { for ( final RoadNetworkListener listener : this . listeners ) { listener . onRoadSegmentRemoved ( this , segment ) ; } } |
public class FATServerHelper { /** * Package a WAR and add it to a server .
* The rules of { @ link # addToServer } are followed , using a null
* EAR name and a null JAR name .
* @ param targetServer The server to which to add the EAR or WAR .
* @ param targetDir The directory of the server in which to place
* the EAR or WAR .
* @ param warName The name of the WAR which is to be created and added .
* @ param warPackageNames The names of packages to be placed in the WAR .
* @ param addWarResources Control parameter : Tells if resources are to
* be added for the WAR .
* @ throws Exception Thrown if any of the steps fails . */
public static boolean addWarToServer ( LibertyServer targetServer , String targetDir , String warName , String [ ] warPackageNames , boolean addWarResources ) throws Exception { } } | String earName = null ; boolean addEarResources = DO_NOT_ADD_RESOURCES ; String jarName = null ; boolean addJarResources = DO_NOT_ADD_RESOURCES ; String [ ] jarPackageNames = null ; return addToServer ( targetServer , targetDir , earName , addEarResources , warName , warPackageNames , addWarResources , jarName , jarPackageNames , addJarResources ) ; |
public class ConcatVectorNamespace { /** * This prints out a ConcatVector by mapping to the namespace , to make debugging learning algorithms easier .
* @ param vector the vector to print
* @ param bw the output stream to write to */
public void debugVector ( ConcatVector vector , BufferedWriter bw ) throws IOException { } } | List < String > features = new ArrayList < > ( ) ; Map < String , List < Integer > > sortedFeatures = new HashMap < > ( ) ; for ( ObjectCursor < String > key : featureToIndex . keys ( ) ) { features . add ( key . value ) ; int i = featureToIndex . getOrDefault ( key . value , - 1 ) ; List < Integer > featureIndices = new ArrayList < > ( ) ; if ( vector . isComponentSparse ( i ) ) { int [ ] indices = vector . getSparseIndices ( i ) ; for ( int j : indices ) { featureIndices . add ( j ) ; } } else { double [ ] arr = vector . getDenseComponent ( i ) ; for ( int j = 0 ; j < arr . length ; j ++ ) { featureIndices . add ( j ) ; } } featureIndices . sort ( ( a , b ) -> { if ( Math . abs ( vector . getValueAt ( i , a ) ) < Math . abs ( vector . getValueAt ( i , b ) ) ) { return 1 ; } else if ( Math . abs ( vector . getValueAt ( i , a ) ) > Math . abs ( vector . getValueAt ( i , b ) ) ) { return - 1 ; } else { return 0 ; } } ) ; sortedFeatures . put ( key . value , featureIndices ) ; } features . sort ( ( a , b ) -> { double bestAValue = sortedFeatures . get ( a ) . size ( ) == 0 ? 0.0 : Math . abs ( vector . getValueAt ( featureToIndex . getOrDefault ( a , - 1 ) , sortedFeatures . get ( a ) . get ( 0 ) ) ) ; double bestBValue = sortedFeatures . get ( b ) . size ( ) == 0 ? 0.0 : Math . abs ( vector . getValueAt ( featureToIndex . getOrDefault ( b , - 1 ) , sortedFeatures . get ( b ) . get ( 0 ) ) ) ; if ( bestAValue < bestBValue ) { return 1 ; } else if ( bestAValue > bestBValue ) { return - 1 ; } else return 0 ; } ) ; for ( String key : features ) { bw . write ( "FEATURE: \"" + key ) ; bw . write ( "\"\n" ) ; for ( int j : sortedFeatures . get ( key ) ) { debugFeatureValue ( key , j , vector , bw ) ; } } // Flush the writer
bw . flush ( ) ; |
public class CmsWidgetDialog { /** * Generates the dialog starting html code . < p >
* @ return html code
* @ throws JspException if something goes wrong */
protected String defaultActionHtml ( ) throws JspException { } } | StringBuffer result = new StringBuffer ( 2048 ) ; result . append ( defaultActionHtmlStart ( ) ) ; result . append ( defaultActionHtmlContent ( ) ) ; result . append ( defaultActionHtmlEnd ( ) ) ; return result . toString ( ) ; |
public class SetTag { /** * override doTag method */
@ Override public void doTag ( ) throws JspException , IOException { } } | PageContext pageContext = ( PageContext ) getJspContext ( ) ; HttpServletRequest request = ( HttpServletRequest ) pageContext . getRequest ( ) ; HttpServletResponse response = ( HttpServletResponse ) pageContext . getResponse ( ) ; HttpSession session = request . getSession ( ) ; if ( TagUtils . isELValue ( getValue ( ) ) ) { Object var = TagUtils . getElValue ( getJspContext ( ) , TagUtils . getElName ( getValue ( ) ) ) ; if ( var != null ) { setValue ( var ) ; } } if ( StringUtils . equalsIgnoreCase ( getScope ( ) , "request" ) ) { request . setAttribute ( getName ( ) , getValue ( ) ) ; } else if ( StringUtils . equalsIgnoreCase ( getScope ( ) , "session" ) ) { session . setAttribute ( getName ( ) , getValue ( ) ) ; } else if ( StringUtils . equalsIgnoreCase ( getScope ( ) , "cookie" ) ) { Cookie cookie = new Cookie ( getName ( ) , TagUtils . getCookieValue ( getValue ( ) ) ) ; // TODO support " Comment " / / rfc2019 , " Discard " / / 2019 + + , " Expires " / / ( old cookies ) , " Secure " , " Version "
if ( dynamicAttrMap . containsKey ( "domain" ) ) { cookie . setDomain ( ObjectUtils . toString ( dynamicAttrMap . get ( "domain" ) ) ) ; } if ( dynamicAttrMap . containsKey ( "path" ) ) { cookie . setPath ( ObjectUtils . toString ( dynamicAttrMap . get ( "path" ) ) ) ; } if ( dynamicAttrMap . containsKey ( "maxAge" ) ) { int maxAge = NumberUtils . toInt ( ObjectUtils . toString ( dynamicAttrMap . get ( "maxAge" ) ) , ( 60 * 60 * 24 * 7 ) ) ; // default value 1 week ( 60*60*24*7)
cookie . setMaxAge ( maxAge ) ; // seconds
} response . addCookie ( cookie ) ; } else { throw new IllegalArgumentException ( String . format ( "[%s] is undefined scope value" , this . getScope ( ) ) ) ; } |
public class MainActivity { /** * Helper function that creates a new progress button , adds it to the given layout .
* Returns a reference to the progress button for customization . */
private ProgressButton addProgressButton ( LinearLayout container ) { } } | final LinearLayout . LayoutParams layoutParams = new LinearLayout . LayoutParams ( 0 , LinearLayout . LayoutParams . WRAP_CONTENT , 1.0f ) ; final ProgressButton progressButton = new ProgressButton ( this ) ; progressButton . setLayoutParams ( layoutParams ) ; container . addView ( progressButton ) ; return progressButton ; |
public class DbIdentityServiceProvider { /** * authorizations / / / / / */
protected void createDefaultAuthorizations ( UserEntity userEntity ) { } } | if ( Context . getProcessEngineConfiguration ( ) . isAuthorizationEnabled ( ) ) { saveDefaultAuthorizations ( getResourceAuthorizationProvider ( ) . newUser ( userEntity ) ) ; } |
public class TemplateParser { /** * Process v - model expressions . Only @ Data fields are allowed in v - model .
* @ param vModelAttribute The v - model attribute to process */
private void processVModel ( Attribute vModelAttribute ) { } } | String vModelValue = vModelAttribute . getValue ( ) ; VariableInfo vModelDataField = context . findRootVariable ( vModelValue ) ; if ( vModelDataField == null ) { if ( vModelValue . contains ( "." ) ) { logger . error ( "v-model doesn't support dot notation in Vue GWT: \"" + vModelValue + "\". Try using a @Computed with a getter and a setter. Check our documentation on v-model for more information." ) ; } else { logger . error ( "Couldn't find @Data or @Computed for v-model \"" + vModelValue + "\". V-Model is only supported on @Data and @Computed. Check our documentation on v-model for more information." ) ; } return ; } String placeHolderVModelValue = vModelFieldToPlaceHolderField ( vModelValue ) ; outputDocument . replace ( vModelAttribute . getValueSegment ( ) , placeHolderVModelValue ) ; result . addvModelDataField ( vModelDataField ) ; |
public class CodeUtil { /** * Gets javadoc .
* @ param clazz the clazz
* @ return the javadoc */
public static String getJavadoc ( @ Nullable final Class < ? > clazz ) { } } | try { if ( null == clazz ) return null ; @ Nullable final File source = com . simiacryptus . util . lang . CodeUtil . findFile ( clazz ) ; if ( null == source ) return clazz . getName ( ) + " not found" ; final List < String > lines = IOUtils . readLines ( new FileInputStream ( source ) , Charset . forName ( "UTF-8" ) ) ; final int classDeclarationLine = IntStream . range ( 0 , lines . size ( ) ) . filter ( i -> lines . get ( i ) . contains ( "class " + clazz . getSimpleName ( ) ) ) . findFirst ( ) . getAsInt ( ) ; final int firstLine = IntStream . rangeClosed ( 1 , classDeclarationLine ) . map ( i -> classDeclarationLine - i ) . filter ( i -> ! lines . get ( i ) . matches ( "\\s*[/\\*@].*" ) ) . findFirst ( ) . orElse ( - 1 ) + 1 ; final String javadoc = lines . subList ( firstLine , classDeclarationLine ) . stream ( ) . filter ( s -> s . matches ( "\\s*[/\\*].*" ) ) . map ( s -> s . replaceFirst ( "^[ \t]*[/\\*]+" , "" ) . trim ( ) ) . filter ( x -> ! x . isEmpty ( ) ) . reduce ( ( a , b ) -> a + "\n" + b ) . orElse ( "" ) ; return javadoc . replaceAll ( "<p>" , "\n" ) ; } catch ( @ javax . annotation . Nonnull final Throwable e ) { e . printStackTrace ( ) ; return "" ; } |
public class PojoSerializer { /** * Finds and returns the order ( 0 - based ) of a POJO field .
* Returns - 1 if the field does not exist for this POJO . */
private int findField ( String fieldName ) { } } | int foundIndex = 0 ; for ( Field field : fields ) { if ( field != null && fieldName . equals ( field . getName ( ) ) ) { return foundIndex ; } foundIndex ++ ; } return - 1 ; |
public class ParagraphWrapper { /** * Replaces the given placeholder String with the replacement object within the paragraph .
 * The replacement object must be a valid DOCX4J Object .
 * @ param placeholder the placeholder to be replaced .
 * @ param replacement the object to replace the placeholder String . */
public void replace ( String placeholder , Object replacement ) { } } | /* Finds the placeholder in the paragraph's aggregated text, determines which runs it spans, and splices in the replacement. A single affected run is handled per position (placeholder covers the whole run, sits at its start, at its end, or strictly inside it); multiple affected runs are trimmed at both edges, the runs in between are removed, and the replacement is inserted after the first run. Each structural change is followed by recalculateRuns() to refresh the index bookkeeping. */ String text = getText ( ) ; int matchStartIndex = text . indexOf ( placeholder ) ; if ( matchStartIndex == - 1 ) { // nothing to replace
return ; } int matchEndIndex = matchStartIndex + placeholder . length ( ) - 1 ; List < IndexedRun > affectedRuns = getAffectedRuns ( matchStartIndex , matchEndIndex ) ; boolean singleRun = affectedRuns . size ( ) == 1 ; if ( singleRun ) { IndexedRun run = affectedRuns . get ( 0 ) ; boolean placeholderSpansCompleteRun = placeholder . length ( ) == RunUtil . getText ( run . getRun ( ) ) . length ( ) ; boolean placeholderAtStartOfRun = matchStartIndex == run . getStartIndex ( ) ; boolean placeholderAtEndOfRun = matchEndIndex == run . getEndIndex ( ) ; boolean placeholderWithinRun = matchStartIndex > run . getStartIndex ( ) && matchEndIndex < run . getEndIndex ( ) ; if ( placeholderSpansCompleteRun ) { this . paragraph . getContent ( ) . remove ( run . getRun ( ) ) ; this . paragraph . getContent ( ) . add ( run . getIndexInParent ( ) , replacement ) ; recalculateRuns ( ) ; } else if ( placeholderAtStartOfRun ) { run . replace ( matchStartIndex , matchEndIndex , "" ) ; this . paragraph . getContent ( ) . add ( run . getIndexInParent ( ) , replacement ) ; recalculateRuns ( ) ; } else if ( placeholderAtEndOfRun ) { run . replace ( matchStartIndex , matchEndIndex , "" ) ; this . paragraph . getContent ( ) . add ( run . getIndexInParent ( ) + 1 , replacement ) ; recalculateRuns ( ) ; } else if ( placeholderWithinRun ) { /* Middle of a run: split the run's text around the placeholder and insert run1, replacement, run2 in place of the original run. */ String runText = RunUtil . getText ( run . getRun ( ) ) ; int startIndex = runText . indexOf ( placeholder ) ; int endIndex = startIndex + placeholder . length ( ) ; R run1 = RunUtil . create ( runText . substring ( 0 , startIndex ) , this . paragraph ) ; R run2 = RunUtil . create ( runText . substring ( endIndex ) , this . paragraph ) ; this . paragraph . getContent ( ) . add ( run . getIndexInParent ( ) , run2 ) ; this . paragraph . getContent ( ) . add ( run . getIndexInParent ( ) , replacement ) ; this . paragraph . getContent ( ) . add ( run . getIndexInParent ( ) , run1 ) ; this . paragraph . getContent ( ) . remove ( run . 
getRun ( ) ) ; recalculateRuns ( ) ; } } else { /* NOTE(review): both edge runs are trimmed with the same global match indices - presumably IndexedRun.replace clamps them to its own range; confirm. */ IndexedRun firstRun = affectedRuns . get ( 0 ) ; IndexedRun lastRun = affectedRuns . get ( affectedRuns . size ( ) - 1 ) ; // remove the placeholder from first and last run
firstRun . replace ( matchStartIndex , matchEndIndex , "" ) ; lastRun . replace ( matchStartIndex , matchEndIndex , "" ) ; // remove all runs between first and last
for ( IndexedRun run : affectedRuns ) { if ( run != firstRun && run != lastRun ) { this . paragraph . getContent ( ) . remove ( run . getRun ( ) ) ; } } // add replacement run between first and last run
this . paragraph . getContent ( ) . add ( firstRun . getIndexInParent ( ) + 1 , replacement ) ; recalculateRuns ( ) ; } |
public class DoubleTuples { /** * Normalize the elements of the given tuple , so that its minimum and
* maximum elements match the given minimum and maximum values .
* @ param t The input tuple
* @ param min The minimum value
* @ param max The maximum value
* @ param result The tuple that will store the result
* @ return The result tuple
* @ throws IllegalArgumentException If the given tuples do not
* have the same { @ link Tuple # getSize ( ) size } */
public static MutableDoubleTuple normalizeElements ( DoubleTuple t , double min , double max , MutableDoubleTuple result ) { } } | return rescaleElements ( t , min ( t ) , max ( t ) , min , max , result ) ; |
public class TransactionElf { /** * Rollback the current transaction . */
public static void rollback ( ) { } } | try { if ( userTransaction . getStatus ( ) != Status . STATUS_NO_TRANSACTION ) { userTransaction . rollback ( ) ; } else { LOGGER . warn ( "Request to rollback transaction when none was in started." ) ; } } catch ( Exception e ) { LOGGER . warn ( "Transaction rollback failed." , e ) ; } |
public class AbstractIntSet { /** * Retains only the elements in this set that are contained in the specified
* { @ code IntSet } . */
public boolean retainAll ( IntCollection ints ) { } } | IntIterator it = iterator ( ) ; boolean changed = false ; while ( it . hasNext ( ) ) { if ( ! ints . contains ( it . nextInt ( ) ) ) { it . remove ( ) ; changed = true ; } } return changed ; |
public class FieldValueCounterController { /** * List Counters that match the given criteria .
* @ param pageable { @ link Pageable }
* @ param pagedAssembler { @ link PagedResourcesAssembler }
* @ return counters */
@ RequestMapping ( value = "" , method = RequestMethod . GET ) public PagedResources < ? extends MetricResource > list ( Pageable pageable , PagedResourcesAssembler < String > pagedAssembler ) { } } | List < String > names = new ArrayList < > ( repository . list ( ) ) ; long count = names . size ( ) ; long pageEnd = Math . min ( count , pageable . getOffset ( ) + pageable . getPageSize ( ) ) ; Page fieldValueCounterPage = new PageImpl < > ( names . subList ( toIntExact ( pageable . getOffset ( ) ) , toIntExact ( pageEnd ) ) , pageable , names . size ( ) ) ; return pagedAssembler . toResource ( fieldValueCounterPage , shallowAssembler ) ; |
public class NumberMath { /** * Determine which NumberMath instance to use , given the supplied operands . This method implements
* the type promotion rules discussed in the documentation . Note that by the time this method is
* called , any Byte , Character or Short operands will have been promoted to Integer . For reference ,
* here is the promotion matrix :
* bD bI D F L I
* bD bD bD D D bD bD
* bI bD bI D D bI bI
* D D D D D D D
* F D D D D D D
* L bD bI D D L L
* I bD bI D D L I
* Note that for division , if either operand isFloatingPoint , the result will be floating . Otherwise ,
* the result is BigDecimal */
public static NumberMath getMath ( Number left , Number right ) { } } | // FloatingPointMath wins according to promotion Matrix
if ( isFloatingPoint ( left ) || isFloatingPoint ( right ) ) { return FloatingPointMath . INSTANCE ; } NumberMath leftMath = getMath ( left ) ; NumberMath rightMath = getMath ( right ) ; if ( leftMath == BigDecimalMath . INSTANCE || rightMath == BigDecimalMath . INSTANCE ) { return BigDecimalMath . INSTANCE ; } if ( leftMath == BigIntegerMath . INSTANCE || rightMath == BigIntegerMath . INSTANCE ) { return BigIntegerMath . INSTANCE ; } if ( leftMath == LongMath . INSTANCE || rightMath == LongMath . INSTANCE ) { return LongMath . INSTANCE ; } if ( leftMath == IntegerMath . INSTANCE || rightMath == IntegerMath . INSTANCE ) { return IntegerMath . INSTANCE ; } // also for custom Number implementations
return BigDecimalMath . INSTANCE ; |
public class ElevationUtil { /** * Creates and returns a shader , which can be used to draw a shadow , which located at a corner
* of an elevated view .
* @ param orientation
* The orientation of the shadow in relation to the elevated view as a value of the enum
* { @ link Orientation } . The orientation may either be < code > TOP _ LEFT < / code > ,
* < code > TOP _ RIGHT < / code > , < code > BOTTOM _ LEFT < / code > or < code > BOTTOM _ RIGHT < / code >
* @ param bitmapSize
* The size of the bitmap , which is used to draw the shadow , in pixels as an { @ link
* Integer } value
* @ param radius
* The radius , which should be used to draw the shadow , in pixels as a { @ link Float }
* value
* @ return The shader , which has been created as an instance of the class { @ link Shader } */
private static Shader createRadialGradient ( @ NonNull final Orientation orientation , final int bitmapSize , final float radius ) { } } | PointF center = new PointF ( ) ; switch ( orientation ) { case TOP_LEFT : center . x = bitmapSize ; center . y = bitmapSize ; break ; case TOP_RIGHT : center . y = bitmapSize ; break ; case BOTTOM_LEFT : center . x = bitmapSize ; break ; case BOTTOM_RIGHT : break ; default : throw new IllegalArgumentException ( "Invalid orientation: " + orientation ) ; } return new RadialGradient ( center . x , center . y , radius , Color . TRANSPARENT , Color . BLACK , Shader . TileMode . CLAMP ) ; |
public class DirectoryLookupService { /** * Get the UP ModelServiceInstance list of the Service .
* It only return the UP ServiceInstance of the Service .
* @ param serviceName
* the service name .
* @ return
* the ModelServiceInstance list of the Service . */
public List < ModelServiceInstance > getUPModelInstances ( String serviceName ) { } } | ModelService service = getModelService ( serviceName ) ; List < ModelServiceInstance > list = null ; if ( service != null && service . getServiceInstances ( ) . size ( ) > 0 ) { for ( ModelServiceInstance instance : new ArrayList < ModelServiceInstance > ( service . getServiceInstances ( ) ) ) { if ( instance . getStatus ( ) . equals ( OperationalStatus . UP ) ) { if ( list == null ) { list = new ArrayList < ModelServiceInstance > ( ) ; } list . add ( instance ) ; } } } if ( list == null ) { return Collections . emptyList ( ) ; } else { return list ; } |
public class Main { /** * Main method of the Command - line interface ( CLI )
* @ param args
* command - line arguments
* @ throws Throwable */
public static void main ( String [ ] args ) throws Throwable { } } | CliArguments arguments = CliArguments . parse ( args ) ; if ( arguments . isSet ( ) && ! arguments . isUsageMode ( ) ) { CliRunner runner = new CliRunner ( arguments ) ; try { runner . run ( ) ; } finally { runner . close ( ) ; } } else { PrintWriter out = new PrintWriter ( System . out ) ; printUsage ( out ) ; FileHelper . safeClose ( out ) ; } |
public class StorePackageImpl { /** * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * @ generated */
@ Override public EClass getAuthorization ( ) { } } | /* Lazily resolves the Authorization EClass from the registered package. The classifier index (110) is emitted by the EMF generator - do not edit by hand. */ if ( authorizationEClass == null ) { authorizationEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( StorePackage . eNS_URI ) . getEClassifiers ( ) . get ( 110 ) ; } return authorizationEClass ; |
public class ExternalEventHandlerBase { /** * This method is used to create an MDW default response message . Such
* a message is only used when an exception occurred before customizable
* code is reached ( e . g . the external message is malformed so we cannot
* determine which handler to call ) , or a simple acknowledgment is sufficient .
* @ param e The exception that triggers the response message . This should be null
* if the message is for simple acknowledgment rather than for reporting an
* exception
* @ param request request String
* @ param msgdoc parsed object such XML Bean and JSON object if it is possible to parse the external message
* @ param metaInfo protocol headers
* @ return */
protected String createResponseMessage ( Exception e , String request , Object msgdoc , Map < String , String > metaInfo ) { } } | ListenerHelper helper = new ListenerHelper ( ) ; if ( e instanceof ServiceException ) return helper . createErrorResponse ( request , metaInfo , ( ServiceException ) e ) . getContent ( ) ; else if ( e != null ) return helper . createErrorResponse ( request , metaInfo , new ServiceException ( ServiceException . INTERNAL_ERROR , e . getMessage ( ) ) ) . getContent ( ) ; else return helper . createAckResponse ( request , metaInfo ) ; |
public class JdbcQueue { /** * Re - queue a message , retry if deadlock .
 * Note : http : / / dev . mysql . com / doc / refman / 5.0 / en / innodb - deadlocks . html
 * InnoDB uses automatic row - level locking . You can get deadlocks even in
 * the case of transactions that just insert or delete a single row . That is
 * because these operations are not really " atomic " ; they automatically set
 * locks on the ( possibly several ) index records of the row inserted or
 * deleted .
 * Note : the supplied queue message is mutable .
 * @ param conn
 * @ param msg
 * @ param numRetries
 * @ param maxRetries
 * @ return */
protected boolean _requeueWithRetries ( Connection conn , IQueueMessage < ID , DATA > msg , int numRetries , int maxRetries ) { } } | /* Runs the re-queue inside its own transaction at the configured isolation level: removes the message from ephemeral storage (unless disabled), bumps its num-requeues and queue timestamp, and writes it back to queue storage. A duplicate-key condition means the message already exists in queue storage and is treated as success. Concurrency failures (e.g. deadlocks) are retried up to maxRetries via the silent variant so num-requeues is only incremented once. */ try { jdbcHelper . startTransaction ( conn ) ; conn . setTransactionIsolation ( transactionIsolationLevel ) ; if ( ! isEphemeralDisabled ( ) ) { removeFromEphemeralStorage ( conn , msg ) ; } Date now = new Date ( ) ; msg . incNumRequeues ( ) . setQueueTimestamp ( now ) ; boolean result = putToQueueStorage ( conn , msg ) ; jdbcHelper . commitTransaction ( conn ) ; return result ; } catch ( DuplicatedValueException dve ) { jdbcHelper . rollbackTransaction ( conn ) ; LOGGER . warn ( dve . getMessage ( ) , dve ) ; return true ; } catch ( DaoException de ) { if ( de . getCause ( ) instanceof DuplicateKeyException ) { jdbcHelper . rollbackTransaction ( conn ) ; LOGGER . warn ( de . getMessage ( ) , de ) ; return true ; } if ( de . getCause ( ) instanceof ConcurrencyFailureException ) { jdbcHelper . rollbackTransaction ( conn ) ; if ( numRetries > maxRetries ) { throw new QueueException ( de ) ; } else { /* * call _ requeueSilentWithRetries ( . . . ) here is correct
 * because we do not want message ' s num - requeues is
 * increased with every retry */
incRetryCounter ( "_requeueWithRetries" ) ; return _requeueSilentWithRetries ( conn , msg , numRetries + 1 , maxRetries ) ; } } throw de ; } catch ( Exception e ) { jdbcHelper . rollbackTransaction ( conn ) ; throw e instanceof QueueException ? ( QueueException ) e : new QueueException ( e ) ; } |
public class SourceStreamManager { /** * Consolidates the sourceStreams following restart recovery .
 * The streams may have scattered tick values derived from persisted
 * messages or references .
 * @ param startMode */
public List consolidateStreams ( int startMode ) throws SIResourceException { } } | /* Iterates the stream set, restores each source stream's in-window messages (collected into the returned list), and for point-to-point resets each stream's defined send window from the message processor. */ if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "consolidateStreams" ) ; List < List > sentMsgs = new ArrayList < List > ( ) ; StreamSet streamSet = getStreamSet ( ) ; // iterate over the non - null streams
Iterator itr = streamSet . iterator ( ) ; List temp = null ; while ( itr . hasNext ( ) ) { SourceStream stream = ( SourceStream ) itr . next ( ) ; // Get list of the messages inside the sendWindow on this stream
// and add this list onto end of sentMsgs list
temp = stream . restoreStream ( startMode ) ; if ( temp != null ) sentMsgs . addAll ( temp ) ; // This is done after the restore as it may have been
// changed by Admin and we need to work towards it as usual
if ( pointTopoint ) stream . setDefinedSendWindow ( messageProcessor . getDefinedSendWindow ( ) ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "consolidateStreams" , sentMsgs ) ; return sentMsgs ; |
public class ModelMojoReader { /** * Retrieves the value associated with a given key . If value is not set of the key , a given default value is returned
* instead . Uses same parsing logic as { @ link ModelMojoReader # readkv ( String ) } . If default value is not null it ' s type
* is used to assist the parser to determine the return type .
* @ param key name of the key
* @ param defVal default value
* @ param < T > return type
* @ return parsed value */
@ SuppressWarnings ( "unchecked" ) protected < T > T readkv ( String key , T defVal ) { } } | Object val = _lkv . get ( key ) ; if ( ! ( val instanceof RawValue ) ) return val != null ? ( T ) val : defVal ; return ( ( RawValue ) val ) . parse ( defVal ) ; |
public class EC2Context { /** * < br >
* Needed AWS actions :
* < ul >
* < li > autoscaling : DescribeAutoScalingGroups < / li >
* < li > ec2 : DescribeInstances < / li >
* < / ul >
* @ param autoScalingGroupName the name of the group
* @ return the list of private IP addresses of the members */
public List < String > getPrivateAutoScalingMemberIPs ( String autoScalingGroupName ) { } } | Preconditions . checkArgument ( autoScalingGroupName != null && ! autoScalingGroupName . isEmpty ( ) ) ; List < String > members = this . getAutoScalingMembers ( autoScalingGroupName ) ; DescribeInstancesRequest req = new DescribeInstancesRequest ( ) ; req . setInstanceIds ( members ) ; DescribeInstancesResult result = this . ec2Client . describeInstances ( req ) ; List < String > list = new ArrayList < > ( ) ; for ( Reservation reservation : result . getReservations ( ) ) { for ( Instance instance : reservation . getInstances ( ) ) { if ( instance . getState ( ) . getName ( ) . equals ( "running" ) ) { list . add ( instance . getPrivateIpAddress ( ) ) ; } } } return list ; |
public class Morse { /** * 注册莫尔斯电码表
* @ param abc 字母和字符
* @ param dict 二进制 */
private static void registerMorse ( Character abc , String dict ) { } } | alphabets . put ( Integer . valueOf ( abc ) , dict ) ; dictionaries . put ( dict , Integer . valueOf ( abc ) ) ; |
public class VirtualMachineImagesInner { /** * Gets a list of all virtual machine image versions for the specified location , publisher , offer , and SKU .
* @ param location The name of a supported Azure region .
* @ param publisherName A valid image publisher .
* @ param offer A valid image publisher offer .
* @ param skus A valid image SKU .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the List & lt ; VirtualMachineImageResourceInner & gt ; object if successful . */
public List < VirtualMachineImageResourceInner > list ( String location , String publisherName , String offer , String skus ) { } } | return listWithServiceResponseAsync ( location , publisherName , offer , skus ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class GoogleMapShapeConverter { /** * Get a list of points as LatLng from a list of Markers
* @ param markers list of markers
* @ return lat lngs */
public List < LatLng > getPointsFromMarkers ( List < Marker > markers ) { } } | List < LatLng > points = new ArrayList < LatLng > ( ) ; for ( Marker marker : markers ) { points . add ( marker . getPosition ( ) ) ; } return points ; |
public class AbstractFeatureAggregator {
    /**
     * Reads a quantizer (codebook) from the given file and returns it in a 2-dimensional double array.
     * Lines without a comma (e.g. header lines) are skipped.
     *
     * @param filename name of the file containing the quantizer
     * @param numCentroids number of centroids of the quantizer
     * @param centroidLength length of each centroid
     * @return the quantizer as a 2-dimensional double array
     * @throws IOException if the file cannot be read
     */
    public static double[][] readQuantizer(String filename, int numCentroids, int centroidLength) throws IOException {
        double[][] quantizer = new double[numCentroids][centroidLength];
        // try-with-resources: the original leaked the reader when a read or parse
        // error occurred before the explicit close().
        try (BufferedReader in = new BufferedReader(new FileReader(filename))) {
            String line;
            int row = 0;
            while ((line = in.readLine()) != null) {
                // skip header lines
                if (!line.contains(",")) { // not a csv data line
                    continue;
                }
                String[] centerStrings = line.split(",");
                for (int i = 0; i < centerStrings.length; i++) {
                    quantizer[row][i] = Double.parseDouble(centerStrings[i]);
                }
                row++;
            }
        }
        return quantizer;
    }
}
public class FileConvert {
    /**
     * Tests whether a PDB serial has an insertion code, i.e. whether the string
     * is not parseable as a plain integer.
     *
     * @param pdbserial the serial string to test
     * @return true when integer parsing fails (an insertion code is present)
     */
    private static boolean hasInsertionCode(String pdbserial) {
        boolean parsesAsInt;
        try {
            Integer.parseInt(pdbserial);
            parsesAsInt = true;
        } catch (NumberFormatException e) {
            parsesAsInt = false;
        }
        return !parsesAsInt;
    }
}
public class ResetPasswordRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ResetPasswordRequest resetPasswordRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( resetPasswordRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( resetPasswordRequest . getOrganizationId ( ) , ORGANIZATIONID_BINDING ) ; protocolMarshaller . marshall ( resetPasswordRequest . getUserId ( ) , USERID_BINDING ) ; protocolMarshaller . marshall ( resetPasswordRequest . getPassword ( ) , PASSWORD_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class DBCursor { /** * Converts this cursor to an array .
* @ param max the maximum number of objects to return
* @ return an array of objects
* @ throws MongoException If an error occurred */
public List < T > toArray ( int max ) throws MongoException { } } | executed ( ) ; if ( max > all . size ( ) ) { List < DBObject > objects = cursor . toArray ( max ) ; for ( int i = all . size ( ) ; i < objects . size ( ) ; i ++ ) { all . add ( jacksonDBCollection . convertFromDbObject ( objects . get ( i ) ) ) ; } } return all ; |
public class CommerceVirtualOrderItemUtil { /** * Returns the first commerce virtual order item in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; .
* @ param uuid the uuid
* @ param companyId the company ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce virtual order item , or < code > null < / code > if a matching commerce virtual order item could not be found */
public static CommerceVirtualOrderItem fetchByUuid_C_First ( String uuid , long companyId , OrderByComparator < CommerceVirtualOrderItem > orderByComparator ) { } } | return getPersistence ( ) . fetchByUuid_C_First ( uuid , companyId , orderByComparator ) ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < }
* { @ link CmisExtensionType } { @ code > } */
@ XmlElementDecl ( namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/" , name = "extension" , scope = CheckInResponse . class ) public JAXBElement < CmisExtensionType > createCheckInResponseExtension ( CmisExtensionType value ) { } } | return new JAXBElement < CmisExtensionType > ( _GetPropertiesExtension_QNAME , CmisExtensionType . class , CheckInResponse . class , value ) ; |
public class StatsDReporter { @ Override public void report ( ) { } } | // instead of locking here , we tolerate exceptions
// we do this to prevent holding the lock for very long and blocking
// operator creation and shutdown
try { for ( Map . Entry < Gauge < ? > , String > entry : gauges . entrySet ( ) ) { if ( closed ) { return ; } reportGauge ( entry . getValue ( ) , entry . getKey ( ) ) ; } for ( Map . Entry < Counter , String > entry : counters . entrySet ( ) ) { if ( closed ) { return ; } reportCounter ( entry . getValue ( ) , entry . getKey ( ) ) ; } for ( Map . Entry < Histogram , String > entry : histograms . entrySet ( ) ) { reportHistogram ( entry . getValue ( ) , entry . getKey ( ) ) ; } for ( Map . Entry < Meter , String > entry : meters . entrySet ( ) ) { reportMeter ( entry . getValue ( ) , entry . getKey ( ) ) ; } } catch ( ConcurrentModificationException | NoSuchElementException e ) { // ignore - may happen when metrics are concurrently added or removed
// report next time
} |
public class JobSchedulesImpl { /** * Lists all of the job schedules in the specified account .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ param jobScheduleListNextOptions Additional parameters for the operation
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws BatchErrorException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the PagedList & lt ; CloudJobSchedule & gt ; object if successful . */
public PagedList < CloudJobSchedule > listNext ( final String nextPageLink , final JobScheduleListNextOptions jobScheduleListNextOptions ) { } } | ServiceResponseWithHeaders < Page < CloudJobSchedule > , JobScheduleListHeaders > response = listNextSinglePageAsync ( nextPageLink , jobScheduleListNextOptions ) . toBlocking ( ) . single ( ) ; return new PagedList < CloudJobSchedule > ( response . body ( ) ) { @ Override public Page < CloudJobSchedule > nextPage ( String nextPageLink ) { return listNextSinglePageAsync ( nextPageLink , jobScheduleListNextOptions ) . toBlocking ( ) . single ( ) . body ( ) ; } } ; |
public class ExceptionUtil { /** * 包装一个异常
* @ param throwable 异常
* @ param wrapThrowable 包装后的异常类
* @ return 包装后的异常
* @ since 3.3.0 */
@ SuppressWarnings ( "unchecked" ) public static < T extends Throwable > T wrap ( Throwable throwable , Class < T > wrapThrowable ) { } } | if ( wrapThrowable . isInstance ( throwable ) ) { return ( T ) throwable ; } return ReflectUtil . newInstance ( wrapThrowable , throwable ) ; |
public class AmazonEC2Client { /** * Adds a route to a network to a Client VPN endpoint . Each Client VPN endpoint has a route table that describes the
* available destination network routes . Each route in the route table specifies the path for traffic to specific
* resources or networks .
* @ param createClientVpnRouteRequest
* @ return Result of the CreateClientVpnRoute operation returned by the service .
* @ sample AmazonEC2 . CreateClientVpnRoute
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / CreateClientVpnRoute " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public CreateClientVpnRouteResult createClientVpnRoute ( CreateClientVpnRouteRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateClientVpnRoute ( request ) ; |
public class InnerRankUpdate_DDRB { /** * Performs the following operation on the lower triangular portion of a block : < br >
* < br >
* c = c - a * a < sup > T < / sup > < br > */
protected static void multTransBBlockMinus_L ( double [ ] dataA , double [ ] dataC , int indexA , int indexB , int indexC , final int widthA , final int heightA , final int widthC ) { } } | // for ( int i = 0 ; i < heightA ; i + + ) {
// for ( int j = 0 ; j < = i ; j + + ) {
// double sum = 0;
// for ( int k = 0 ; k < widthA ; k + + ) {
// sum + = dataA [ i * widthA + k + indexA ] * dataA [ j * widthA + k + indexB ] ;
// dataC [ i * widthC + j + indexC ] - = sum ;
for ( int i = 0 ; i < heightA ; i ++ ) { int rowA = i * widthA + indexA ; int endA = rowA + widthA ; int rowB = indexB ; int rowC = i * widthC + indexC ; for ( int j = 0 ; j <= i ; j ++ , rowB += widthA ) { double sum = 0 ; int a = rowA ; int b = rowB ; while ( a != endA ) { sum += dataA [ a ++ ] * dataA [ b ++ ] ; } dataC [ rowC + j ] -= sum ; } } |
public class TermStatementUpdate { /** * Retrieves the list of aliases that will be added in a
* given language , after all the optimizations have been done
* ( replacing empty labels by new aliases in the same language ,
* for instance ) .
* @ param language the language code of the added aliases
* @ return the list of added aliases */
public List < MonolingualTextValue > getAddedAliases ( String language ) { } } | AliasesWithUpdate update = newAliases . get ( language ) ; if ( update == null ) { return Collections . < MonolingualTextValue > emptyList ( ) ; } return update . added ; |
public class TabColumnPrefsHandler { /** * This method is called from the TabColumnPrefsState class after a node has already been moved
* from its old parent to its new in the ILF . We can get at the new parent via the compViewNode
* moved but need a separate handle of the parent from whence it came . The goal of this method
* is to make the appropriate change in the PLF to persist this action take by the user . For ILF
* nodes this generally means adding an entry to the position set for the new parent and
* removing any entry if it existed in the position set in the old parent . For nodes that are
* owned by the user ( PLF owned nodes ) the nodes are moved outright and now position set is
* needed unless the new parent contains ILF nodes as well requiring preservation of the user ' s
* ordering of the nodes for when the ILF and PLF are merged again later on . */
public static void moveElement ( Element compViewNode , Element oldCompViewParent , IPerson person ) throws PortalException { } } | if ( LOG . isInfoEnabled ( ) ) LOG . info ( "moving " + compViewNode . getAttribute ( Constants . ATT_ID ) ) ; Element compViewParent = ( Element ) compViewNode . getParentNode ( ) ; if ( oldCompViewParent != compViewParent ) { if ( LOG . isInfoEnabled ( ) ) LOG . info ( "reparenting from " + oldCompViewParent . getAttribute ( Constants . ATT_ID ) + " to " + compViewParent . getAttribute ( Constants . ATT_ID ) ) ; // update previous parent if found in PLF
Element plfParent = HandlerUtils . getPLFNode ( oldCompViewParent , person , // only needed if creating
false , // only look , don ' t create
false ) ; // also not needed
if ( plfParent != null ) { PositionManager . updatePositionSet ( oldCompViewParent , plfParent , person ) ; if ( LOG . isInfoEnabled ( ) ) LOG . info ( "updating old parent's position set" ) ; } } // now take care of the destination
Element plfParent = HandlerUtils . getPLFNode ( compViewParent , person , true , // create parent if not found
false ) ; // don ' t create children
if ( compViewNode . getAttribute ( Constants . ATT_ID ) . startsWith ( Constants . FRAGMENT_ID_USER_PREFIX ) ) { // ilf node being inserted
if ( LOG . isInfoEnabled ( ) ) LOG . info ( "ilf node being moved, only update new parent pos set" ) ; PositionManager . updatePositionSet ( compViewParent , plfParent , person ) ; } else { // plf node
if ( LOG . isInfoEnabled ( ) ) LOG . info ( "plf node being moved, updating old parent's position set" ) ; Document plf = ( Document ) person . getAttribute ( Constants . PLF ) ; HandlerUtils . createOrMovePLFOwnedNode ( compViewNode , compViewParent , false , // should always be found
false , // irrelevant , not creating
plf , plfParent , person ) ; } |
public class IndexBuilder { /** * Return the sorted list of members , for passed Unicode Character .
* @ param index index Unicode character .
* @ return List member list for specific Unicode character . */
public List < ? extends Element > getMemberList ( Character index ) { } } | SortedSet < Element > set = indexmap . get ( index ) ; if ( set == null ) return null ; List < Element > out = new ArrayList < > ( ) ; out . addAll ( set ) ; return out ; |
public class AtomixClusterBuilder { /** * Sets the reachability failure timeout .
* The reachability timeout determines the maximum time after which a member will be marked unreachable if heartbeats
* have failed .
* @ param timeout the reachability failure timeout
* @ return the cluster builder */
@ Deprecated public AtomixClusterBuilder withReachabilityTimeout ( Duration timeout ) { } } | GroupMembershipProtocolConfig protocolConfig = config . getProtocolConfig ( ) ; if ( protocolConfig instanceof HeartbeatMembershipProtocolConfig ) { ( ( HeartbeatMembershipProtocolConfig ) protocolConfig ) . setFailureTimeout ( timeout ) ; } return this ; |
public class DescribeConnectionsOnInterconnectRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DescribeConnectionsOnInterconnectRequest describeConnectionsOnInterconnectRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( describeConnectionsOnInterconnectRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeConnectionsOnInterconnectRequest . getInterconnectId ( ) , INTERCONNECTID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class GVRScene { /** * Performs case - sensitive search
* @ param name
* @ return null if nothing was found or name was null / empty */
public GVRSceneObject [ ] getSceneObjectsByName ( final String name ) { } } | if ( null == name || name . isEmpty ( ) ) { return null ; } return mSceneRoot . getSceneObjectsByName ( name ) ; |
public class RolloutGroupTargetsCountLabelMessage { /** * TenantAwareEvent Listener to show the message count .
* @ param event */
@ EventBusListenerMethod ( scope = EventScope . UI ) void onEvent ( final RolloutEvent event ) { } } | if ( event == RolloutEvent . SHOW_ROLLOUT_GROUP_TARGETS_COUNT ) { displayRolloutGroupTargetMessage ( ) ; } |
public class EnvironmentInformation { /** * Returns the version of the code as String . If version = = null , then the JobManager does not run from a
* Maven build . An example is a source code checkout , compile , and run from inside an IDE .
* @ return The version string . */
public static String getVersion ( ) { } } | String version = EnvironmentInformation . class . getPackage ( ) . getImplementationVersion ( ) ; return version != null ? version : UNKNOWN ; |
public class DuracloudUser { /** * Returns the set of all possible roles a user can play This method is
* implemented as part of the UserDetails interface (
* < code > UserDetails < / code > ) .
* @ return */
public Collection < GrantedAuthority > getAuthorities ( ) { } } | Set < GrantedAuthority > authorities = new HashSet < GrantedAuthority > ( ) ; for ( Role role : Role . ROLE_USER . getRoleHierarchy ( ) ) { authorities . add ( new SimpleGrantedAuthority ( role . name ( ) ) ) ; } if ( accountRights != null ) { for ( AccountRights rights : accountRights ) { Set < Role > roles = rights . getRoles ( ) ; if ( roles != null ) { for ( Role role : roles ) { authorities . add ( role . authority ( ) ) ; } } } } if ( isRoot ( ) ) { authorities . add ( new SimpleGrantedAuthority ( Role . ROLE_ROOT . name ( ) ) ) ; } return authorities ; |
public class MatchDatabase { /** * Use this only for test cases - it ' s Derby - specific . */
void createTables ( ) throws SQLException { } } | try ( PreparedStatement prepSt = conn . prepareStatement ( "CREATE TABLE pings (" + " language_code VARCHAR(5) NOT NULL," + " check_date TIMESTAMP NOT NULL" + ")" ) ) { prepSt . executeUpdate ( ) ; } try ( PreparedStatement prepSt = conn . prepareStatement ( "CREATE TABLE feed_checks (" + " language_code VARCHAR(5) NOT NULL," + " check_date TIMESTAMP NOT NULL" + ")" ) ) { prepSt . executeUpdate ( ) ; } try ( PreparedStatement prepSt = conn . prepareStatement ( "CREATE TABLE feed_matches (" + " id INT NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1)," + " language_code VARCHAR(5) NOT NULL," + " title VARCHAR(255) NOT NULL," + " rule_id VARCHAR(255) NOT NULL," + " rule_sub_id VARCHAR(255)," + " rule_description VARCHAR(255) NOT NULL," + " rule_message VARCHAR(255) NOT NULL," + " rule_category VARCHAR(255) NOT NULL," + " error_context VARCHAR(500) NOT NULL," + " edit_date TIMESTAMP NOT NULL," + " diff_id INT NOT NULL," + " fix_date TIMESTAMP," + " fix_diff_id INT" + ")" ) ) { prepSt . executeUpdate ( ) ; } |
public class EsStorage { /** * TODO : make put more lenient ( if the schema is not available just shove everything on the existing type or as a big charray ) */
@ Override public void putNext ( Tuple t ) throws IOException { } } | pigTuple . setTuple ( t ) ; if ( trace ) { log . trace ( "Writing out tuple " + t ) ; } try { writer . write ( null , pigTuple ) ; } catch ( InterruptedException ex ) { throw new EsHadoopIllegalArgumentException ( "interrupted" , ex ) ; } |
public class Yank { /** * Return a List of generic Object [ ] s given a SQL Key using an SQL statement matching the sqlKey
* String in a properties file loaded via Yank . addSQLStatements ( . . . ) .
* @ param poolName The name of the connection pool to query against
* @ param sqlKey The SQL Key found in a properties file corresponding to the desired SQL statement
* value
* @ param params The replacement parameters
* @ return The List of generic Object [ ] s
* @ throws SQLStatementNotFoundException if an SQL statement could not be found for the given
* sqlKey String */
public static List < Object [ ] > queryObjectArraysSQLKey ( String poolName , String sqlKey , Object [ ] params ) throws SQLStatementNotFoundException , YankSQLException { } } | String sql = YANK_POOL_MANAGER . getMergedSqlProperties ( ) . getProperty ( sqlKey ) ; if ( sql == null || sql . equalsIgnoreCase ( "" ) ) { throw new SQLStatementNotFoundException ( ) ; } else { return queryObjectArrays ( poolName , sql , params ) ; } |
public class MpMessages { /** * 预览图片消息
* @ param wxName
* @ param openId
* @ param image
* @ return */
public long imagePreview ( String wxName , String openId , String image ) { } } | return preview ( wxName , openId , "image" , image ) ; |
public class WPartialDateField { /** * The padding character used in the partial date value . The default padding character is a space . If the padding
* character is a space , then the date value will be right trimmed to remove the trailing spaces .
* @ param paddingChar the padding character used in the partial date value .
* @ deprecated will be removed so padding character is immutable */
@ Deprecated public void setPaddingChar ( final char paddingChar ) { } } | if ( Character . isDigit ( paddingChar ) ) { throw new IllegalArgumentException ( "Padding character should not be a digit." ) ; } getOrCreateComponentModel ( ) . paddingChar = paddingChar ; |
public class WorkbookReader { /** * Returns a { @ link ListMultimap } which represents the content of this
* workbook . Each sheet name is used as the key , and the value is a Collection
* of String List which contains all fields of a row .
* @ return { @ link ListMultimap } { @ literal < String , List < String > > } */
public ListMultimap < String , List < String > > toMultimap ( ) { } } | ListMultimap < String , List < String > > content = ArrayListMultimap . create ( ) ; String currentSheet = getCurrentSheetName ( ) ; boolean currentHeader = hasHeader ; for ( String name : getAllSheetNames ( ) ) { turnToSheet ( name ) ; withoutHeader ( ) ; for ( List < String > row : toLists ( ) ) { content . put ( name , row ) ; } } turnToSheet ( currentSheet ) ; hasHeader = currentHeader ; return content ; |
public class Sneaky { /** * Wrap a { @ link CheckedDoubleFunction } in a { @ link DoubleFunction } .
* Example :
* < code > < pre >
* DoubleStream . of ( 1.0 , 2.0 , 3.0 ) . mapToObj ( Unchecked . doubleFunction ( d - > {
* if ( d & lt ; 0.0)
* throw new Exception ( " Only positive numbers allowed " ) ;
* return " " + d ;
* < / pre > < / code > */
public static < R > DoubleFunction < R > doubleFunction ( CheckedDoubleFunction < R > function ) { } } | return Unchecked . doubleFunction ( function , Unchecked . RETHROW_ALL ) ; |
public class ServiceLocatorException { /** * Retrieves ( recursively ) the root cause exception .
* @ return the root cause exception . */
public Exception getRootCause ( ) { } } | if ( exception instanceof ServiceLocatorException ) { return ( ( ServiceLocatorException ) exception ) . getRootCause ( ) ; } return exception == null ? this : exception ; |
public class AnycastOutputHandler { /** * Helper method called by the AOStream when a persistent tick representing a persistently locked
* message should be removed since we are flushing or cleaning up state .
* @ param t the transaction
* @ param sinfo the stream this msgs is on
* @ param storedTick the persistent tick
* @ throws SIResourceException
* @ throws Exception */
public final void cleanupTicks ( StreamInfo sinfo , TransactionCommon t , ArrayList valueTicks ) throws MessageStoreException , SIResourceException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "cleanupTicks" , new Object [ ] { sinfo , t , valueTicks } ) ; try { int length = valueTicks . size ( ) ; for ( int i = 0 ; i < length ; i ++ ) { AOValue storedTick = ( AOValue ) valueTicks . get ( i ) ; // If we are here then we do not know which consumerDispatcher originally
// persistently locked the message . We therefore have to use the meUuid in
// the AOValue to find / reconstitute the consumerDispatcher associated with it . This
// potentially involves creating AIHs which is not ideal .
ConsumerDispatcher cd = null ; if ( storedTick . getSourceMEUuid ( ) == null || storedTick . getSourceMEUuid ( ) . equals ( getMessageProcessor ( ) . getMessagingEngineUuid ( ) ) ) { cd = ( ConsumerDispatcher ) destinationHandler . getLocalPtoPConsumerManager ( ) ; } else { AnycastInputHandler aih = destinationHandler . getAnycastInputHandler ( storedTick . getSourceMEUuid ( ) , null , true ) ; cd = aih . getRCD ( ) ; } SIMPMessage msg = null ; synchronized ( storedTick ) { msg = ( SIMPMessage ) cd . getMessageByValue ( storedTick ) ; if ( msg == null ) { storedTick . setToBeFlushed ( ) ; } } Transaction msTran = mp . resolveAndEnlistMsgStoreTransaction ( t ) ; if ( msg != null && msg . getLockID ( ) == storedTick . getPLockId ( ) ) msg . unlockMsg ( storedTick . getPLockId ( ) , msTran , true ) ; storedTick . lockItemIfAvailable ( controlItemLockID ) ; // should always be successful
storedTick . remove ( msTran , controlItemLockID ) ; } } catch ( MessageStoreException e ) { // No FFDC code needed
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "cleanupTicks" , e ) ; throw e ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "cleanupTicks" ) ; |
public class DefaultTypeConverter { /** * { @ inheritDoc } */
@ SuppressWarnings ( "unchecked" ) @ Override public < T > T convertTo ( final Class < T > targetType , final String data , final String ... optionalFormatPattern ) { } } | Conversion < ? > conversion = CONVERSIONS . get ( targetType ) ; assert conversion != null : "Method caller must check existence of conversion. (" + targetType . getName ( ) + ")" ; if ( data == null ) { return ( T ) conversion . getDefaultValue ( data ) ; } if ( ( optionalFormatPattern != null ) && ( optionalFormatPattern . length > 0 ) && ( optionalFormatPattern [ 0 ] != null ) ) { return ( T ) conversion . convertWithPattern ( data , optionalFormatPattern [ 0 ] ) ; } return ( T ) conversion . convert ( data ) ; |
public class FessMessages { /** * Add the created action message for the key ' success . upload _ kuromoji _ file ' with parameters .
* < pre >
* message : Uploaded Kuromoji file .
* < / pre >
* @ param property The property name for the message . ( NotNull )
* @ return this . ( NotNull ) */
public FessMessages addSuccessUploadKuromojiFile ( String property ) { } } | assertPropertyNotNull ( property ) ; add ( property , new UserMessage ( SUCCESS_upload_kuromoji_file ) ) ; return this ; |
public class RawDataBuffer { /** * Write a generic type to the stream */
public void writeGeneric ( Object value ) { } } | if ( value == null ) write ( NULL_VALUE ) ; else { if ( value instanceof String ) { writeByte ( TYPE_STRING ) ; writeUTF ( ( String ) value ) ; } else if ( value instanceof Boolean ) { writeByte ( TYPE_BOOLEAN ) ; writeBoolean ( ( ( Boolean ) value ) . booleanValue ( ) ) ; } else if ( value instanceof Byte ) { writeByte ( TYPE_BYTE ) ; writeByte ( ( ( Byte ) value ) . byteValue ( ) ) ; } else if ( value instanceof Short ) { writeByte ( TYPE_SHORT ) ; writeShort ( ( ( Short ) value ) . shortValue ( ) ) ; } else if ( value instanceof Integer ) { writeByte ( TYPE_INT ) ; writeInt ( ( ( Integer ) value ) . intValue ( ) ) ; } else if ( value instanceof Long ) { writeByte ( TYPE_LONG ) ; writeLong ( ( ( Long ) value ) . longValue ( ) ) ; } else if ( value instanceof Float ) { writeByte ( TYPE_FLOAT ) ; writeFloat ( ( ( Float ) value ) . floatValue ( ) ) ; } else if ( value instanceof Double ) { writeByte ( TYPE_DOUBLE ) ; writeDouble ( ( ( Double ) value ) . doubleValue ( ) ) ; } else if ( value instanceof byte [ ] ) { writeByte ( TYPE_BYTEARRAY ) ; writeByteArray ( ( byte [ ] ) value ) ; } else if ( value instanceof Character ) { writeByte ( TYPE_CHARACTER ) ; writeChar ( ( ( Character ) value ) . charValue ( ) ) ; } else throw new IllegalArgumentException ( "Unsupported type : " + value . getClass ( ) . getName ( ) ) ; |
public class AuthorizationRequestManager { /** * Builds an authorization request and sends it . It also caches the request url and request options in
* order to be able to re - send the request when authorization challenges have been handled .
* @ param rootUrl Root of authorization server .
* @ param path Path to authorization endpoint .
* @ param options BaseRequest options .
* @ throws IOException
* @ throws JSONException */
private void sendRequestInternal ( String rootUrl , String path , RequestOptions options ) throws IOException , JSONException { } } | logger . debug ( "Sending request to root: " + rootUrl + " with path: " + path ) ; // create default options object with GET request method .
if ( options == null ) { options = new RequestOptions ( ) ; } // used to resend request
this . requestPath = Utils . concatenateUrls ( rootUrl , path ) ; this . requestOptions = options ; AuthorizationRequest request = new AuthorizationRequest ( this . requestPath , options . requestMethod ) ; if ( options . timeout != 0 ) { request . setTimeout ( options . timeout ) ; } else { request . setTimeout ( BMSClient . getInstance ( ) . getDefaultTimeout ( ) ) ; } if ( options . headers != null ) { for ( Map . Entry < String , String > entry : options . headers . entrySet ( ) ) { request . addHeader ( entry . getKey ( ) , entry . getValue ( ) ) ; } } if ( answers != null ) { // 0 means no spaces in the generated string
String answer = answers . toString ( 0 ) ; String authorizationHeaderValue = String . format ( "Bearer %s" , answer . replace ( "\n" , "" ) ) ; request . addHeader ( "Authorization" , authorizationHeaderValue ) ; logger . debug ( "Added authorization header to request: " + authorizationHeaderValue ) ; } if ( Request . GET . equalsIgnoreCase ( options . requestMethod ) ) { request . setQueryParameters ( options . parameters ) ; request . send ( this ) ; } else { request . send ( options . parameters , this ) ; } |
public class ConnectionsInner { /** * Retrieve the connection identified by connection name .
* @ param resourceGroupName Name of an Azure Resource group .
* @ param automationAccountName The name of the automation account .
* @ param connectionName The name of connection .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws ErrorResponseException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the ConnectionInner object if successful . */
public ConnectionInner get ( String resourceGroupName , String automationAccountName , String connectionName ) { } } | return getWithServiceResponseAsync ( resourceGroupName , automationAccountName , connectionName ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class Cookies { /** * Parse one cookie header value , return the cookie .
* @ param host should be lower case
* @ return return null means is not a valid cookie str . */
@ Nullable public static Cookie parseCookie ( String cookieStr , String host , String defaultPath ) { } } | String [ ] items = cookieStr . split ( ";" ) ; Parameter < String > param = parseCookieNameValue ( items [ 0 ] ) ; if ( param == null ) { return null ; } String domain = "" ; String path = "" ; long expiry = 0 ; boolean secure = false ; for ( String item : items ) { item = item . trim ( ) ; if ( item . isEmpty ( ) ) { continue ; } Parameter < String > attribute = parseCookieAttribute ( item ) ; switch ( attribute . name ( ) . toLowerCase ( ) ) { case "domain" : domain = normalizeDomain ( attribute . value ( ) ) ; break ; case "path" : path = normalizePath ( attribute . value ( ) ) ; break ; case "expires" : // If a cookie has both the Max - Age and the Expires attribute , the Max - Age attribute has precedence
// and controls the expiration date of the cookie .
if ( expiry == 0 ) { Date date = CookieDateUtil . parseDate ( attribute . value ( ) ) ; if ( date != null ) { expiry = date . getTime ( ) ; if ( expiry == 0 ) { expiry = 1 ; } } } break ; case "max-age" : try { int seconds = Integer . parseInt ( attribute . value ( ) ) ; expiry = System . currentTimeMillis ( ) + seconds * 1000 ; if ( expiry == 0 ) { expiry = 1 ; } } catch ( NumberFormatException ignore ) { } break ; case "secure" : secure = true ; break ; case "httponly" : // ignore http only now
break ; default : } } if ( path . isEmpty ( ) ) { path = defaultPath ; } boolean hostOnly ; if ( domain . isEmpty ( ) ) { domain = host ; hostOnly = true ; } else { if ( isIP ( host ) ) { // should not set
return null ; } if ( ! isDomainSuffix ( host , domain ) ) { return null ; } hostOnly = false ; } return new Cookie ( domain , path , param . name ( ) , param . value ( ) , expiry , secure , hostOnly ) ; |
public class TimeItem { /** * Gets the current time set in this TimeItem .
* @ return A new Calendar containing the time . */
public Calendar getTime ( ) { } } | Calendar result = Calendar . getInstance ( ) ; result . set ( Calendar . HOUR_OF_DAY , hour ) ; result . set ( Calendar . MINUTE , minute ) ; return result ; |
public class ParallelRunner { /** * Serialize a { @ link State } object into a file .
* This method submits a task to serialize the { @ link State } object and returns immediately
* after the task is submitted .
* @ param state the { @ link State } object to be serialized
* @ param outputFilePath the file to write the serialized { @ link State } object to
* @ param < T > the { @ link State } object type */
public < T extends State > void serializeToFile ( final T state , final Path outputFilePath ) { } } | // Use a Callable with a Void return type to allow exceptions to be thrown
this . futures . add ( new NamedFuture ( this . executor . submit ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { SerializationUtils . serializeState ( ParallelRunner . this . fs , outputFilePath , state ) ; return null ; } } ) , "Serialize state to " + outputFilePath ) ) ; |
public class FNCImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setRetired ( Integer newRetired ) { } } | Integer oldRetired = retired ; retired = newRetired ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . FNC__RETIRED , oldRetired , retired ) ) ; |
public class Request { /** * A private method that loops through the query parameter Map , building a String to be appended to the URL .
* @ throws MalformedURLException */
private void buildQueryString ( ) throws MalformedURLException { } } | StringBuilder builder = new StringBuilder ( ) ; // Put the query parameters on the URL before issuing the request
if ( ! query . isEmpty ( ) ) { for ( Map . Entry < String , String > param : query . entrySet ( ) ) { builder . append ( param . getKey ( ) ) ; builder . append ( "=" ) ; builder . append ( param . getValue ( ) ) ; builder . append ( "&" ) ; } builder . deleteCharAt ( builder . lastIndexOf ( "&" ) ) ; // Remove the trailing ampersand
} if ( builder . length ( ) > 0 ) { // If there was any query string at all , begin it with the question mark
builder . insert ( 0 , "?" ) ; } url = new URL ( url . toString ( ) + builder . toString ( ) ) ; |
public class QDate { /** * Calculates the year , the dayOfYear and whether this is a leap year
* from the current days since the epoch . */
private void calculateYear ( ) { } } | long days = _dayOfEpoch ; // shift to using 1601 as a base
days += ( 1970 - 1601 ) * 365 + ( 1970 - 1601 ) / 4 - 3 ; long n400 = divFloor ( days , 400 * 365 + 100 - 3 ) ; days -= n400 * ( 400 * 365 + 100 - 3 ) ; long n100 = divFloor ( days , 100 * 365 + 25 - 1 ) ; if ( n100 == 4 ) n100 = 3 ; days -= n100 * ( 100 * 365 + 25 - 1 ) ; long n4 = divFloor ( days , 4 * 365 + 1 ) ; if ( n4 == 25 ) n4 = 24 ; days -= n4 * ( 4 * 365 + 1 ) ; long n1 = divFloor ( days , 365 ) ; if ( n1 == 4 ) n1 = 3 ; _year = 400 * n400 + 100 * n100 + 4 * n4 + n1 + 1601 ; _dayOfYear = ( int ) ( days - 365 * n1 ) ; _isLeapYear = isLeapYear ( _year ) ; |
public class DBConn { /** * an DBNotAvailableException and leave the Thrift connection null . */
private void reconnect ( Exception reconnectEx ) { } } | // Log the exception as a warning .
m_logger . warn ( "Reconnecting to Cassandra due to error" , reconnectEx ) ; // Reconnect up to the configured number of times , waiting a little between each attempt .
boolean bSuccess = false ; for ( int attempt = 1 ; ! bSuccess ; attempt ++ ) { try { close ( ) ; m_dbService . connectDBConn ( this ) ; m_logger . debug ( "Reconnect successful" ) ; bSuccess = true ; } catch ( Exception ex ) { // Abort if all retries failed .
if ( attempt >= m_max_reconnect_attempts ) { m_logger . error ( "All reconnect attempts failed; abandoning reconnect" , ex ) ; throw new DBNotAvailableException ( "All reconnect attempts failed" , ex ) ; } m_logger . warn ( "Reconnect attempt #" + attempt + " failed" , ex ) ; try { Thread . sleep ( m_retry_wait_millis * attempt ) ; } catch ( InterruptedException e ) { // Ignore
} } } |
public class WebContainer { /** * begin 272738 Duplicate CacheServletWrappers when url - rewriting is enabled WAS . webcontainer */
private PathInfoHelper removeExtraPathInfo ( String pathInfo ) { } } | if ( pathInfo == null ) return null ; int semicolon = pathInfo . indexOf ( ';' ) ; if ( semicolon != - 1 ) { String tmpPathInfo = pathInfo . substring ( 0 , semicolon ) ; String extraPathInfo = pathInfo . substring ( semicolon ) ; return new PathInfoHelper ( tmpPathInfo , extraPathInfo ) ; } return new PathInfoHelper ( pathInfo , null ) ; |
public class CommandLineProcess { /** * if you find a better way - go ahead and replace it */
private void printErrors ( ) { } } | if ( errorLog . isFile ( ) ) { FileReader fileReader ; try { fileReader = new FileReader ( errorLog ) ; BufferedReader bufferedReader = new BufferedReader ( fileReader ) ; String currLine ; while ( ( currLine = bufferedReader . readLine ( ) ) != null ) { logger . debug ( currLine ) ; } fileReader . close ( ) ; } catch ( Exception e ) { logger . warn ( "Error printing cmd command errors {} " , e . getMessage ( ) ) ; logger . debug ( "Error: {}" , e . getStackTrace ( ) ) ; } finally { try { FileUtils . forceDelete ( errorLog ) ; } catch ( IOException e ) { logger . warn ( "Error closing cmd command errors file {} " , e . getMessage ( ) ) ; logger . debug ( "Error: {}" , e . getStackTrace ( ) ) ; } } } |
public class JMResources { /** * Gets resource input stream for zip .
* @ param zipFile the zip file
* @ param entryName the entry name
* @ return the resource input stream for zip */
public static InputStream getResourceInputStreamForZip ( ZipFile zipFile , String entryName ) { } } | try { return zipFile . getInputStream ( zipFile . getEntry ( entryName ) ) ; } catch ( Exception e ) { return JMExceptionManager . handleExceptionAndReturnNull ( log , e , "newZipFile" , zipFile , entryName ) ; } |
public class EncodingUtilsImpl { /** * @ see com . ibm . websphere . http . EncodingUtils # stripQuotes ( java . lang . String ) */
@ Override public String stripQuotes ( String value ) { } } | if ( null == value ) { return null ; } String modvalue = value . trim ( ) ; if ( 0 == modvalue . length ( ) ) { return modvalue ; } boolean needTrimming = false ; int start = 0 ; if ( '"' == modvalue . charAt ( 0 ) || '\'' == modvalue . charAt ( 0 ) ) { start = 1 ; needTrimming = true ; } int end = modvalue . length ( ) - 1 ; if ( '"' == modvalue . charAt ( end ) || '\'' == modvalue . charAt ( end ) ) { needTrimming = true ; } else { end ++ ; } if ( needTrimming ) { return modvalue . substring ( start , end ) ; } return modvalue ; |
public class BSONDataEncoder { /** * Write a row to the file .
* @ param row A row of data .
* @ throws java . io . IOException Thrown when there are problems writing to the
* destination . */
@ Override protected void writeToOutput ( final Map < String , Object > row ) throws IOException { } } | if ( generator == null ) { ObjectMapper mapper = new ObjectMapper ( ) ; BsonFactory factory = new BsonFactory ( mapper ) ; factory . enable ( BsonGenerator . Feature . ENABLE_STREAMING ) ; generator = factory . createJsonGenerator ( getOutputStream ( ) ) ; generator . writeStartArray ( ) ; } // Convert the tuple into a map
generator . writeObject ( row ) ; |
public class ResourceChangeDetailMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ResourceChangeDetail resourceChangeDetail , ProtocolMarshaller protocolMarshaller ) { } } | if ( resourceChangeDetail == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( resourceChangeDetail . getTarget ( ) , TARGET_BINDING ) ; protocolMarshaller . marshall ( resourceChangeDetail . getEvaluation ( ) , EVALUATION_BINDING ) ; protocolMarshaller . marshall ( resourceChangeDetail . getCausingEntity ( ) , CAUSINGENTITY_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class GenericsUtils { /** * Resolves top class for provided type ( for example , for generified classes like { @ code List < T > } it
* returns base type List ) .
* Note : may return primitive because it might be important to differentiate actual value .
* Use { @ link TypeUtils # wrapPrimitive ( Class ) } to box possible primitive , if required .
* @ param type type to resolve
* @ param generics root class generics mapping
* @ return resolved class
* @ throws UnknownGenericException when found generic not declared on type ( e . g . method generic )
* @ see # resolveClass ( Type ) shortcut for types without variables
* @ see # resolveClassIgnoringVariables ( Type ) shortcut to resolve class ignoring passible variables */
public static Class < ? > resolveClass ( final Type type , final Map < String , Type > generics ) { } } | final Class < ? > res ; if ( type instanceof Class ) { res = ( Class ) type ; } else if ( type instanceof ExplicitTypeVariable ) { res = resolveClass ( ( ( ExplicitTypeVariable ) type ) . getBounds ( ) [ 0 ] , generics ) ; } else if ( type instanceof ParameterizedType ) { res = resolveClass ( ( ( ParameterizedType ) type ) . getRawType ( ) , generics ) ; } else if ( type instanceof TypeVariable ) { res = resolveClass ( declaredGeneric ( ( TypeVariable ) type , generics ) , generics ) ; } else if ( type instanceof WildcardType ) { final Type [ ] upperBounds = ( ( WildcardType ) type ) . getUpperBounds ( ) ; res = resolveClass ( upperBounds [ 0 ] , generics ) ; } else { res = ArrayTypeUtils . toArrayClass ( resolveClass ( ( ( GenericArrayType ) type ) . getGenericComponentType ( ) , generics ) ) ; } return res ; |
public class ExceptionSoftener { /** * Soften a CheckedIntSupplier to an IntSupplier that doesn ' t need to declare any checked exceptions thrown
* e . g .
* < pre >
* { @ code
* IntSupplier supplier = ExceptionSoftener . softenIntSupplier ( ( ) - > { throw new IOException ( ) ; } )
* supplier . getAsInt ( ) ; / / throws IOException but doesn ' t need to declare it
* / / as a method reference
* ExceptionSoftener . softenIntSupplier ( this : : getInt ) ;
* < / pre >
* @ param s CheckedIntSupplier to soften
* @ return IntSupplier that can throw checked exceptions */
public static IntSupplier softenIntSupplier ( final CheckedIntSupplier s ) { } } | return ( ) -> { try { return s . getAsInt ( ) ; } catch ( final Throwable e ) { throw throwSoftenedException ( e ) ; } } ; |
public class RecordCacheHandler { /** * Cache the target field using the current record . */
public void putCacheField ( ) { } } | try { Record record = this . getOwner ( ) ; Object objKey = record . getHandle ( DBConstants . OBJECT_ID_HANDLE ) ; m_hsCache . add ( objKey ) ; } catch ( DBException ex ) { ex . printStackTrace ( ) ; } |
public class ImageInfo { /** * CHECKSTYLE IGNORE LINE : ReturnCount */
public static boolean isImage ( Media media ) { } } | try { for ( final ImageHeaderReader reader : FORMATS ) { if ( reader . is ( media ) ) { return true ; } } return false ; } catch ( @ SuppressWarnings ( "unused" ) final LionEngineException exception ) { return false ; } |
public class PropertyLookup { /** * Reads propFile and then checks if specified key exists .
* @ param propKey property name
* @ param propFile property file
* @ return file if a property with that name exists . If an exception occurs while reading
* the file , false is returned . */
public static boolean hasProperty ( final String propKey , final File propFile ) { } } | if ( null == propKey ) throw new IllegalArgumentException ( "propKey param was null" ) ; if ( null == propFile ) throw new IllegalArgumentException ( "propFile param was null" ) ; if ( propFile . exists ( ) ) { final Properties p = new Properties ( ) ; try { FileInputStream fis = new FileInputStream ( propFile ) ; try { p . load ( fis ) ; } finally { if ( null != fis ) { fis . close ( ) ; } } return p . containsKey ( propKey ) ; } catch ( IOException e ) { return false ; } } else { return false ; } |
public class DefaultManagementContext { /** * Create a ServiceManagementBean for a resource address on the local Gateway instance .
* XXX We need to do something more if we ' re going to support some idea of storing services from another Gateway instance in
* the same repository , as we won ' t generally have ServiceContext to work with ( since the other instance will be in a
* different process , perhaps on a different machine . ) */
@ Override public void addServiceManagementBean ( ServiceContext serviceContext ) { } } | GatewayManagementBean gatewayManagementBean = getLocalGatewayManagementBean ( ) ; ServiceManagementBean serviceManagementBean = serviceManagmentBeanFactory . newServiceManagementBean ( serviceContext . getServiceType ( ) , gatewayManagementBean , serviceContext ) ; ServiceManagementBean tempBean = serviceManagementBeans . putIfAbsent ( serviceContext , serviceManagementBean ) ; if ( tempBean == null ) { // A bean was not already created for this service
for ( ManagementServiceHandler handler : managementServiceHandlers ) { handler . addServiceManagementBean ( serviceManagementBean ) ; } } |
public class CommonOps_DDRM { /** * < p > Performs the following operation : < br >
* < br >
* a = a + & beta ; * b < br >
* a < sub > ij < / sub > = a < sub > ij < / sub > + & beta ; * b < sub > ij < / sub >
* @ param beta The number that matrix ' b ' is multiplied by .
* @ param a A Matrix . Modified .
* @ param b A Matrix . Not modified . */
public static void addEquals ( DMatrixD1 a , double beta , DMatrixD1 b ) { } } | if ( a . numCols != b . numCols || a . numRows != b . numRows ) { throw new MatrixDimensionException ( "The 'a' and 'b' matrices do not have compatible dimensions" ) ; } final int length = a . getNumElements ( ) ; for ( int i = 0 ; i < length ; i ++ ) { a . plus ( i , beta * b . get ( i ) ) ; } |
public class MarginLayoutHelper { /** * set paddings for this layoutHelper
* @ param leftPadding left padding
* @ param topPadding top padding
* @ param rightPadding right padding
* @ param bottomPadding bottom padding */
public void setPadding ( int leftPadding , int topPadding , int rightPadding , int bottomPadding ) { } } | mPaddingLeft = leftPadding ; mPaddingRight = rightPadding ; mPaddingTop = topPadding ; mPaddingBottom = bottomPadding ; |
public class FactoryFiducialCalibration { /** * Detector for chessboard targets . Squares can be partially outside , but inside corners must be entirely
* inside the image .
* @ see DetectChessboardFiducial
* @ param configDet Configuration for chessboard detector
* @ return Square grid target detector . */
@ Deprecated public static CalibrationDetectorChessboard chessboard ( @ Nullable ConfigChessboard configDet , ConfigGridDimen configGrid ) { } } | if ( configDet == null ) configDet = new ConfigChessboard ( ) ; configDet . checkValidity ( ) ; return new CalibrationDetectorChessboard ( configDet , configGrid ) ; |
public class LocalizationPopulator { /** * Adds all values in all namespaces from those specified in the property files for that
* namespace .
* < p > Already existing values are left exactly how they are . All new values are written to the
* repository .
* < p > If no { @ link L10nString } exists yet for a certain messageID , a new one will be added . */
@ Transactional public void populateLocalizationStrings ( AllPropertiesMessageSource source ) { } } | source . getAllMessageIds ( ) . asMap ( ) . forEach ( ( namespace , messageIds ) -> updateNamespace ( source , namespace , ImmutableSet . copyOf ( messageIds ) ) ) ; |
public class DoubleMatrix { /** * Scalar multiplies each item with c
* @ param c */
public void scalarMultiply ( double c ) { } } | int m = rows ; int n = cols ; for ( int i = 0 ; i < m ; i ++ ) { for ( int j = 0 ; j < n ; j ++ ) { consumer . set ( i , j , c * supplier . get ( i , j ) ) ; } } |
public class FileAwareInputStreamDataWriter { /** * Sets the { @ link FsPermission } , owner , group for the path passed . It will not throw exceptions , if operations
* cannot be executed , will warn and continue . */
private void safeSetPathPermission ( Path path , OwnerAndPermission ownerAndPermission ) { } } | try { if ( ownerAndPermission . getFsPermission ( ) != null ) { this . fs . setPermission ( path , ownerAndPermission . getFsPermission ( ) ) ; } } catch ( IOException ioe ) { log . warn ( "Failed to set permission for directory " + path , ioe ) ; } String owner = Strings . isNullOrEmpty ( ownerAndPermission . getOwner ( ) ) ? null : ownerAndPermission . getOwner ( ) ; String group = Strings . isNullOrEmpty ( ownerAndPermission . getGroup ( ) ) ? null : ownerAndPermission . getGroup ( ) ; try { if ( owner != null || group != null ) { this . fs . setOwner ( path , owner , group ) ; } } catch ( IOException ioe ) { log . warn ( "Failed to set owner and/or group for path " + path , ioe ) ; } |
public class LabelOperationMetadata { /** * < code >
* . google . cloud . datalabeling . v1beta1 . LabelImageBoundingPolyOperationMetadata image _ bounding _ poly _ details = 11;
* < / code > */
public com . google . cloud . datalabeling . v1beta1 . LabelImageBoundingPolyOperationMetadata getImageBoundingPolyDetails ( ) { } } | if ( detailsCase_ == 11 ) { return ( com . google . cloud . datalabeling . v1beta1 . LabelImageBoundingPolyOperationMetadata ) details_ ; } return com . google . cloud . datalabeling . v1beta1 . LabelImageBoundingPolyOperationMetadata . getDefaultInstance ( ) ; |
public class ParseContext { /** * Skips all whitespaces until a non whitespace character is occurring . If
* the next character is not whitespace this method does nothing .
* @ return the new parse index after skipping any whitespaces .
* @ see Character # isWhitespace ( char ) */
public int skipWhitespace ( ) { } } | for ( int i = index ; i < originalInput . length ( ) ; i ++ ) { char ch = originalInput . charAt ( i ) ; if ( Character . isSpaceChar ( ch ) ) { index ++ ; } else { break ; } } return index ; |
public class NeuralNetworkParser { /** * 初始化特征空间的长度等信息 */
void build_feature_space ( ) { } } | kFormInFeaturespace = 0 ; kNilForm = forms_alphabet . idOf ( SpecialOption . NIL ) ; kFeatureSpaceEnd = forms_alphabet . size ( ) ; kPostagInFeaturespace = kFeatureSpaceEnd ; kNilPostag = kFeatureSpaceEnd + postags_alphabet . idOf ( SpecialOption . NIL ) ; kFeatureSpaceEnd += postags_alphabet . size ( ) ; kDeprelInFeaturespace = kFeatureSpaceEnd ; kNilDeprel = kFeatureSpaceEnd + deprels_alphabet . idOf ( SpecialOption . NIL ) ; kFeatureSpaceEnd += deprels_alphabet . size ( ) ; kDistanceInFeaturespace = kFeatureSpaceEnd ; kNilDistance = kFeatureSpaceEnd + ( use_distance ? 8 : 0 ) ; kFeatureSpaceEnd += ( use_distance ? 9 : 0 ) ; kValencyInFeaturespace = kFeatureSpaceEnd ; kNilValency = kFeatureSpaceEnd + ( use_valency ? 8 : 0 ) ; kFeatureSpaceEnd += ( use_valency ? 9 : 0 ) ; kCluster4InFeaturespace = kFeatureSpaceEnd ; if ( use_cluster ) { kNilCluster4 = kFeatureSpaceEnd + cluster4_types_alphabet . idOf ( SpecialOption . NIL ) ; kFeatureSpaceEnd += cluster4_types_alphabet . size ( ) ; } else { kNilCluster4 = kFeatureSpaceEnd ; } kCluster6InFeaturespace = kFeatureSpaceEnd ; if ( use_cluster ) { kNilCluster6 = kFeatureSpaceEnd + cluster6_types_alphabet . idOf ( SpecialOption . NIL ) ; kFeatureSpaceEnd += cluster6_types_alphabet . size ( ) ; } else { kNilCluster6 = kFeatureSpaceEnd ; } kClusterInFeaturespace = kFeatureSpaceEnd ; if ( use_cluster ) { kNilCluster = kFeatureSpaceEnd + cluster_types_alphabet . idOf ( SpecialOption . NIL ) ; kFeatureSpaceEnd += cluster_types_alphabet . size ( ) ; } else { kNilCluster = kFeatureSpaceEnd ; } |
public class Aromaticity { /** * Obtain a subset of the vertices which can contribute { @ code electrons }
* and are allowed to be involved in an aromatic system .
* @ param electrons electron contribution
* @ return vertices which can be involved in an aromatic system */
private static int [ ] subset ( final int [ ] electrons ) { } } | int [ ] vs = new int [ electrons . length ] ; int n = 0 ; for ( int i = 0 ; i < electrons . length ; i ++ ) if ( electrons [ i ] >= 0 ) vs [ n ++ ] = i ; return Arrays . copyOf ( vs , n ) ; |
public class PanelRenderer { /** * This methods generates the HTML code of the current b : panel .
* < code > encodeBegin < / code > generates the start of the component . After the ,
* the JSF framework calls < code > encodeChildren ( ) < / code > to generate the
* HTML code between the beginning and the end of the component . For
* instance , in the case of a panel component the content of the panel is
* generated by < code > encodeChildren ( ) < / code > . After that ,
* < code > encodeEnd ( ) < / code > is called to generate the rest of the HTML code .
* @ param context
* the FacesContext .
* @ param component
* the current b : panel .
* @ throws IOException
* thrown if something goes wrong when writing the HTML code . */
@ Override public void encodeBegin ( FacesContext context , UIComponent component ) throws IOException { } } | if ( ! component . isRendered ( ) ) { return ; } Panel panel = ( Panel ) component ; ResponseWriter rw = context . getResponseWriter ( ) ; String clientId = panel . getClientId ( ) ; String jQueryClientID = clientId . replace ( ":" , "_" ) ; boolean isCollapsible = panel . isCollapsible ( ) ; String accordionParent = panel . getAccordionParent ( ) ; String responsiveCSS = Responsive . getResponsiveStyleClass ( panel , false ) . trim ( ) ; boolean isResponsive = responsiveCSS . length ( ) > 0 ; if ( null == accordionParent && ( isCollapsible || isResponsive ) ) { rw . startElement ( "div" , panel ) ; rw . writeAttribute ( "class" , "panel-group " + responsiveCSS , null ) ; rw . writeAttribute ( "id" , clientId , "id" ) ; } String _look = panel . getLook ( ) ; String _title = panel . getTitle ( ) ; String _titleClass = panel . getTitleClass ( ) ; String _styleClass = panel . getStyleClass ( ) ; if ( null == _styleClass ) { _styleClass = "" ; } else { _styleClass += " " ; } String icon = panel . getIcon ( ) ; String faicon = panel . getIconAwesome ( ) ; boolean fa = false ; // flag to indicate whether the selected icon set is
// Font Awesome or not .
if ( faicon != null ) { icon = faicon ; fa = true ; } rw . startElement ( "div" , panel ) ; if ( ! ( ( isCollapsible || isResponsive ) && null == accordionParent ) ) { rw . writeAttribute ( "id" , clientId , "id" ) ; } writeAttribute ( rw , "dir" , panel . getDir ( ) , "dir" ) ; // render all data - * attributes
renderPassThruAttributes ( context , component , null , true ) ; AJAXRenderer . generateBootsFacesAJAXAndJavaScript ( context , panel , rw , false ) ; Tooltip . generateTooltip ( context , panel , rw ) ; String _style = panel . getStyle ( ) ; if ( null != _style && _style . length ( ) > 0 ) { rw . writeAttribute ( "style" , _style , "style" ) ; } if ( _look != null ) { rw . writeAttribute ( "class" , _styleClass + "panel panel-" + _look , "class" ) ; } else { rw . writeAttribute ( "class" , _styleClass + "panel panel-default" , "class" ) ; } UIComponent head = panel . getFacet ( "heading" ) ; if ( head != null || _title != null ) { rw . startElement ( "div" , panel ) ; rw . writeAttribute ( "class" , "panel-heading" , "class" ) ; String _titleStyle = panel . getTitleStyle ( ) ; if ( null != _titleStyle ) { rw . writeAttribute ( "style" , _titleStyle , "style" ) ; } if ( _title != null ) { rw . startElement ( "h4" , panel ) ; if ( _titleClass != null ) { rw . writeAttribute ( "class" , _titleClass , "class" ) ; } else { rw . writeAttribute ( "class" , "panel-title" , "class" ) ; } if ( isCollapsible ) { writeTitleLink ( panel , rw , jQueryClientID , accordionParent ) ; } if ( icon != null ) { Object ialign = panel . getIconAlign ( ) ; // Default Left
if ( ialign != null && ialign . equals ( "right" ) ) { _title = _title != null ? _title + " " : null ; writeText ( rw , _title , null ) ; IconRenderer . encodeIcon ( rw , component , icon , fa , panel . getIconSize ( ) , panel . getIconRotate ( ) , panel . getIconFlip ( ) , panel . isIconSpin ( ) , null , null , false , false , false , false , panel . isIconBrand ( ) , panel . isIconInverse ( ) , panel . isIconLight ( ) , panel . isIconPulse ( ) , panel . isIconRegular ( ) , panel . isIconRegular ( ) ) ; } else { IconRenderer . encodeIcon ( rw , component , icon , fa , panel . getIconSize ( ) , panel . getIconRotate ( ) , panel . getIconFlip ( ) , panel . isIconSpin ( ) , null , null , false , false , false , false , panel . isIconBrand ( ) , panel . isIconInverse ( ) , panel . isIconLight ( ) , panel . isIconPulse ( ) , panel . isIconRegular ( ) , panel . isIconRegular ( ) ) ; _title = _title != null ? " " + _title : null ; writeText ( rw , _title , null ) ; } } else { if ( component . getChildCount ( ) > 0 ) { _title = _title != null ? " " + _title : null ; writeText ( rw , _title , null ) ; } else { writeText ( rw , _title , null ) ; } } // rw . writeText ( _ title , null ) ;
if ( isCollapsible ) { rw . endElement ( "a" ) ; } rw . endElement ( "h4" ) ; } else { if ( isCollapsible ) { writeTitleLink ( panel , rw , jQueryClientID , accordionParent ) ; } head . encodeAll ( context ) ; if ( isCollapsible ) { rw . endElement ( "a" ) ; } } rw . endElement ( "div" ) ; } rw . startElement ( "div" , panel ) ; rw . writeAttribute ( "id" , jQueryClientID + "content" , null ) ; writeAttribute ( rw , "dir" , panel . getDir ( ) , "dir" ) ; String _contentClass = panel . getContentClass ( ) ; if ( null == _contentClass ) _contentClass = "" ; if ( isCollapsible || isResponsive ) { _contentClass += " panel-collapse collapse" ; // in
if ( ! panel . isCollapsed ( ) ) _contentClass += " in" ; } _contentClass = _contentClass . trim ( ) ; if ( _contentClass . length ( ) > 0 ) rw . writeAttribute ( "class" , _contentClass , "class" ) ; String _contentStyle = panel . getContentStyle ( ) ; if ( null != _contentStyle && _contentStyle . length ( ) > 0 ) { rw . writeAttribute ( "style" , _contentStyle , "style" ) ; } // create the body
rw . startElement ( "div" , panel ) ; rw . writeAttribute ( "class" , "panel-body" , "class" ) ; if ( panel . isContentDisabled ( ) ) { rw . startElement ( "fieldset" , panel ) ; rw . writeAttribute ( "disabled" , "disabled" , "null" ) ; } |
public class RequestedAttributeTemplates { /** * Creates a { @ code RequestedAttribute } object for the DateOfBirth attribute .
* @ param isRequired
* flag to tell whether the attribute is required
* @ param includeFriendlyName
* flag that tells whether the friendly name should be included
* @ return a { @ code RequestedAttribute } object representing the DateOfBirth attribute */
public static RequestedAttribute DATE_OF_BIRTH ( Boolean isRequired , boolean includeFriendlyName ) { } } | return create ( AttributeConstants . EIDAS_DATE_OF_BIRTH_ATTRIBUTE_NAME , includeFriendlyName ? AttributeConstants . EIDAS_DATE_OF_BIRTH_ATTRIBUTE_FRIENDLY_NAME : null , Attribute . URI_REFERENCE , isRequired ) ; |
public class TaskTable { /** * 获得指定位置的 { @ link CronPattern }
* @ param index 位置
* @ return { @ link CronPattern }
* @ since 3.1.1 */
public CronPattern getPattern ( int index ) { } } | final Lock readLock = lock . readLock ( ) ; try { readLock . lock ( ) ; return patterns . get ( index ) ; } finally { readLock . unlock ( ) ; } |
public class StageDeclaration { /** * The actions included in a stage .
* @ param actions
* The actions included in a stage . */
public void setActions ( java . util . Collection < ActionDeclaration > actions ) { } } | if ( actions == null ) { this . actions = null ; return ; } this . actions = new java . util . ArrayList < ActionDeclaration > ( actions ) ; |
public class S { /** * Format a date with engine ' s default format corresponding
* to the engine ' s locale configured
* @ param date
* @ return the formatted String */
@ Transformer ( requireTemplate = true ) public static String format ( Date date ) { } } | return format ( date , null , null , null ) ; |
public class JobCommand { /** * < code > optional . alluxio . grpc . job . RegisterCommand registerCommand = 3 ; < / code > */
public alluxio . grpc . RegisterCommand getRegisterCommand ( ) { } } | return registerCommand_ == null ? alluxio . grpc . RegisterCommand . getDefaultInstance ( ) : registerCommand_ ; |
public class OmemoStore { /** * Store a whole bunch of preKeys .
* @ param userDevice our OmemoDevice .
* @ param preKeyHashMap HashMap of preKeys */
public void storeOmemoPreKeys ( OmemoDevice userDevice , TreeMap < Integer , T_PreKey > preKeyHashMap ) { } } | for ( Map . Entry < Integer , T_PreKey > entry : preKeyHashMap . entrySet ( ) ) { storeOmemoPreKey ( userDevice , entry . getKey ( ) , entry . getValue ( ) ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.