signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class TrainableBundle { /** * { @ inheritDoc } */
@ Override public void close ( ) { } } | for ( Trainable t : bundle . values ( ) ) { if ( t != null ) { try { t . close ( ) ; } catch ( Exception ex ) { throw new RuntimeException ( ex ) ; } } } bundle . clear ( ) ; |
public class DatabaseUtil { /** * Group by columns must be removed if the columns from select sql are deleted */
public static void removeGroupColumns ( List < String > selectColumns , List < String > groups ) { } } | List < String > removedGroups = new ArrayList < String > ( ) ; for ( String group : groups ) { if ( ! containsColumnByAlias ( selectColumns , group ) ) { removedGroups . add ( group ) ; } } groups . removeAll ( removedGroups ) ; |
public class AbstractHystrixStreamController { /** * Maintain an open connection with the client . On initial connection send latest data of each requested event type and subsequently send all changes for each requested event type .
* @ return JAX - RS Response - Serialization will be handled by { @ link HystrixStreamingOutputProvider } */
protected Response handleRequest ( ) { } } | ResponseBuilder builder = null ; /* ensure we aren ' t allowing more connections than we want */
int numberConnections = getCurrentConnections ( ) . get ( ) ; int maxNumberConnectionsAllowed = getMaxNumberConcurrentConnectionsAllowed ( ) ; // may change at runtime , so look this up for each request
if ( numberConnections >= maxNumberConnectionsAllowed ) { builder = Response . status ( Status . SERVICE_UNAVAILABLE ) . entity ( "MaxConcurrentConnections reached: " + maxNumberConnectionsAllowed ) ; } else { /* initialize response */
builder = Response . status ( Status . OK ) ; builder . header ( HttpHeaders . CONTENT_TYPE , "text/event-stream;charset=UTF-8" ) ; builder . header ( HttpHeaders . CACHE_CONTROL , "no-cache, no-store, max-age=0, must-revalidate" ) ; builder . header ( "Pragma" , "no-cache" ) ; getCurrentConnections ( ) . incrementAndGet ( ) ; builder . entity ( new HystrixStream ( sampleStream , pausePollerThreadDelayInMs , getCurrentConnections ( ) ) ) ; } return builder . build ( ) ; |
public class Timex3Interval { /** * getter for beginTimex - gets
* @ generated
* @ return value of the feature */
public String getBeginTimex ( ) { } } | if ( Timex3Interval_Type . featOkTst && ( ( Timex3Interval_Type ) jcasType ) . casFeat_beginTimex == null ) jcasType . jcas . throwFeatMissing ( "beginTimex" , "de.unihd.dbs.uima.types.heideltime.Timex3Interval" ) ; return jcasType . ll_cas . ll_getStringValue ( addr , ( ( Timex3Interval_Type ) jcasType ) . casFeatCode_beginTimex ) ; |
public class AbstractRedisStorage { /** * Resume ( un - pause ) the < code > { @ link org . quartz . Job } < / code > with the given key .
* @ param jobKey the key of the job to be resumed
* @ param jedis a thread - safe Redis connection */
public void resumeJob ( JobKey jobKey , T jedis ) throws JobPersistenceException { } } | for ( OperableTrigger trigger : getTriggersForJob ( jobKey , jedis ) ) { resumeTrigger ( trigger . getKey ( ) , jedis ) ; } |
public class AtomDataWriter { /** * Marshall an enum value .
* @ param value The value to marshall . Can be { @ code null } .
* @ param enumType The OData enum type . */
private void marshallEnum ( Object value , EnumType enumType ) throws XMLStreamException { } } | LOG . trace ( "Enum value: {} of type: {}" , value , enumType ) ; xmlWriter . writeCharacters ( value . toString ( ) ) ; |
public class JMCollections { /** * Build new list list .
* @ param < T > the type parameter
* @ param < R > the type parameter
* @ param collection the collection
* @ param transformFunction the transform function
* @ return the list */
public static < T , R > List < R > buildNewList ( Collection < T > collection , Function < T , R > transformFunction ) { } } | return collection . stream ( ) . map ( transformFunction ) . collect ( Collectors . toList ( ) ) ; |
public class AWSWAFRegionalClient { /** * Returns an array of < a > WebACLSummary < / a > objects in the response .
* @ param listWebACLsRequest
* @ return Result of the ListWebACLs operation returned by the service .
* @ throws WAFInternalErrorException
* The operation failed because of a system problem , even though the request was valid . Retry your request .
* @ throws WAFInvalidAccountException
* The operation failed because you tried to create , update , or delete an object by using an invalid account
* identifier .
* @ sample AWSWAFRegional . ListWebACLs
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / waf - regional - 2016-11-28 / ListWebACLs " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ListWebACLsResult listWebACLs ( ListWebACLsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListWebACLs ( request ) ; |
public class FieldAccessor { /** * { @ link XlsArrayColumns } フィールド用の位置情報を設定します 。
* < p > 位置情報を保持するフィールドがない場合は 、 処理はスキップされます 。 < / p >
* @ param targetObj フィールドが定義されているクラスのインスタンス
* @ param position 位置情報
* @ param index インデックスのキー 。 0以上を指定します 。
* @ throws IllegalArgumentException { @ literal targetObj = = null or position = = null }
* @ throws IllegalArgumentException { @ literal index < 0} */
public void setArrayPosition ( final Object targetObj , final CellPosition position , final int index ) { } } | ArgUtils . notNull ( targetObj , "targetObj" ) ; ArgUtils . notNull ( position , "position" ) ; ArgUtils . notMin ( index , 0 , "index" ) ; arrayPositionSetter . ifPresent ( setter -> setter . set ( targetObj , position , index ) ) ; |
public class CommonOps_DDF2 { /** * Changes the sign of every element in the vector . < br >
* < br >
* a < sub > i < / sub > = - a < sub > i < / sub >
* @ param a A vector . Modified . */
public static void changeSign ( DMatrix2 a ) { } } | a . a1 = - a . a1 ; a . a2 = - a . a2 ; |
public class DynamoDB { /** * Creates the specified table in DynamoDB . */
public Table createTable ( String tableName , List < KeySchemaElement > keySchema , List < AttributeDefinition > attributeDefinitions , ProvisionedThroughput provisionedThroughput ) { } } | return createTable ( new CreateTableRequest ( ) . withTableName ( tableName ) . withKeySchema ( keySchema ) . withAttributeDefinitions ( attributeDefinitions ) . withProvisionedThroughput ( provisionedThroughput ) ) ; |
public class DifferentialFunction { /** * The left argument for this function
* @ return */
public SDVariable larg ( ) { } } | val args = args ( ) ; if ( args == null || args . length == 0 ) throw new ND4JIllegalStateException ( "No arguments found." ) ; return args ( ) [ 0 ] ; |
public class NIOFileUtil { /** * Convert the given path string to a { @ link Path } object .
* @ param path the path to convert
* @ return a { @ link Path } object */
public static Path asPath ( String path ) { } } | URI uri = URI . create ( path ) ; return uri . getScheme ( ) == null ? Paths . get ( path ) : asPath ( uri ) ; |
public class MapDictionary { /** * @ see java . util . Map # putAll ( java . util . Map ) */
@ Override public void putAll ( Map < ? extends K , ? extends V > map ) { } } | if ( isReadyOnly ( ) ) throw new UnsupportedOperationException ( "Can't add properties to read-only dictionary" ) ; this . localMap . putAll ( map ) ; |
public class ServiceFactory { /** * get a upper case key press handler .
* @ return UpperAsciiKeyPressHandler */
public static final PhoneNumberServiceAsync getPhoneNumberService ( ) { } } | // NOPMD it ' s thread save !
if ( ServiceFactory . phoneNumberService == null ) { synchronized ( PhoneNumberServiceAsync . class ) { if ( ServiceFactory . phoneNumberService == null ) { ServiceFactory . phoneNumberService = GWT . create ( PhoneNumberServiceAsync . class ) ; } } final PathDefinitionInterface pathDefinition = GWT . create ( PathDefinitionInterface . class ) ; ( ( RestServiceProxy ) ServiceFactory . phoneNumberService ) . setResource ( new Resource ( pathDefinition . getRestBasePath ( ) + "/" + PhoneNumber . ROOT ) ) ; } return ServiceFactory . phoneNumberService ; |
public class RythmEngine { /** * Render template by string typed inline template content and an array of
* template args . The render result is returned as a String
* < p > See { @ link # getTemplate ( java . io . File , Object . . . ) } for note on
* render args < / p >
* @ param template the inline template content
* @ param args the render args array
* @ return render result */
public String renderStr ( String key , String template , Object ... args ) { } } | return renderString ( key , template , args ) ; |
public class SameDiffGraphVertex { /** * Package private */
static INDArray createMask ( DataType dataType , long [ ] shape ) { } } | switch ( shape . length ) { case 2 : // FF - Type input
return Nd4j . ones ( dataType , shape [ 0 ] , 1 ) ; case 3 : // RNN - Type input
return Nd4j . ones ( dataType , shape [ 0 ] , shape [ 2 ] ) ; case 4 : // CNN input
return Nd4j . ones ( dataType , shape [ 0 ] , 1 , 1 , 1 ) ; default : Preconditions . throwEx ( "Can not create all-ones-mask for given input shape %s." , Arrays . toString ( shape ) ) ; return null ; } |
public class LoadBalancersInner { /** * Updates a load balancer tags .
* @ param resourceGroupName The name of the resource group .
* @ param loadBalancerName The name of the load balancer .
* @ param tags Resource tags .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the LoadBalancerInner object if successful . */
public LoadBalancerInner updateTags ( String resourceGroupName , String loadBalancerName , Map < String , String > tags ) { } } | return updateTagsWithServiceResponseAsync ( resourceGroupName , loadBalancerName , tags ) . toBlocking ( ) . last ( ) . body ( ) ; |
public class PerspectiveOps { /** * Convenient function for converting from distorted image pixel coordinate to undistorted normalized
* image coordinates . If speed is a concern then { @ link PinholePtoN _ F32 } should be used instead .
* NOTE : norm and pixel can be the same instance .
* @ param param Intrinsic camera parameters
* @ param pixel Pixel coordinate
* @ param norm Optional storage for output . If null a new instance will be declared .
* @ return normalized image coordinate */
public static Point2D_F32 convertPixelToNorm ( CameraModel param , Point2D_F32 pixel , Point2D_F32 norm ) { } } | return ImplPerspectiveOps_F32 . convertPixelToNorm ( param , pixel , norm ) ; |
public class JSMessageData { /** * setting up the sharing state . */
void lazyCopy ( JSMessageData original ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . entry ( this , tc , "lazyCopy" , new Object [ ] { original } ) ; synchronized ( getMessageLockArtefact ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) JmfTr . debug ( this , tc , "lazyCopy locked dest " , new Object [ ] { getMessageLockArtefact ( ) } ) ; // This is the only occasion that we reach out and lock another message instance
// but we work on the assumption that lazy copying is happening as part of
// instantiation of a new message instance , so the potential for deadlocking
// is nil , since no other thread knows about this instance and can not therefore
// be concurrently supplying ' this ' as the ' original ' argument in another thread .
synchronized ( original . getMessageLockArtefact ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) JmfTr . debug ( this , tc , "lazyCopy locked source " , new Object [ ] { original . getMessageLockArtefact ( ) } ) ; // Copy common fields
indirect = original . indirect ; // If the message is assembled ( i . e . we have a contents buffer ) we share both the
// buffer and the cache between original and copy .
// If the message is unassembled ( no contents buffer ) we just share the cache .
// In either case if a change later needs to be made to the shared portion we
// will need to copy it before changing , so the shared flags is set to
// indicate sharing exists between unrelated parts .
if ( original . contents == null ) { contents = null ; original . sharedCache = true ; sharedCache = true ; cache = original . cache ; } else { original . sharedContents = true ; sharedContents = true ; contents = original . contents ; original . sharedCache = true ; sharedCache = true ; cache = original . cache ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) JmfTr . debug ( this , tc , "lazyCopy unlocking source " , new Object [ ] { original . getMessageLockArtefact ( ) } ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) JmfTr . debug ( this , tc , "lazyCopy unlocking dest " , new Object [ ] { getMessageLockArtefact ( ) } ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . exit ( this , tc , "lazyCopy" ) ; |
public class JdbcQueueFactory { /** * { @ inheritDoc }
* @ throws Exception */
@ Override protected void initQueue ( T queue , QueueSpec spec ) throws Exception { } } | queue . setJdbcHelper ( defaultJdbcHelper ) . setDataSource ( defaultDataSource ) ; queue . setEphemeralDisabled ( getDefaultEphemeralDisabled ( ) ) . setEphemeralMaxSize ( getDefaultEphemeralMaxSize ( ) ) ; Boolean ephemeralDisabled = spec . getField ( QueueSpec . FIELD_EPHEMERAL_DISABLED , Boolean . class ) ; if ( ephemeralDisabled != null ) { queue . setEphemeralDisabled ( ephemeralDisabled . booleanValue ( ) ) ; } Integer maxEphemeralSize = spec . getField ( QueueSpec . FIELD_EPHEMERAL_MAX_SIZE , Integer . class ) ; if ( maxEphemeralSize != null ) { queue . setEphemeralMaxSize ( maxEphemeralSize . intValue ( ) ) ; } queue . setTableName ( defaultTableName ) . setTableNameEphemeral ( defaultTableNameEphemeral ) . setMaxRetries ( defaultMaxRetries ) . setTransactionIsolationLevel ( defaultTransactionIsolationLevel ) ; String tableName = spec . getField ( SPEC_FIELD_TABLE_NAME ) ; if ( ! StringUtils . isBlank ( tableName ) ) { queue . setTableName ( tableName ) ; } String tableNameEphemeral = spec . getField ( SPEC_FIELD_TABLE_NAME_EPHEMERAL ) ; if ( ! StringUtils . isBlank ( tableNameEphemeral ) ) { queue . setTableNameEphemeral ( tableNameEphemeral ) ; } Integer maxRetries = spec . getField ( SPEC_FIELD_MAX_RETRIES , Integer . class ) ; if ( maxRetries != null ) { queue . setMaxRetries ( maxRetries . intValue ( ) ) ; } Integer txIsolationLevel = spec . getField ( SPEC_FIELD_TRANSACTION_ISOLATION_LEVEL , Integer . class ) ; if ( txIsolationLevel != null ) { queue . setTransactionIsolationLevel ( txIsolationLevel . intValue ( ) ) ; } super . initQueue ( queue , spec ) ; |
public class HomoglyphBuilder { /** * Consumes the supplied Reader and uses it to construct a populated Homoglyph
* object .
* @ param aReader
* a Reader object that provides access to homoglyph data ( see the
* bundled char _ codes . txt file for an example of the required format )
* @ return a Homoglyph object populated using the data returned by the Reader
* object
* @ throws IOException
* if the specified Reader cannot be read */
@ Nonnull public static Homoglyph build ( @ Nonnull @ WillClose final Reader aReader ) throws IOException { } } | ValueEnforcer . notNull ( aReader , "reader" ) ; try ( final NonBlockingBufferedReader aBR = new NonBlockingBufferedReader ( aReader ) ) { final ICommonsList < IntSet > aList = new CommonsArrayList < > ( ) ; String sLine ; while ( ( sLine = aBR . readLine ( ) ) != null ) { sLine = sLine . trim ( ) ; if ( sLine . startsWith ( "#" ) || sLine . length ( ) == 0 ) continue ; final IntSet aSet = new IntSet ( sLine . length ( ) / 3 ) ; for ( final String sCharCode : StringHelper . getExploded ( ',' , sLine ) ) { final int nVal = StringParser . parseInt ( sCharCode , 16 , - 1 ) ; if ( nVal >= 0 ) aSet . add ( nVal ) ; } aList . add ( aSet ) ; } return new Homoglyph ( aList ) ; } |
public class KeystoreConfigurationFactory { /** * Sets the keyring monitor registration .
* @ param keyringMonitorRegistration */
protected void setKeyringMonitorRegistration ( ServiceRegistration < KeyringMonitor > keyringMonitorRegistration ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( this , tc , "setKeyringMonitorRegistration" ) ; } this . keyringMonitorRegistration = keyringMonitorRegistration ; |
public class OpenApiReader { /** * Returns the { @ link OpenAPI } instance represented by this document . */
public OpenAPI read ( ) { } } | JsonNode json ; try { ObjectMapper mapper = isYaml ( ) ? ObjectMapperFactory . createYaml ( ) : ObjectMapperFactory . createJson ( ) ; json = mapper . readTree ( getReader ( ) ) ; } catch ( Exception e ) { throw new OpenApiReaderException ( getLocation ( ) , e ) ; } SwaggerParseResult parseResult = new OpenAPIDeserializer ( ) . deserialize ( json , Objects . toString ( getLocation ( ) , null ) ) ; Optional . of ( parseResult ) . map ( SwaggerParseResult :: getMessages ) . filter ( messages -> ! messages . isEmpty ( ) ) . map ( messages -> new OpenApiReaderException ( getLocation ( ) , messages ) ) . ifPresent ( failure -> { throw failure ; } ) ; return parseResult . getOpenAPI ( ) ; |
public class AwsSecurityFindingFilters { /** * The workflow state of a finding .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setWorkflowState ( java . util . Collection ) } or { @ link # withWorkflowState ( java . util . Collection ) } if you want
* to override the existing values .
* @ param workflowState
* The workflow state of a finding .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AwsSecurityFindingFilters withWorkflowState ( StringFilter ... workflowState ) { } } | if ( this . workflowState == null ) { setWorkflowState ( new java . util . ArrayList < StringFilter > ( workflowState . length ) ) ; } for ( StringFilter ele : workflowState ) { this . workflowState . add ( ele ) ; } return this ; |
public class JAltGridScreen { /** * Setup a new panel . */
public void setupPanel ( ) { } } | this . setLayout ( new BoxLayout ( this , BoxLayout . Y_AXIS ) ) ; this . addGridHeading ( ) ; JPanel panel = new JPanel ( ) ; panel . setOpaque ( false ) ; panel . setMinimumSize ( new Dimension ( 20 , 20 ) ) ; panel . setLayout ( new BorderLayout ( ) ) ; JScrollPane scrollPane = new JScrollPane ( JScrollPane . VERTICAL_SCROLLBAR_AS_NEEDED , JScrollPane . HORIZONTAL_SCROLLBAR_AS_NEEDED ) ; panel . add ( scrollPane ) ; this . add ( panel ) ; m_panelGrid = new JPanel ( ) ; JPanel panelAligner = new JPanel ( ) ; // This panel keeps the panelgrid in the upper left hand of the scrollpane .
panelAligner . add ( m_panelGrid ) ; panelAligner . setLayout ( new FlowLayout ( FlowLayout . LEADING ) ) ; scrollPane . getViewport ( ) . add ( panelAligner ) ; panelAligner . setOpaque ( false ) ; m_panelGrid . setOpaque ( false ) ; scrollPane . getViewport ( ) . setOpaque ( false ) ; scrollPane . setOpaque ( false ) ; this . addGridDetail ( ) ; |
public class DACLAssertor { /** * Compares the object DACL located by the searchFilter against the specified { @ code AdRoleAssertion } , and
* determines whether
* that assertion ' s principal is granted all the rights which the assertion contains . < br >
* < br >
* When comparing ACEs of the DACL , only those of { @ code AceType . ACCESS _ ALLOWED _ ACE _ TYPE } or
* { @ code AceType . ACCESS _ ALLOWED _ OBJECT _ ACE _ TYPE } will be considered for satisfying an { @ code AceAssertion } of the
* roleAssertion . < br >
* < br >
* Once completed , any unsatisfied assertions can be obtained by calling { @ linkplain getUnsatisfiedAssertions } .
* @ param roleAssertion
* the AdRoleAssertion
* @ return true if the DACL fulfills the claims of the roleAssertion , false otherwise .
* @ throws CommunicationException
* if the context for searching the DACL is invalid or the domain cannot be reached
* @ throws NameNotFoundException
* if the DACL search fails
* @ throws NamingException
* if extracting the DACL fails or another JNDI issue occurs
* @ throws SizeLimitExceededException
* if more than one AD object found during DACL search */
public boolean doAssert ( AdRoleAssertion roleAssertion ) throws NamingException { } } | boolean result = false ; if ( roleAssertion . getPrincipal ( ) == null ) { LOG . warn ( "DACLAssertor.run, unable to run against a NULL principal specified in AdRoleAssertion" ) ; return result ; } if ( dacl == null ) { getDACL ( ) ; } this . unsatisfiedAssertions = findUnsatisfiedAssertions ( roleAssertion ) ; result = this . unsatisfiedAssertions . isEmpty ( ) ? true : false ; LOG . info ( "doAssert, result: {}" , result ) ; return result ; |
public class RepositoryServiceV1 { /** * DELETE / projects / { projectName } / repos / { repoName }
* < p > Removes a repository . */
@ Delete ( "/projects/{projectName}/repos/{repoName}" ) @ RequiresRole ( roles = ProjectRole . OWNER ) public CompletableFuture < Void > removeRepository ( ServiceRequestContext ctx , @ Param ( "repoName" ) String repoName , Repository repository , Author author ) { } } | if ( Project . isReservedRepoName ( repoName ) ) { return HttpApiUtil . throwResponse ( ctx , HttpStatus . FORBIDDEN , "A reserved repository cannot be removed." ) ; } return execute ( Command . removeRepository ( author , repository . parent ( ) . name ( ) , repository . name ( ) ) ) . thenCompose ( unused -> mds . removeRepo ( author , repository . parent ( ) . name ( ) , repository . name ( ) ) ) . handle ( HttpApiUtil :: throwUnsafelyIfNonNull ) ; |
public class Hash { /** * Provides a list of algorithms for the specified service ( which , for our purposes , is
* " MessageDigest " .
* < p > This method was only very slightly adapted ( to use a TreeSet ) from the Java Almanac at
* http : / / javaalmanac . com / egs / java . security / ListServices . html
* @ param serviceType The name of the service we ' re looking for . It ' s " MessageDigest " */
private static Set getCryptoImpls ( String serviceType ) { } } | Set result = new TreeSet ( ) ; // All all providers
Provider [ ] providers = Security . getProviders ( ) ; for ( int i = 0 ; i < providers . length ; i ++ ) { // Get services provided by each provider
Set keys = providers [ i ] . keySet ( ) ; for ( Object okey : providers [ i ] . keySet ( ) ) { String key = ( String ) okey ; key = key . split ( " " ) [ 0 ] ; if ( key . startsWith ( serviceType + "." ) ) { result . add ( key . substring ( serviceType . length ( ) + 1 ) ) ; } else if ( key . startsWith ( "Alg.Alias." + serviceType + "." ) ) { // This is an alias
result . add ( key . substring ( serviceType . length ( ) + 11 ) ) ; } } } return result ; |
public class CrunchDatasets { /** * Partitions { @ code collection } to be stored efficiently in { @ code View } .
* This restructures the parallel collection so that all of the entities that
* will be stored in a given partition will be evenly distributed across a specified
* { @ code numPartitionWriters } .
* If the dataset is not partitioned , then this will structure all of the
* entities to produce a number of files equal to { @ code numWriters } .
* @ param collection a collection of entities
* @ param view a { @ link View } of a dataset to partition the collection for
* @ param numWriters the number of writers that should be used
* @ param numPartitionWriters the number of writers data for a single partition will be distributed across
* @ param < E > the type of entities in the collection and underlying dataset
* @ return an equivalent collection of entities partitioned for the view
* @ see # partition ( PCollection , View )
* @ since 1.1.0 */
public static < E > PCollection < E > partition ( PCollection < E > collection , View < E > view , int numWriters , int numPartitionWriters ) { } } | // ensure the number of writers is honored whether it is per partition or total .
DatasetDescriptor descriptor = view . getDataset ( ) . getDescriptor ( ) ; if ( descriptor . isPartitioned ( ) ) { GetStorageKey < E > getKey = new GetStorageKey < E > ( view , numPartitionWriters ) ; PTable < Pair < GenericData . Record , Integer > , E > table = collection . by ( getKey , Avros . pairs ( Avros . generics ( getKey . schema ( ) ) , Avros . ints ( ) ) ) ; PGroupedTable < Pair < GenericData . Record , Integer > , E > grouped = numWriters > 0 ? table . groupByKey ( numWriters ) : table . groupByKey ( ) ; return grouped . ungroup ( ) . values ( ) ; } else { return partition ( collection , numWriters ) ; } |
public class Verify { /** * Verify that a map contains an entry for a given key .
* @ param map to be checked .
* @ param key to get by .
* @ param name of entry .
* @ throws NullPointerException if map or key is null
* @ throws IllegalStateException if the entry does not exist . */
public static void present ( final Map < ? , ? > map , final Object key , final String name ) { } } | if ( null == map . get ( key ) ) { throw new IllegalStateException ( name + " not found in map for key: " + key ) ; } |
public class BoundingBox { /** * Creates a BoundingBox that is a fixed meter amount larger on all sides ( but does not cross date line / poles ) .
* @ param meters extension ( must be > = 0)
* @ return an extended BoundingBox or this ( if meters = = 0) */
public BoundingBox extendMeters ( int meters ) { } } | if ( meters == 0 ) { return this ; } else if ( meters < 0 ) { throw new IllegalArgumentException ( "BoundingBox extend operation does not accept negative values" ) ; } double verticalExpansion = LatLongUtils . latitudeDistance ( meters ) ; double horizontalExpansion = LatLongUtils . longitudeDistance ( meters , Math . max ( Math . abs ( minLatitude ) , Math . abs ( maxLatitude ) ) ) ; double minLat = Math . max ( MercatorProjection . LATITUDE_MIN , this . minLatitude - verticalExpansion ) ; double minLon = Math . max ( - 180 , this . minLongitude - horizontalExpansion ) ; double maxLat = Math . min ( MercatorProjection . LATITUDE_MAX , this . maxLatitude + verticalExpansion ) ; double maxLon = Math . min ( 180 , this . maxLongitude + horizontalExpansion ) ; return new BoundingBox ( minLat , minLon , maxLat , maxLon ) ; |
public class RoadPath { /** * Replies if the road segment of < var > path < / var > ( the first or the last in this order )
* that could be connected to the first point of the current path .
* @ param path is the path from which a road segment should be read .
* @ return the connectable segment from the < var > path < / var > ; or < code > null < / code >
* if no connection is possible .
* @ since 4.0 */
@ Pure public RoadSegment getConnectableSegmentToFirstPoint ( RoadPath path ) { } } | assert path != null ; if ( path . isEmpty ( ) ) { return null ; } RoadConnection first1 = getFirstPoint ( ) ; RoadConnection first2 = path . getFirstPoint ( ) ; RoadConnection last2 = path . getLastPoint ( ) ; first1 = first1 . getWrappedRoadConnection ( ) ; first2 = first2 . getWrappedRoadConnection ( ) ; last2 = last2 . getWrappedRoadConnection ( ) ; if ( first1 . equals ( first2 ) ) { return path . getFirstSegment ( ) ; } if ( first1 . equals ( last2 ) ) { return path . getLastSegment ( ) ; } return null ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link DateAttributeType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link DateAttributeType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/citygml/generics/2.0" , name = "dateAttribute" , substitutionHeadNamespace = "http://www.opengis.net/citygml/generics/2.0" , substitutionHeadName = "_genericAttribute" ) public JAXBElement < DateAttributeType > createDateAttribute ( DateAttributeType value ) { } } | return new JAXBElement < DateAttributeType > ( _DateAttribute_QNAME , DateAttributeType . class , null , value ) ; |
public class Record { /** * Copy the data in this record to the thin version .
* @ param fieldList */
public final void copyAllFields ( Record record , FieldList fieldList ) { } } | for ( int i = 0 ; i < fieldList . getFieldCount ( ) ; i ++ ) { FieldInfo fieldInfo = fieldList . getField ( i ) ; BaseField field = record . getField ( i ) ; this . moveFieldToThin ( fieldInfo , field , record ) ; } |
public class XData { /** * loads a xdata file from from an inputstream using the given marshallers . For all classes other
* than these a special marshaller is required to map the class ' data to a data node
* deSerializedObject :
* < ul >
* < li > Boolean < / li >
* < li > Long < / li >
* < li > Integer < / li >
* < li > String < / li >
* < li > Float < / li >
* < li > Double < / li >
* < li > Byte < / li >
* < li > Short < / li >
* < li > Character < / li >
* < li > DataNode < / li >
* < li > List & lt ; ? & gt ; < / li >
* < / ul >
* Also take a look at { @ link com . moebiusgames . xdata . marshaller } . There are a bunch of
* standard marshallers that ARE INCLUDED by default . So you don ' t need to add them here
* to work .
* @ param in
* @ param progressListener
* @ param marshallers
* @ return
* @ throws IOException */
public static DataNode load ( InputStream in , ProgressListener progressListener , AbstractDataMarshaller < ? > ... marshallers ) throws IOException { } } | return load ( in , ChecksumValidation . VALIDATE_IF_AVAILABLE , progressListener , false , marshallers ) ; |
public class Reflection { /** * 具有一定兼容性的类型转换 / 切换
* 将一个未知类型转化为你想要的那个类型 , 它会宽容地做转换 , 比如map映射成bean , bean映射成map
* list的第一个元素映射成bean , list的第一个元素映射成map等等 。
* 如果入参是null , 那么返回结果也是null
* @ param nonCollectionType 目标类型 , 不允许是集合类型 */
@ SuppressWarnings ( "all" ) private static < T > T toNonCollectionType ( Object data , Class < T > nonCollectionType ) { } } | if ( Collection . class . isAssignableFrom ( nonCollectionType ) ) { throw new RuntimeException ( "API使用错误,本方法不支持将目标对象转为非集合类型" ) ; } if ( data == null ) { return Defaults . defaultValue ( nonCollectionType ) ; } if ( data . getClass ( ) . isArray ( ) ) { data = ArrayUtil . toList ( data ) ; } if ( data instanceof Collection ) { Collection collection = ( Collection ) data ; if ( nonCollectionType == String . class ) { return ( T ) JSON . toJSONString ( collection ) ; } if ( collection . isEmpty ( ) ) { return null ; } if ( collection . size ( ) >= 2 ) { LOG . warn ( new Throwable ( String . format ( "集合 【 %s 】 的元素个数不只一个,我们只取出第一个做转换" , data ) ) ) ; } for ( Object element : collection ) { LOG . debug ( "集合的第一个元素会被转为目标类型" ) ; return transferNonCollection ( element , nonCollectionType ) ; } } else { return transferNonCollection ( data , nonCollectionType ) ; } throw castFailedException ( data , nonCollectionType ) ; |
public class CoverageDataCore { /** * Get the pixel value of the coverage data value
* @ param griddedTile
* gridded tile
* @ param value
* coverage data value
* @ return pixel value */
public float getFloatPixelValue ( GriddedTile griddedTile , Double value ) { } } | double pixel = 0 ; if ( value == null ) { if ( griddedCoverage != null ) { pixel = griddedCoverage . getDataNull ( ) ; } } else { pixel = valueToPixelValue ( griddedTile , value ) ; } float pixelValue = ( float ) pixel ; return pixelValue ; |
public class Condition { /** * With POST body containing string */
public static Condition withPostBodyContaining ( final String str ) { } } | return new Condition ( input -> input . getPostBody ( ) != null && input . getPostBody ( ) . contains ( str ) ) ; |
public class AWSAmplifyClient { /** * Starts a new job for a branch , part of an Amplify App .
* @ param startJobRequest
* Request structure for Start job request .
* @ return Result of the StartJob operation returned by the service .
* @ throws BadRequestException
* Exception thrown when a request contains unexpected data .
* @ throws UnauthorizedException
* Exception thrown when an operation fails due to a lack of access .
* @ throws InternalFailureException
* Exception thrown when the service fails to perform an operation due to an internal issue .
* @ throws NotFoundException
* Exception thrown when an entity has not been found during an operation .
* @ throws LimitExceededException
* Exception thrown when a resource could not be created because of service limits .
* @ sample AWSAmplify . StartJob
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / amplify - 2017-07-25 / StartJob " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public StartJobResult startJob ( StartJobRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeStartJob ( request ) ; |
public class NGWordUtiltites {
    /**
     * Converts a camel-case (or snake-case) identifier to a human readable,
     * lower-case label: an upper-case letter becomes a space plus its lower-case
     * form, an underscore becomes a space, and a digit following a letter is
     * separated by a space.
     *
     * @param camel the String to be converted
     * @return the hopefully human readable version
     */
    public static String labelFromCamelCase(String camel) {
        final StringBuilder label = new StringBuilder(camel.length() + 8);
        char previous = 0;
        for (int i = 0; i < camel.length(); i++) {
            final char current = camel.charAt(i);
            // Separate a digit from a preceding letter ("value2" -> "value 2").
            if (Character.isDigit(current) && i > 0 && Character.isAlphabetic(previous)) {
                label.append(" ");
            }
            if (current == '_') {
                label.append(" ");
            } else if (Character.isUpperCase(current)) {
                label.append(' ').append(Character.toLowerCase(current));
            } else {
                label.append(current);
            }
            previous = current;
        }
        return label.toString();
    }
}
public class CfDef {
    /**
     * Returns true if the field corresponding to fieldID is set (has been
     * assigned a value) and false otherwise.
     *
     * @param field the field to test; must not be null
     * @throws IllegalArgumentException if {@code field} is null
     * @throws IllegalStateException if the field id is not recognized
     *         (unreachable for a complete enum — generated Thrift safety net)
     */
    public boolean isSet(_Fields field) {
        if (field == null) {
            throw new IllegalArgumentException();
        }
        // Dispatch each field id to its per-field isSetXxx() accessor.
        switch (field) {
            case KEYSPACE: return isSetKeyspace();
            case NAME: return isSetName();
            case COLUMN_TYPE: return isSetColumn_type();
            case COMPARATOR_TYPE: return isSetComparator_type();
            case SUBCOMPARATOR_TYPE: return isSetSubcomparator_type();
            case COMMENT: return isSetComment();
            case READ_REPAIR_CHANCE: return isSetRead_repair_chance();
            case COLUMN_METADATA: return isSetColumn_metadata();
            case GC_GRACE_SECONDS: return isSetGc_grace_seconds();
            case DEFAULT_VALIDATION_CLASS: return isSetDefault_validation_class();
            case ID: return isSetId();
            case MIN_COMPACTION_THRESHOLD: return isSetMin_compaction_threshold();
            case MAX_COMPACTION_THRESHOLD: return isSetMax_compaction_threshold();
            case KEY_VALIDATION_CLASS: return isSetKey_validation_class();
            case KEY_ALIAS: return isSetKey_alias();
            case COMPACTION_STRATEGY: return isSetCompaction_strategy();
            case COMPACTION_STRATEGY_OPTIONS: return isSetCompaction_strategy_options();
            case COMPRESSION_OPTIONS: return isSetCompression_options();
            case BLOOM_FILTER_FP_CHANCE: return isSetBloom_filter_fp_chance();
            case CACHING: return isSetCaching();
            case DCLOCAL_READ_REPAIR_CHANCE: return isSetDclocal_read_repair_chance();
            case MEMTABLE_FLUSH_PERIOD_IN_MS: return isSetMemtable_flush_period_in_ms();
            case DEFAULT_TIME_TO_LIVE: return isSetDefault_time_to_live();
            case SPECULATIVE_RETRY: return isSetSpeculative_retry();
            case TRIGGERS: return isSetTriggers();
            case CELLS_PER_ROW_TO_CACHE: return isSetCells_per_row_to_cache();
            case MIN_INDEX_INTERVAL: return isSetMin_index_interval();
            case MAX_INDEX_INTERVAL: return isSetMax_index_interval();
            case ROW_CACHE_SIZE: return isSetRow_cache_size();
            case KEY_CACHE_SIZE: return isSetKey_cache_size();
            case ROW_CACHE_SAVE_PERIOD_IN_SECONDS: return isSetRow_cache_save_period_in_seconds();
            case KEY_CACHE_SAVE_PERIOD_IN_SECONDS: return isSetKey_cache_save_period_in_seconds();
            case MEMTABLE_FLUSH_AFTER_MINS: return isSetMemtable_flush_after_mins();
            case MEMTABLE_THROUGHPUT_IN_MB: return isSetMemtable_throughput_in_mb();
            case MEMTABLE_OPERATIONS_IN_MILLIONS: return isSetMemtable_operations_in_millions();
            case REPLICATE_ON_WRITE: return isSetReplicate_on_write();
            case MERGE_SHARDS_CHANCE: return isSetMerge_shards_chance();
            case ROW_CACHE_PROVIDER: return isSetRow_cache_provider();
            case ROW_CACHE_KEYS_TO_SAVE: return isSetRow_cache_keys_to_save();
            case POPULATE_IO_CACHE_ON_FLUSH: return isSetPopulate_io_cache_on_flush();
            case INDEX_INTERVAL: return isSetIndex_interval();
        }
        throw new IllegalStateException();
    }
}
public class JPAQueryBuilder { /** * Returns true if one of the fetches specified will result in a collection being pulled back */
public boolean hasCollectionFetch ( ) { } } | if ( fetches != null ) for ( String fetch : fetches ) { QEntity parent = entity ; final String [ ] parts = StringUtils . split ( fetch , '.' ) ; for ( int i = 0 ; i < parts . length ; i ++ ) { // If this is a fully supported relation then continue checking
if ( parent . hasRelation ( parts [ i ] ) ) { final QRelation relation = parent . getRelation ( parts [ i ] ) ; parent = relation . getEntity ( ) ; if ( relation . isCollection ( ) ) { if ( log . isTraceEnabled ( ) ) log . trace ( "Encountered fetch " + fetch + ". This resolves to " + relation + " which is a collection" ) ; return true ; } } // This covers partially - supported things like Map and other basic collections that don ' t have a QRelation description
else if ( parent . hasNonEntityRelation ( parts [ i ] ) ) { if ( parent . isNonEntityRelationCollection ( parts [ i ] ) ) return true ; } else { log . warn ( "Encountered relation " + parts [ i ] + " on " + parent . getName ( ) + " as part of path " + fetch + ". Assuming QEntity simply does not know this relation. Assuming worst case scenario (collection join is involved)" ) ; return true ; } } } return false ; |
public class PlatformDb2Impl { /** * Patch provided by Avril Kotzen ( hi001 @ webmail . co . za )
* DB2 handles TINYINT ( for mapping a byte ) . */
public void setObjectForStatement ( PreparedStatement ps , int index , Object value , int sqlType ) throws SQLException { } } | if ( sqlType == Types . TINYINT ) { ps . setByte ( index , ( ( Byte ) value ) . byteValue ( ) ) ; } else { super . setObjectForStatement ( ps , index , value , sqlType ) ; } |
public class RealmTableImpl { /** * ( non - Javadoc )
* @ see org . jdiameter . client . api . controller . IRealmTable # getRealm ( java . lang . String , org . jdiameter . api . ApplicationId ) */
@ Override public Realm getRealm ( String realmName , ApplicationId applicationId ) { } } | RealmSet rs = this . realmNameToRealmSet . get ( realmName ) ; return rs == null ? null : rs . getRealm ( applicationId ) ; |
public class SerialImpl {
    /**
     * Sends an array of bytes to the serial port/device identified by the
     * instance's file descriptor.
     *
     * @param data the byte array containing the data to transmit
     * @param offset the starting index (inclusive) in the array to send from
     * @param length the number of bytes from the byte array to transmit
     * @throws IllegalStateException if the serial port is not open
     * @throws IOException on any transmission error
     */
    @Override
    public void write(byte[] data, int offset, int length) throws IllegalStateException, IOException {
        // validate state: a closed port cannot transmit, fail fast
        if (isClosed()) {
            throw new IllegalStateException("Serial connection is not open; cannot 'write()'.");
        }
        // write serial data to transmit buffer via the native JNI binding
        com.pi4j.jni.Serial.write(fileDescriptor, data, offset, length);
    }
}
public class RaftSessionService { /** * queried locally */
private Map < CPGroupId , Collection < Tuple2 < Long , Long > > > getSessionsToExpire ( ) { } } | Map < CPGroupId , Collection < Tuple2 < Long , Long > > > expired = new HashMap < CPGroupId , Collection < Tuple2 < Long , Long > > > ( ) ; for ( RaftSessionRegistry registry : registries . values ( ) ) { Collection < Tuple2 < Long , Long > > e = registry . getSessionsToExpire ( ) ; if ( ! e . isEmpty ( ) ) { expired . put ( registry . groupId ( ) , e ) ; } } return expired ; |
public class GroovyShell { /** * A helper method which runs the given cl script with the given command line arguments
* @ param scriptText is the text content of the script
* @ param fileName is the logical file name of the script ( which is used to create the class name of the script )
* @ param list the command line arguments to pass in */
public Object run ( String scriptText , String fileName , List list ) throws CompilationFailedException { } } | return run ( scriptText , fileName , ( String [ ] ) list . toArray ( EMPTY_STRING_ARRAY ) ) ; |
public class PolymerClassDefinition {
    /**
     * Validates the class definition and, if valid, extracts the class
     * definition from the AST. As opposed to the Polymer 1 extraction, this
     * operation is non-destructive.
     *
     * @param classNode the CLASS node to extract from
     * @param compiler used for error reporting
     * @param globalNames global namespace (not read in this method body)
     * @return the extracted definition, or null when no name can be resolved
     */
    @Nullable
    static PolymerClassDefinition extractFromClassNode(Node classNode, AbstractCompiler compiler, GlobalNamespace globalNames) {
        checkState(classNode != null && classNode.isClass());
        // The supported case is for the config getter to return an object literal descriptor.
        Node propertiesDescriptor = null;
        Node propertiesGetter = NodeUtil.getFirstGetterMatchingKey(NodeUtil.getClassMembers(classNode), "properties");
        if (propertiesGetter != null) {
            if (!propertiesGetter.isStaticMember()) {
                // report bad class definition: the properties getter must be static
                compiler.report(JSError.make(classNode, PolymerPassErrors.POLYMER_CLASS_PROPERTIES_NOT_STATIC));
            } else {
                // Scan the getter body for "return <object literal>".
                for (Node child : NodeUtil.getFunctionBody(propertiesGetter.getFirstChild()).children()) {
                    if (child.isReturn()) {
                        if (child.hasChildren() && child.getFirstChild().isObjectLit()) {
                            propertiesDescriptor = child.getFirstChild();
                            break;
                        } else {
                            compiler.report(JSError.make(propertiesGetter, PolymerPassErrors.POLYMER_CLASS_PROPERTIES_INVALID));
                        }
                    }
                }
            }
        }
        // Resolve the name the class is bound to: a var declaration name, an
        // assignment target, or the class's own name, in that order.
        Node target;
        if (NodeUtil.isNameDeclaration(classNode.getGrandparent())) {
            target = IR.name(classNode.getParent().getString());
        } else if (classNode.getParent().isAssign() && classNode.getParent().getFirstChild().isQualifiedName()) {
            target = classNode.getParent().getFirstChild();
        } else if (!classNode.getFirstChild().isEmpty()) {
            target = classNode.getFirstChild();
        } else {
            // issue error - no name found
            compiler.report(JSError.make(classNode, PolymerPassErrors.POLYMER_CLASS_UNNAMED));
            return null;
        }
        JSDocInfo classInfo = NodeUtil.getBestJSDocInfo(classNode);
        JSDocInfo ctorInfo = null;
        Node constructor = NodeUtil.getEs6ClassConstructorMemberFunctionDef(classNode);
        if (constructor != null) {
            ctorInfo = NodeUtil.getBestJSDocInfo(constructor);
        }
        List<MemberDefinition> allProperties = PolymerPassStaticUtils.extractProperties(propertiesDescriptor, DefinitionType.ES6Class, compiler, constructor);
        // Collect the class's member functions as methods.
        List<MemberDefinition> methods = new ArrayList<>();
        for (Node keyNode : NodeUtil.getClassMembers(classNode).children()) {
            if (!keyNode.isMemberFunctionDef()) {
                continue;
            }
            methods.add(new MemberDefinition(NodeUtil.getBestJSDocInfo(keyNode), keyNode, keyNode.getFirstChild()));
        }
        return new PolymerClassDefinition(DefinitionType.ES6Class, classNode, target, propertiesDescriptor, classInfo, new MemberDefinition(ctorInfo, null, constructor), null, allProperties, methods, null, null);
    }
}
public class Loader { /** * Returns the resource
* @ param name name of the resource */
public URL getResource ( String name ) { } } | Path path ; path = getPath ( name ) ; if ( path != null && path . exists ( ) ) { try { return path . getNetURL ( ) ; } catch ( Exception e ) { log . log ( Level . FINER , e . toString ( ) , e ) ; } } return null ; |
public class GetLogGroupFieldsResult { /** * The array of fields found in the query . Each object in the array contains the name of the field , along with the
* percentage of time it appeared in the log events that were queried .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setLogGroupFields ( java . util . Collection ) } or { @ link # withLogGroupFields ( java . util . Collection ) } if you want
* to override the existing values .
* @ param logGroupFields
* The array of fields found in the query . Each object in the array contains the name of the field , along
* with the percentage of time it appeared in the log events that were queried .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetLogGroupFieldsResult withLogGroupFields ( LogGroupField ... logGroupFields ) { } } | if ( this . logGroupFields == null ) { setLogGroupFields ( new com . amazonaws . internal . SdkInternalList < LogGroupField > ( logGroupFields . length ) ) ; } for ( LogGroupField ele : logGroupFields ) { this . logGroupFields . add ( ele ) ; } return this ; |
public class TheHashinator { /** * Get a VoltTable containing the partition keys for each partition that can be found for the given hashinator .
* May be missing some partitions during elastic rebalance when the partitions don ' t own
* enough of the ring to be probed
* If the type is not supported returns null
* @ param hashinator a particular hashinator to get partition keys
* @ param type key type
* @ return a VoltTable containing the partition keys */
public static VoltTable getPartitionKeys ( TheHashinator hashinator , VoltType type ) { } } | // get partitionKeys response table so we can copy it
final VoltTable partitionKeys ; switch ( type ) { case INTEGER : partitionKeys = hashinator . m_integerPartitionKeys . get ( ) ; break ; case STRING : partitionKeys = hashinator . m_stringPartitionKeys . get ( ) ; break ; case VARBINARY : partitionKeys = hashinator . m_varbinaryPartitionKeys . get ( ) ; break ; default : return null ; } // return a clone because if the table is used at all in the voltdb process ,
// ( like by an NT procedure ) ,
// you can corrupt the various offsets and positions in the underlying buffer
return partitionKeys . semiDeepCopy ( ) ; |
public class JvmDeclaredTypeImpl {
    /**
     * Returns the members list, lazily creating the EMF containment list on
     * first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<JvmMember> getMembers() {
        if (members == null) {
            // Containment list with inverse: members know their declaring type.
            members = new EObjectContainmentWithInverseEList<JvmMember>(JvmMember.class, this, TypesPackage.JVM_DECLARED_TYPE__MEMBERS, TypesPackage.JVM_MEMBER__DECLARING_TYPE);
        }
        return members;
    }
}
public class IonDatagramLite {
    /**
     * Returns the number of bytes this datagram occupies when binary encoded,
     * by performing a full binary encoding pass.
     *
     * @throws IonException on encoding failure
     */
    public int byteSize() throws IonException {
        // TODO this is horrible, users will end up encoding multiple times!
        ReverseBinaryEncoder encoder = new ReverseBinaryEncoder(REVERSE_BINARY_ENCODER_INITIAL_SIZE);
        encoder.serialize(this);
        return encoder.byteSize();
    }
}
public class PrivateKeyBruteForceProcessor { /** * Resolve the password from the given private key file . If no password is set an empty Optional
* will be returned .
* @ param privateKeyFile
* the private key file
* @ param processor
* the processor
* @ return the optional */
public static Optional < String > resolvePassword ( File privateKeyFile , @ NonNull BruteForceProcessor processor ) { } } | Optional < String > optionalPassword = Optional . empty ( ) ; try { boolean isPasswordProtected = PrivateKeyReader . isPrivateKeyPasswordProtected ( privateKeyFile ) ; if ( ! isPasswordProtected ) { String attempt ; attempt = processor . getCurrentAttempt ( ) ; Security . addProvider ( new BouncyCastleProvider ( ) ) ; while ( true ) { try { EncryptedPrivateKeyReader . getKeyPair ( privateKeyFile , attempt ) ; optionalPassword = Optional . of ( attempt ) ; break ; } catch ( IOException e ) { attempt = processor . getCurrentAttempt ( ) ; processor . increment ( ) ; } } } } catch ( IOException ex ) { return optionalPassword ; } return optionalPassword ; |
public class ExprParser {
    /**
     * Parses a postfix increment/decrement expression.
     * Grammar: incDec : expr ('++' | '--')
     *
     * @return an IncDec node when a postfix ++/-- follows the operand,
     *         otherwise the operand expression unchanged
     */
    Expr incDec() {
        Expr expr = staticMember();
        Tok tok = peek();
        if (tok.sym == Sym.INC || tok.sym == Sym.DEC) {
            // Consume the operator token; "true" marks the postfix form.
            move();
            return new IncDec(tok.sym, true, expr, location);
        }
        return expr;
    }
}
public class AbstractMapBasedWALDAO {
    /**
     * Adds an item including invoking the callback. Must only be invoked inside
     * a write-lock.
     *
     * @param aNewItem the item to be added; may not be <code>null</code>
     * @return the passed parameter as-is; never <code>null</code>
     * @throws IllegalArgumentException if an item with the same ID is already contained
     */
    @MustBeLocked(ELockType.WRITE)
    @Nonnull
    protected final IMPLTYPE internalCreateItem(@Nonnull final IMPLTYPE aNewItem) {
        // Delegate with "invoke callback" enabled.
        return internalCreateItem(aNewItem, true);
    }
}
public class ReflectExtensions { /** * Retrieves the value of the given accessible field of the given receiver .
* @ param receiver the container of the field , not < code > null < / code >
* @ param fieldName the field ' s name , not < code > null < / code >
* @ return the value of the field
* @ throws NoSuchFieldException see { @ link Class # getField ( String ) }
* @ throws SecurityException see { @ link Class # getField ( String ) }
* @ throws IllegalAccessException see { @ link Field # get ( Object ) }
* @ throws IllegalArgumentException see { @ link Field # get ( Object ) } */
@ SuppressWarnings ( "unchecked" ) /* @ Nullable */
public < T > T get ( Object receiver , String fieldName ) throws SecurityException , NoSuchFieldException , IllegalArgumentException , IllegalAccessException { } } | Preconditions . checkNotNull ( receiver , "receiver" ) ; Preconditions . checkNotNull ( fieldName , "fieldName" ) ; Class < ? extends Object > clazz = receiver . getClass ( ) ; Field f = getDeclaredField ( clazz , fieldName ) ; if ( ! f . isAccessible ( ) ) f . setAccessible ( true ) ; return ( T ) f . get ( receiver ) ; |
public class RtfDocumentSettings { /** * Author : Howard Shank ( hgshank @ yahoo . com )
* @ param level Document protecton level
* @ param pwd Document password - clear text
* @ since 2.1.1 */
public boolean setProtection ( int level , String pwd ) { } } | boolean result = false ; if ( this . protectionHash == null ) { if ( ! setProtectionLevel ( level ) ) { result = false ; } else { protectionHash = RtfProtection . generateHash ( pwd ) ; result = true ; } } else { if ( this . protectionHash . equals ( RtfProtection . generateHash ( pwd ) ) ) { if ( ! setProtectionLevel ( level ) ) { result = false ; } else { protectionHash = RtfProtection . generateHash ( pwd ) ; result = true ; } } } return result ; |
public class XMLOutputter {
    /**
     * Writes the specified ignorable whitespace. Ignorable whitespace may be
     * written anywhere in the XML output stream, except above the XML
     * declaration. If the state equals {@link #BEFORE_XML_DECLARATION} it will
     * be set to {@link #BEFORE_DTD_DECLARATION}; if the state is
     * {@link #START_TAG_OPEN} it will be set to {@link #WITHIN_ELEMENT};
     * otherwise the state is unchanged.
     *
     * @param whitespace the ignorable whitespace to be written, not <code>null</code>
     * @throws IllegalStateException if the current state does not permit whitespace
     * @throws IllegalArgumentException if <code>whitespace == null</code>
     * @throws InvalidXMLException if the string contains a character invalid as whitespace
     * @throws IOException if an I/O error occurs; this sets the state to {@link #ERROR_STATE}
     */
    @Override
    public final void whitespace(String whitespace) throws IllegalStateException, IllegalArgumentException, InvalidXMLException, IOException {
        // Check state: whitespace is only legal in these six states.
        if (_state != XMLEventListenerStates.BEFORE_XML_DECLARATION && _state != XMLEventListenerStates.BEFORE_DTD_DECLARATION && _state != XMLEventListenerStates.BEFORE_ROOT_ELEMENT && _state != XMLEventListenerStates.START_TAG_OPEN && _state != XMLEventListenerStates.WITHIN_ELEMENT && _state != XMLEventListenerStates.AFTER_ROOT_ELEMENT) {
            throw new IllegalStateException("getState() == " + _state);
            // Check arguments
        } else if (whitespace == null) {
            throw new IllegalArgumentException("whitespace == null");
        }
        XMLEventListenerState oldState = _state;
        // Temporarily set the state to ERROR_STATE. Unless an exception is
        // thrown in the write methods, it will be reset to a valid state.
        _state = XMLEventListenerStates.ERROR_STATE;
        // Write output: an open start tag must be closed before whitespace.
        if (oldState == XMLEventListenerStates.START_TAG_OPEN) {
            closeStartTag();
        }
        // Do the actual output
        _encoder.whitespace(_out, whitespace);
        // Change state per the transition rules documented above.
        if (oldState == XMLEventListenerStates.BEFORE_XML_DECLARATION) {
            _state = XMLEventListenerStates.BEFORE_DTD_DECLARATION;
        } else if (oldState == XMLEventListenerStates.START_TAG_OPEN) {
            _state = XMLEventListenerStates.WITHIN_ELEMENT;
        } else {
            _state = oldState;
        }
        // State has changed, check
        checkInvariants();
    }
}
public class TedDriver {
    /**
     * Creates tasks from the list plus a batch task that groups them.
     *
     * @return the batch task id
     */
    public Long createBatch(String batchTaskName, String data, String key1, String key2, List<TedTask> tedTasks) {
        // Delegate to the driver implementation.
        return tedDriverImpl.createBatch(batchTaskName, data, key1, key2, tedTasks);
    }
}
public class TVRageParser { /** * Process AKA information
* @ param showInfo
* @ param eShowInfo */
private static void processAka ( ShowInfo showInfo , Element eShowInfo ) { } } | NodeList nlAkas = eShowInfo . getElementsByTagName ( "aka" ) ; for ( int loop = 0 ; loop < nlAkas . getLength ( ) ; loop ++ ) { Node nShowInfo = nlAkas . item ( loop ) ; if ( nShowInfo . getNodeType ( ) == Node . ELEMENT_NODE ) { Element eAka = ( Element ) nShowInfo ; CountryDetail newAka = new CountryDetail ( ) ; newAka . setCountry ( eAka . getAttribute ( COUNTRY ) ) ; newAka . setDetail ( eAka . getTextContent ( ) ) ; showInfo . addAka ( newAka ) ; } } |
public class ImmutableRoaringBitmap {
    /**
     * Computes OR between input bitmaps in the given range, from rangeStart
     * (inclusive) to rangeEnd (exclusive).
     *
     * @param bitmaps input bitmaps, these are not modified
     * @param rangeStart inclusive beginning of range
     * @param rangeEnd exclusive ending of range
     * @return new result bitmap
     */
    public static MutableRoaringBitmap or(final Iterator<? extends ImmutableRoaringBitmap> bitmaps, final long rangeStart, final long rangeEnd) {
        MutableRoaringBitmap.rangeSanityCheck(rangeStart, rangeEnd);
        // Restrict each bitmap to the requested range without copying the
        // originals, then delegate to the unranged OR.
        Iterator<ImmutableRoaringBitmap> bitmapsIterator;
        bitmapsIterator = selectRangeWithoutCopy(bitmaps, rangeStart, rangeEnd);
        return or(bitmapsIterator);
    }
}
public class StartContentModerationRequestMarshaller {
    /**
     * Marshalls the given parameter object into the protocol representation.
     *
     * @throws SdkClientException when the request is null or marshalling fails
     */
    public void marshall(StartContentModerationRequest startContentModerationRequest, ProtocolMarshaller protocolMarshaller) {
        if (startContentModerationRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request member to its generated binding, in order.
            protocolMarshaller.marshall(startContentModerationRequest.getVideo(), VIDEO_BINDING);
            protocolMarshaller.marshall(startContentModerationRequest.getMinConfidence(), MINCONFIDENCE_BINDING);
            protocolMarshaller.marshall(startContentModerationRequest.getClientRequestToken(), CLIENTREQUESTTOKEN_BINDING);
            protocolMarshaller.marshall(startContentModerationRequest.getNotificationChannel(), NOTIFICATIONCHANNEL_BINDING);
            protocolMarshaller.marshall(startContentModerationRequest.getJobTag(), JOBTAG_BINDING);
        } catch (Exception e) {
            // Preserve the cause; surface as an SDK client error.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class LObjSrtPredicateBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */
@ Nonnull public static < T > LObjSrtPredicateBuilder < T > objSrtPredicate ( Consumer < LObjSrtPredicate < T > > consumer ) { } } | return new LObjSrtPredicateBuilder ( consumer ) ; |
public class KinesisConfigUtil { /** * Replace deprecated configuration properties for { @ link FlinkKinesisProducer } .
* This should be remove along with deprecated keys */
public static Properties replaceDeprecatedProducerKeys ( Properties configProps ) { } } | // Replace deprecated key
if ( configProps . containsKey ( ProducerConfigConstants . COLLECTION_MAX_COUNT ) ) { configProps . setProperty ( COLLECTION_MAX_COUNT , configProps . getProperty ( ProducerConfigConstants . COLLECTION_MAX_COUNT ) ) ; configProps . remove ( ProducerConfigConstants . COLLECTION_MAX_COUNT ) ; } // Replace deprecated key
if ( configProps . containsKey ( ProducerConfigConstants . AGGREGATION_MAX_COUNT ) ) { configProps . setProperty ( AGGREGATION_MAX_COUNT , configProps . getProperty ( ProducerConfigConstants . AGGREGATION_MAX_COUNT ) ) ; configProps . remove ( ProducerConfigConstants . AGGREGATION_MAX_COUNT ) ; } return configProps ; |
public class ZooKeeperClient { /** * Checks if a path exists .
* @ param path
* @ return
* @ throws ZooKeeperException */
public boolean nodeExists ( String path ) throws ZooKeeperException { } } | try { Stat stat = curatorFramework . checkExists ( ) . forPath ( path ) ; return stat != null ; } catch ( Exception e ) { if ( e instanceof ZooKeeperException ) { throw ( ZooKeeperException ) e ; } else { throw new ZooKeeperException ( e ) ; } } |
public class ObjectWrapper {
    /**
     * Returns the value of the specified mapped property from the wrapped object.
     *
     * @param propertyName the name of the mapped property whose value is to be
     *        extracted, cannot be {@code null}
     * @param key the key of the property value to be extracted, can be {@code null}
     * @return the mapped property value
     * @throws ReflectionException if a reflection error occurs
     * @throws IllegalArgumentException if the propertyName parameter is {@code null}
     * @throws IllegalArgumentException if the mapped object in the wrapped object is not a {@link Map} type
     * @throws NullPointerException if the mapped object in the wrapped object is {@code null}
     * @throws NullPointerException if the wrapped object does not have a property with the given name
     */
    public Object getMappedValue(String propertyName, Object key) {
        // NOTE(review): resolves the property descriptor on `bean` but reads the
        // value from `object` — presumably both fields refer to the wrapped
        // instance; verify against the field declarations.
        return getMappedValue(object, getPropertyOrThrow(bean, propertyName), key);
    }
}
public class ColumnsImpl { /** * Actually perform the create call to Kanbanery , and don ' t use a flow to build the call .
* You will most probably want to use the { @ link # create ( Column ) } method most of the time .
* @ param column the column data to be used for the new column
* @ return the freshly created ( and populated ) column
* @ throws ServerCommunicationException if the server response could not be fetched */
public Column doCreate ( Column column ) { } } | String url = getDefaultUrl ( ) ; return restClient . doPost ( url , column , GsonTypeTokens . COLUMN ) ; |
public class ComputeFunction { /** * Sends the given message to all vertices that adjacent to the changed vertex .
* This method is mutually exclusive to the method { @ link # getEdges ( ) } and may be called only once .
* @ param m The message to send . */
public final void sendMessageToAllNeighbors ( Message m ) { } } | verifyEdgeUsage ( ) ; outMsg . f1 = m ; while ( edges . hasNext ( ) ) { Tuple next = edges . next ( ) ; outMsg . f0 = next . getField ( 1 ) ; out . collect ( Either . Right ( outMsg ) ) ; } |
public class AbstractTransactionalConnectionListener {
    /**
     * {@inheritDoc}
     * Enlists this connection listener with the current transaction via an
     * interposed synchronization; a no-op when already enlisted or when the
     * listener is being/has been destroyed.
     */
    public void enlist() throws ResourceException {
        // Nothing to do when already enlisted or destroyed/destroying.
        if (isEnlisted() || getState() == DESTROY || getState() == DESTROYED)
            return;
        log.tracef("Enlisting: %s", this);
        try {
            TransactionalConnectionManager txCM = (TransactionalConnectionManager) cm;
            Transaction tx = txCM.getTransactionIntegration().getTransactionManager().getTransaction();
            // Create a synchronization, bind it to the current transaction and
            // register it with the synchronization registry.
            transactionSynchronization = createTransactionSynchronization();
            transactionSynchronization.init(tx);
            transactionSynchronization.enlist();
            txCM.getTransactionIntegration().getTransactionSynchronizationRegistry().registerInterposedSynchronization(transactionSynchronization);
            enlisted = true;
            log.tracef("Enlisted: %s", this);
        } catch (ResourceException re) {
            throw re;
        } catch (Exception e) {
            // Wrap any unexpected failure in the exception type callers expect.
            throw new ResourceException(e);
        }
    }
}
public class MediaDescriptorField { /** * Creates or updates format using payload number and text format description .
* @ param payload the payload number of the format .
* @ param description text description of the format
* @ return format object */
private RTPFormat createFormat ( int payload , Text description ) { } } | MediaType mtype = MediaType . fromDescription ( mediaType ) ; switch ( mtype ) { case AUDIO : return createAudioFormat ( payload , description ) ; case VIDEO : return createVideoFormat ( payload , description ) ; case APPLICATION : return createApplicationFormat ( payload , description ) ; default : return null ; } |
public class CmsWidgetUtil { /** * Collects widget information for a given content value . < p >
* @ param value a content value
* @ return the widget information for the given value */
public static WidgetInfo collectWidgetInfo ( I_CmsXmlContentValue value ) { } } | CmsXmlContentDefinition contentDef = value . getDocument ( ) . getContentDefinition ( ) ; String path = value . getPath ( ) ; return collectWidgetInfo ( contentDef , path ) ; |
public class BinaryLog { /** * Prints the appropriate help , based on the arg */
private void helpAction ( String arg ) { } } | if ( arg == null ) { printMainUsage ( ) ; return ; } String helpTarget = arg . toLowerCase ( ) ; if ( helpTarget . equals ( ACTION_VIEW ) ) { printViewUsage ( ) ; } else if ( helpTarget . equals ( ACTION_COPY ) ) { printCopyUsage ( ) ; } else if ( helpTarget . equals ( ACTION_LISTINSTANCES ) ) { printListInstancesUsage ( ) ; } else { printMainUsage ( ) ; } |
public class HttpClientFinalizer {
    /**
     * RequestSender methods.
     * Sends the given publisher as the request body.
     *
     * @param requestBody the body publisher; must not be null
     */
    @Override
    public HttpClientFinalizer send(Publisher<? extends ByteBuf> requestBody) {
        Objects.requireNonNull(requestBody, "requestBody");
        // Adapt the publisher to the (request, outbound) sender form.
        return send((req, out) -> out.sendObject(requestBody));
    }
}
public class ValidStorageOptions { /** * The valid range of provisioned IOPS . For example , 1000-20000.
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setProvisionedIops ( java . util . Collection ) } or { @ link # withProvisionedIops ( java . util . Collection ) } if you
* want to override the existing values .
* @ param provisionedIops
* The valid range of provisioned IOPS . For example , 1000-20000.
* @ return Returns a reference to this object so that method calls can be chained together . */
public ValidStorageOptions withProvisionedIops ( Range ... provisionedIops ) { } } | if ( this . provisionedIops == null ) { setProvisionedIops ( new java . util . ArrayList < Range > ( provisionedIops . length ) ) ; } for ( Range ele : provisionedIops ) { this . provisionedIops . add ( ele ) ; } return this ; |
public class SerialArrayList { /** * Get u .
* @ param i the
* @ return the u */
public U get ( int i ) { } } | ByteBuffer view = getView ( i ) ; try { return factory . read ( view ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } |
public class PoolManager {

    /**
     * Marks a ManagedConnection stale in this pool manager's connection pools and,
     * depending on the configured purge policy, either flushes every free connection
     * (EntirePool) or arranges validation/pretesting of the surviving connections.
     *
     * @param managedConnectionFactory factory owning the failed connection; used when
     *            validating connections for non-EntirePool purge policies
     * @param mcWrapper wrapper of the failed managed connection; may be null
     * @param affinity caller-supplied affinity object (not used by this method)
     * @concurrency concurrent
     */
    public void fatalErrorNotification(ManagedConnectionFactory managedConnectionFactory, MCWrapper mcWrapper, Object affinity) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(this, tc, "fatalErrorNotification");
        }
        requestingAccessToPool();
        if (mcWrapper != null) {
            mcWrapper.markStale();
        }
        if (gConfigProps.connectionPoolingEnabled) {
            if (gConfigProps.getPurgePolicy() != null) {
                // New with JDBC 4.1 support: if the connection was aborted, skip the
                // entire-pool purge (and the validation call further below).
                boolean aborted = mcWrapper != null
                                  && mcWrapper.getManagedConnectionWithoutStateCheck() instanceof WSManagedConnection
                                  && ((WSManagedConnection) mcWrapper.getManagedConnectionWithoutStateCheck()).isAborted();
                if (gConfigProps.getPurgePolicy() == PurgePolicy.EntirePool && !aborted) {
                    // Reset fatalErrorNotificationTime and remove all free connections.
                    ArrayList<MCWrapper> destroyMCWrappeList = new ArrayList<MCWrapper>();
                    synchronized (destroyMCWrapperListLock) {
                        for (int j = 0; j < gConfigProps.getMaxFreePoolHashSize(); ++j) {
                            // FFDC uses this method and once locked the free pool without
                            // first locking the waiter pool, causing a deadlock - hence the
                            // waiter lock is always taken first here.
                            synchronized (waiterFreePoolLock) {
                                synchronized (freePool[j].freeConnectionLockObject) {
                                    // If a connection escapes this sweep, the bumped
                                    // fatal-error value guarantees it is purged when it
                                    // returns to the free pool.
                                    freePool[j].incrementFatalErrorValue(j);
                                    // Move as many connections as possible from this free
                                    // pool bucket to the local destroy list.
                                    if (freePool[j].mcWrapperList.size() > 0) {
                                        int mcWrapperListIndex = freePool[j].mcWrapperList.size() - 1;
                                        for (int k = mcWrapperListIndex; k >= 0; --k) {
                                            MCWrapper mcw = (MCWrapper) freePool[j].mcWrapperList.remove(k);
                                            mcw.setPoolState(0);
                                            destroyMCWrappeList.add(mcw);
                                            --freePool[j].numberOfConnectionsAssignedToThisFreePool;
                                        }
                                    } // end if
                                } // end free lock
                            } // end waiter lock
                        } // end for j
                    } // end sync for destroyMCWrapperListLock
                    // Clean up and destroy the connections collected in the local destroy
                    // list, outside of the locks.  NOTE(review): freePool[0] is used for
                    // every wrapper regardless of which bucket it came from - presumably
                    // cleanupAndDestroyMCWrapper is bucket-independent; confirm.
                    for (int i = 0; i < destroyMCWrappeList.size(); ++i) {
                        MCWrapper mcw = destroyMCWrappeList.get(i);
                        freePool[0].cleanupAndDestroyMCWrapper(mcw);
                        this.totalConnectionCount.decrementAndGet();
                    }
                } // end "EntirePool" purge policy
                else {
                    // Only consider validating-MCF support when the purge policy is not
                    // EntirePool: with EntirePool every connection is destroyed, so there
                    // is nothing left to validate.  When the MCF supports validation this
                    // attempts to clean up and destroy the connections reported by
                    // getInvalidConnections(); active connections are marked stale.
                    // New with JDBC 4.1: skip validation if the connection was aborted.
                    if (gConfigProps.validatingMCFSupported && !aborted) {
                        validateConnections(managedConnectionFactory, false);
                    }
                    // For purge policy ValidateAllConnections, mark every non-inactive
                    // connection to be pretested; if a pretest fails a new connection is
                    // created.  Inactive (empty) wrappers are deliberately skipped.
                    if (gConfigProps.getPurgePolicy() == PurgePolicy.ValidateAllConnections) {
                        mcToMCWMapWrite.lock();
                        try {
                            Collection<MCWrapper> s = mcToMCWMap.values();
                            Iterator<MCWrapper> i = s.iterator();
                            while (i.hasNext()) {
                                com.ibm.ejs.j2c.MCWrapper mcw = (com.ibm.ejs.j2c.MCWrapper) i.next();
                                if (mcw.getState() != com.ibm.ejs.j2c.MCWrapper.STATE_INACTIVE) {
                                    mcw.setPretestThisConnection(true);
                                }
                            }
                        } finally {
                            mcToMCWMapWrite.unlock();
                        }
                    }
                } // end of purge policy NOT EntirePool
            } // end of purge policy not null
        } // end of connection pooling enabled
        else {
            // Connection pooling in the free pool is disabled; no further processing needed.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(this, tc, "Pooling disabled, fatal error processing completed.");
            }
        }
        activeRequest.decrementAndGet();
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(this, tc, "fatalErrorNotification");
        }
    }
}
public class BoxTableExtractor { /** * Adjusts the table heading rows and footnote rows
* @ param tc
* the object of the table candidate */
private void determineFootnoteHeading ( TableCandidate tc ) { } } | /* * Gets the maximal column number based on all table rows before footnote */
int maxColumnNum = 0 ; for ( int i = 0 ; i < tc . getFootnoteBeginRow ( ) ; i ++ ) { TableRow row = tc . getRows ( ) . get ( i ) ; if ( row . getCells ( ) . size ( ) > maxColumnNum ) { maxColumnNum = row . getCells ( ) . size ( ) ; } } tc . setMaxColumnNumber ( maxColumnNum ) ; /* * Counts the number of table column heading lines
* TODO : this heading finding algorithm should be improved */
int headingLineNumber = 0 ; for ( int i = 0 ; i < tc . getFootnoteBeginRow ( ) ; i ++ ) { TableRow row = tc . getRows ( ) . get ( i ) ; if ( row . getCells ( ) . size ( ) < maxColumnNum ) { headingLineNumber ++ ; } else break ; } /* * Based on observation , usually we have missing cells , especially happen in the first column */
if ( headingLineNumber > 0 ) headingLineNumber -- ; tc . setHeadingLineNumber ( Math . max ( 0 , headingLineNumber ) ) ; |
public class FileUtil { /** * 将String写入文件 , UTF - 8编码追加模式
* @ param content 写入的内容
* @ param path 文件路径
* @ return 写入的文件
* @ throws IORuntimeException IO异常
* @ since 3.1.2 */
public static File appendUtf8String ( String content , String path ) throws IORuntimeException { } } | return appendString ( content , path , CharsetUtil . CHARSET_UTF_8 ) ; |
public class ServiceRefObjectFactory {

    /**
     * Creates an instance of a JAX-WS Service sub-class based on the service-ref metadata.
     * Falls back to the generic {@code Service.create(QName)} when a plain Service is
     * requested; otherwise reflectively invokes the (URL, QName) constructor of the
     * generated Service sub-class, resolving the WSDL URL for co-located clients when
     * none is configured.
     *
     * @param tInfo transient metadata describing the service reference
     * @param wsrInfo persistent service-ref metadata (WSDL location is recorded here)
     * @return the constructed Service instance
     * @throws Exception if reflection or URL construction fails
     */
    private Service getServiceInstance(TransientWebServiceRefInfo tInfo, WebServiceRefInfo wsrInfo) throws Exception {
        Class<?> svcSubClass = null;
        Service instance = null;
        // Plain javax.xml.ws.Service requested: use the generic factory method directly.
        if (tInfo.getServiceRefTypeClass() != null && Service.class.getName().equals(tInfo.getServiceRefTypeClassName())) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Creating service instance using generic Service.create(QName)");
            }
            return Service.create(tInfo.getServiceQName());
        }
        // Prefer the generated Service sub-class; otherwise fall back to the declared
        // service interface class.
        if (tInfo.getServiceRefTypeClass() != null && Service.class.isAssignableFrom(tInfo.getServiceRefTypeClass())) {
            svcSubClass = tInfo.getServiceRefTypeClass();
        } else {
            svcSubClass = tInfo.getServiceInterfaceClass();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Attempting to create instance of service sub-class: " + svcSubClass.getName());
        }
        final Class<?> finalSvcSubClass = svcSubClass;
        Constructor<?> constructor = null;
        // We always use the (URL, QName) constructor because it accepts null arguments.
        try {
            final Constructor<?> finalConstructor = (Constructor<?>) AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
                @Override
                public Object run() throws NoSuchMethodException {
                    return finalSvcSubClass.getDeclaredConstructor(new Class[] { URL.class, QName.class });
                }
            });
            constructor = finalConstructor;
        } catch (PrivilegedActionException e) {
            // Unwrap so callers see the real cause rather than the privileged wrapper.
            if (e.getException() != null) {
                throw e.getException();
            } else {
                throw e;
            }
        }
        // A null WSDL URL is tolerated: the service is then created without a WSDL document.
        final URL url = tInfo.getWsdlURL();
        // JSR-109: for co-located clients (client and server in the same Java EE
        // application unit) with a generated Service class, the final WSDL location is
        // resolved by matching the Service name against the deployed port components.
        // Future considerations: verify a configured wsdlLocation for co-located
        // clients, ear-level resolution, and virtual hosts.
        if (url == null) {
            JaxWsModuleMetaData jaxwsModuleMetaData = tInfo.getClientMetaData().getModuleMetaData();
            String applicationName = jaxwsModuleMetaData.getJ2EEName().getApplication();
            String contextRoot = jaxwsModuleMetaData.getContextRoot();
            Map<String, String> appNameURLMap = jaxwsModuleMetaData.getAppNameURLMap();
            Container moduleContainer = jaxwsModuleMetaData.getModuleContainer();
            NonPersistentCache overlayCache;
            try {
                overlayCache = moduleContainer.adapt(NonPersistentCache.class);
                JaxWsModuleInfo jaxWsModuleInfo = (JaxWsModuleInfo) overlayCache.getFromCache(JaxWsModuleInfo.class);
                if (jaxWsModuleInfo != null) {
                    for (EndpointInfo endpointInfo : jaxWsModuleInfo.getEndpointInfos()) {
                        String address = endpointInfo.getAddress(0).substring(1);
                        String serviceName = wsrInfo.getServiceQName().getLocalPart();
                        if (serviceName.equals(address)) {
                            String wsdlLocation = null;
                            if ((appNameURLMap != null) && (!appNameURLMap.isEmpty())) {
                                String applicationURL = appNameURLMap.get(applicationName);
                                wsdlLocation = applicationURL + "/" + address + "?wsdl";
                            } else {
                                wsdlLocation = getWsdlUrl() + contextRoot + "/" + address + "?wsdl";
                            }
                            final URL newURl = new URL(wsdlLocation);
                            wsrInfo.setWsdlLocation(wsdlLocation);
                            try {
                                final Constructor<?> finalConstructor = constructor;
                                final QName serviceQName = tInfo.getServiceQName();
                                instance = (Service) AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
                                    @Override
                                    public Object run() throws InstantiationException, IllegalAccessException, InvocationTargetException {
                                        finalConstructor.setAccessible(true);
                                        return finalConstructor.newInstance(new Object[] { newURl, serviceQName });
                                    }
                                });
                            } catch (PrivilegedActionException e) {
                                if (e.getException() != null) {
                                    throw e.getException();
                                } else {
                                    throw e;
                                }
                            }
                            break;
                        }
                    }
                }
            } catch (UnableToAdaptException e) {
                // NOTE(review): adaptation failure is silently ignored here; the
                // service is then created below without a WSDL document - confirm
                // this best-effort behavior is intended.
            }
        }
        if (instance != null) {
            return instance;
        }
        if (url != null) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Creating Service with WSDL URL: " + url + " and QName: " + tInfo.getServiceQName() + " for class: " + svcSubClass.getName());
            }
        }
        try {
            final Constructor<?> finalConstructor = constructor;
            final QName serviceQName = tInfo.getServiceQName();
            instance = (Service) AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
                @Override
                public Object run() throws InstantiationException, IllegalAccessException, InvocationTargetException {
                    finalConstructor.setAccessible(true);
                    return finalConstructor.newInstance(new Object[] { url, serviceQName });
                }
            });
        } catch (PrivilegedActionException e) {
            if (e.getException() != null) {
                throw e.getException();
            } else {
                throw e;
            }
        }
        return instance;
    }
}
import java.util.ArrayList;
import java.util.Arrays;

public class Main {

    /**
     * Empties the given "tuple" (modelled as an {@link ArrayList}) in place.
     *
     * <p>Examples:
     * <pre>
     * emptyTuple(new ArrayList&lt;&gt;(Arrays.asList(1, 5, 3, 6, 8)))  -&gt; []
     * emptyTuple(new ArrayList&lt;&gt;(Arrays.asList(2, 1, 4, 5, 6)))  -&gt; []
     * </pre>
     *
     * @param inputTuple the list to empty; it is mutated by this call
     * @return the same (now empty) list instance
     */
    public static ArrayList<Integer> emptyTuple(ArrayList<Integer> inputTuple) {
        // Clear in place and hand the caller back the very same instance.
        inputTuple.clear();
        return inputTuple;
    }

    public static void main(String[] args) {
        ArrayList<Integer> sample = new ArrayList<>(Arrays.asList(1, 5, 3, 6, 8));
        System.out.println(emptyTuple(sample));
    }
}
public class BigRational { /** * Creates a rational number of the specified numerator / denominator BigDecimal values .
* @ param numerator the numerator { @ link BigDecimal } value
* @ param denominator the denominator { @ link BigDecimal } value ( 0 not allowed )
* @ return the rational number
* @ throws ArithmeticException if the denominator is 0 ( division by zero ) */
public static BigRational valueOf ( BigDecimal numerator , BigDecimal denominator ) { } } | return valueOf ( numerator ) . divide ( valueOf ( denominator ) ) ; |
public class VdmCompletionContext { /** * Constructs the completion context for a ' mk _ ' call */
private void consMkContext ( ) { } } | final int MK_LENGTH = "mk_" . length ( ) ; CharSequence subSeq = rawScan . subSequence ( MK_LENGTH , rawScan . length ( ) ) ; processedScan = new StringBuffer ( subSeq ) ; proposalPrefix = processedScan . toString ( ) . trim ( ) ; type = SearchType . Mk ; |
public class EasyPredictModelWrapper { /** * Sort in descending order . */
private SortedClassProbability [ ] sortByDescendingClassProbability ( String [ ] domainValues , double [ ] classProbabilities ) { } } | assert ( classProbabilities . length == domainValues . length ) ; SortedClassProbability [ ] arr = new SortedClassProbability [ domainValues . length ] ; for ( int i = 0 ; i < domainValues . length ; i ++ ) { arr [ i ] = new SortedClassProbability ( ) ; arr [ i ] . name = domainValues [ i ] ; arr [ i ] . probability = classProbabilities [ i ] ; } Arrays . sort ( arr , Collections . reverseOrder ( ) ) ; return arr ; |
public class VatIdValidator { /** * check the VAT identification number , country version for Denmark .
* @ param pvatId vat id to check
* @ return true if checksum is ok */
private boolean checkDkVatId ( final String pvatId ) { } } | final int sum = ( pvatId . charAt ( 2 ) - '0' ) * 2 + ( pvatId . charAt ( 3 ) - '0' ) * 7 + ( pvatId . charAt ( 4 ) - '0' ) * 6 + ( pvatId . charAt ( 5 ) - '0' ) * 5 + ( pvatId . charAt ( 6 ) - '0' ) * 4 + ( pvatId . charAt ( 7 ) - '0' ) * 3 + ( pvatId . charAt ( 8 ) - '0' ) * 2 + pvatId . charAt ( 9 ) - '0' ; return sum % MODULO_11 == 0 ; |
public class AioServer { /** * 关闭服务 */
public void close ( ) { } } | IoUtil . close ( this . channel ) ; if ( null != this . group && false == this . group . isShutdown ( ) ) { try { this . group . shutdownNow ( ) ; } catch ( IOException e ) { // ignore
} } // 结束阻塞
synchronized ( this ) { this . notify ( ) ; } |
public class Links { /** * Returns the { @ link Link } with the given rel .
* @ param rel the relation type to lookup a link for .
* @ return the link with the given rel or { @ literal Optional # empty ( ) } if none found . */
public Optional < Link > getLink ( LinkRelation rel ) { } } | return links . stream ( ) . filter ( it -> it . hasRel ( rel ) ) . findFirst ( ) ; |
public class MemcachedBackupSession { /** * Filter map of attributes using our name pattern .
* @ return the filtered attribute map that only includes attributes that shall be stored in memcached . */
public ConcurrentMap < String , Object > getAttributesFiltered ( ) { } } | if ( this . manager == null ) { throw new IllegalStateException ( "There's no manager set." ) ; } final Pattern pattern = ( ( SessionManager ) manager ) . getMemcachedSessionService ( ) . getSessionAttributePattern ( ) ; final ConcurrentMap < String , Object > attributes = getAttributesInternal ( ) ; if ( pattern == null ) { return attributes ; } final ConcurrentMap < String , Object > result = new ConcurrentHashMap < String , Object > ( attributes . size ( ) ) ; for ( final Map . Entry < String , Object > entry : attributes . entrySet ( ) ) { if ( pattern . matcher ( entry . getKey ( ) ) . matches ( ) ) { result . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } return result ; |
public class StrBase { /** * Returns the index within this str of the first occurrence of
* the specified object . The function will check if the object is
* an instance of StrBase , String , Character or Integer , and delegate
* to the { @ code indexOf ( . . . ) } override methods respectively .
* < p > If the object is not of the above types , then { @ code - 1 } returned < / p >
* @ param o the object
* @ return the index of the first occurrence of the object in this str */
@ Override public int indexOf ( Object o ) { } } | if ( getClass ( ) . isAssignableFrom ( o . getClass ( ) ) ) { T str = $ . cast ( o ) ; return indexOf ( str ) ; } else if ( o instanceof CharSequence ) { CharSequence str = ( CharSequence ) o ; return indexOf ( str ) ; } else if ( o instanceof Character ) { Character c = ( Character ) o ; return indexOf ( ( int ) c ) ; } else if ( o instanceof Integer ) { Integer n = ( Integer ) o ; return indexOf ( n ) ; } return - 1 ; |
public class RiakMap { /** * Returns a RiakMap to which the specified key is mapped , or { @ literal null }
* if no RiakMap is present .
* @ param key key whose associated RiakMap is to be returned .
* @ return a RiakMap , or null if one is not present . */
public RiakMap getMap ( BinaryValue key ) { } } | if ( entries . containsKey ( key ) ) { for ( RiakDatatype dt : entries . get ( key ) ) { if ( dt . isMap ( ) ) { return dt . getAsMap ( ) ; } } } return null ; |
public class ConcatVectorNamespace { /** * This writes a feature ' s individual value , using the human readable name if possible , to a StringBuilder */
private void debugFeatureValue ( String feature , int index , ConcatVector vector , BufferedWriter bw ) throws IOException { } } | bw . write ( "\t" ) ; if ( sparseFeatureIndex . containsKey ( feature ) && sparseFeatureIndex . get ( feature ) . values ( ) . contains ( index ) ) { // we can map this index to an interpretable string , so we do
bw . write ( "SPARSE VALUE \"" ) ; bw . write ( reverseSparseFeatureIndex . get ( feature ) . get ( index ) ) ; bw . write ( "\"" ) ; } else { // we can ' t map this to a useful string , so we default to the number
bw . write ( Integer . toString ( index ) ) ; } bw . write ( ": " ) ; bw . write ( Double . toString ( vector . getValueAt ( featureToIndex . getOrDefault ( feature , - 1 ) , index ) ) ) ; bw . write ( "\n" ) ; |
public class Nd4jBase64 { /** * Returns a set of arrays
* from base 64 that is tab delimited .
* @ param base64 the base 64 that ' s tab delimited
* @ return the set of arrays */
public static INDArray [ ] arraysFromBase64 ( String base64 ) throws IOException { } } | String [ ] base64Arr = base64 . split ( "\t" ) ; INDArray [ ] ret = new INDArray [ base64Arr . length ] ; for ( int i = 0 ; i < base64Arr . length ; i ++ ) { byte [ ] decode = Base64 . decodeBase64 ( base64Arr [ i ] ) ; ByteArrayInputStream bis = new ByteArrayInputStream ( decode ) ; DataInputStream dis = new DataInputStream ( bis ) ; INDArray predict = Nd4j . read ( dis ) ; ret [ i ] = predict ; } return ret ; |
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class ShortestSublist {

    /**
     * Finds the shortest sublist within a list of lists.
     *
     * <p>Examples:
     * <pre>
     * findShortestSublist(asList(asList(0), asList(1, 3), asList(5, 7)))  -&gt; [1, [0]]
     * findShortestSublist(asList(asList(1), asList(5, 7), asList(10, 12, 14, 15)))  -&gt; [1, [1]]
     * </pre>
     *
     * @param masterList list of lists for which the shortest sublist is to be found
     * @return a two-element array: index 0 holds the length of the shortest
     *         sublist, index 1 the shortest sublist itself
     */
    public static Object[] findShortestSublist(List<List<Integer>> masterList) {
        int bestLength = Integer.MAX_VALUE;
        List<Integer> bestSublist = new ArrayList<>();
        for (List<Integer> candidate : masterList) {
            if (candidate.size() < bestLength) {
                bestLength = candidate.size();
                bestSublist = candidate;
            }
        }
        return new Object[] { bestLength, bestSublist };
    }
}
public class sent_mails { /** * < pre >
* Use this operation to get sent mail details . .
* < / pre > */
public static sent_mails [ ] get ( nitro_service client ) throws Exception { } } | sent_mails resource = new sent_mails ( ) ; resource . validate ( "get" ) ; return ( sent_mails [ ] ) resource . get_resources ( client ) ; |
public class CacheCluster { /** * A list of VPC Security Groups associated with the cluster .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setSecurityGroups ( java . util . Collection ) } or { @ link # withSecurityGroups ( java . util . Collection ) } if you want
* to override the existing values .
* @ param securityGroups
* A list of VPC Security Groups associated with the cluster .
* @ return Returns a reference to this object so that method calls can be chained together . */
public CacheCluster withSecurityGroups ( SecurityGroupMembership ... securityGroups ) { } } | if ( this . securityGroups == null ) { setSecurityGroups ( new com . amazonaws . internal . SdkInternalList < SecurityGroupMembership > ( securityGroups . length ) ) ; } for ( SecurityGroupMembership ele : securityGroups ) { this . securityGroups . add ( ele ) ; } return this ; |
public class SQLiteExecutor { /** * Update the records in data store with the properties which have been updated / set in the specified < code > entity < / code > by the specified condition .
* if the entity implements < code > DirtyMarker < / code > interface , just update the dirty properties .
* To exclude the some properties or default value , invoke { @ code com . landawn . abacus . util . N # entity2Map ( Object , boolean , Collection , NamingPolicy ) }
* @ param table
* @ param record can be < code > Map < / code > or < code > entity < / code > with getter / setter methods
* @ param whereClause Only binary ( = , < > , like , IS NULL . . . ) / between / junction ( or , and . . . ) are supported .
* @ return
* @ see com . landawn . abacus . util . Maps # entity2Map ( Object , boolean , Collection , NamingPolicy ) */
public int update ( String table , Object record , Condition whereClause ) { } } | table = formatName ( table ) ; final ContentValues contentValues = record instanceof ContentValues ? ( ContentValues ) record : toContentValues ( record , readOrWriteOnlyPropNamesMap . get ( record . getClass ( ) ) , columnNamingPolicy , true ) ; removeIdDefaultValue ( contentValues ) ; if ( whereClause == null ) { return sqliteDB . update ( table , contentValues , null , N . EMPTY_STRING_ARRAY ) ; } else { final Command cmd = interpretCondition ( whereClause ) ; return sqliteDB . update ( table , contentValues , cmd . getSql ( ) , cmd . getArgs ( ) ) ; } |
public class GatewayFactory { /** * Creates an implementation of an Gateway .
* @ return a Gateway */
public static Gateway createGateway ( ) { } } | Gateway gateway = null ; for ( GatewayCreator factory : loader ) { gateway = factory . createGateway ( gateway ) ; factory . configureGateway ( gateway ) ; } if ( gateway == null ) { throw new RuntimeException ( "Failed to load GatewayCreator implementation class." ) ; } return gateway ; |
public class SpiderDataAger {

    /**
     * Scans this ager's table for objects that have expired relative to the
     * current time and deletes them in batches, following continuation tokens
     * until the query is exhausted. Does nothing when data aging is disabled.
     */
    private void checkTable() {
        // Documentation says that "0 xxx" means data-aging is disabled.
        if (m_retentionAge.getValue() == 0) {
            m_logger.info("Data aging disabled for table: {}", m_tableDef.getTableName());
            return;
        }
        m_logger.info("Checking expired objects for: {}", m_tableDef.getTableName());
        GregorianCalendar checkDate = new GregorianCalendar(Utils.UTC_TIMEZONE);
        GregorianCalendar expireDate = m_retentionAge.getExpiredDate(checkDate);
        int objsExpired = 0;
        // The query text is fixed; only the continuation token varies per page.
        String fixedQuery = buildFixedQuery(expireDate);
        String contToken = null;
        StringBuilder uriParam = new StringBuilder();
        do {
            uriParam.setLength(0);
            uriParam.append(fixedQuery);
            if (!Utils.isEmpty(contToken)) {
                // Resume the scan where the previous page left off.
                uriParam.append("&g=");
                uriParam.append(contToken);
            }
            ObjectQuery objQuery = new ObjectQuery(m_tableDef, uriParam.toString());
            SearchResultList resultList = SpiderService.instance().objectQuery(m_tableDef, objQuery);
            List<String> objIDs = new ArrayList<>();
            for (SearchResult result : resultList.results) {
                objIDs.add(result.id());
            }
            // Only continue paging if the batch delete succeeded; otherwise stop.
            if (deleteBatch(objIDs)) {
                contToken = resultList.continuation_token;
            } else {
                contToken = null;
            }
            objsExpired += objIDs.size();
            reportProgress("Expired " + objsExpired + " objects");
        } while (!Utils.isEmpty(contToken));
        m_logger.info("Deleted {} objects for {}", objsExpired, m_tableDef.getTableName());
    }
}
public class CmsSerialDateValue { /** * Converts a collection of dates to a JSON array with the long representation of the dates as strings .
* @ param dates the list to convert .
* @ return JSON array with long values of dates as string */
private JSONValue datesToJsonArray ( Collection < Date > dates ) { } } | if ( null != dates ) { JSONArray result = new JSONArray ( ) ; for ( Date d : dates ) { result . set ( result . size ( ) , dateToJson ( d ) ) ; } return result ; } return null ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.