signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class OpenSslKeyMaterialProvider { /** * Returns the { @ link OpenSslKeyMaterial } or { @ code null } ( if none ) that should be used during the handshake by
* OpenSSL . */
OpenSslKeyMaterial chooseKeyMaterial ( ByteBufAllocator allocator , String alias ) throws Exception { } } | X509Certificate [ ] certificates = keyManager . getCertificateChain ( alias ) ; if ( certificates == null || certificates . length == 0 ) { return null ; } PrivateKey key = keyManager . getPrivateKey ( alias ) ; PemEncoded encoded = PemX509Certificate . toPEM ( allocator , true , certificates ) ; long chainBio = 0 ; long pkeyBio = 0 ; long chain = 0 ; long pkey = 0 ; try { chainBio = toBIO ( allocator , encoded . retain ( ) ) ; chain = SSL . parseX509Chain ( chainBio ) ; OpenSslKeyMaterial keyMaterial ; if ( key instanceof OpenSslPrivateKey ) { keyMaterial = ( ( OpenSslPrivateKey ) key ) . newKeyMaterial ( chain , certificates ) ; } else { pkeyBio = toBIO ( allocator , key ) ; pkey = key == null ? 0 : SSL . parsePrivateKey ( pkeyBio , password ) ; keyMaterial = new DefaultOpenSslKeyMaterial ( chain , pkey , certificates ) ; } // See the chain and pkey to 0 so we will not release it as the ownership was
// transferred to OpenSslKeyMaterial .
chain = 0 ; pkey = 0 ; return keyMaterial ; } finally { SSL . freeBIO ( chainBio ) ; SSL . freeBIO ( pkeyBio ) ; if ( chain != 0 ) { SSL . freeX509Chain ( chain ) ; } if ( pkey != 0 ) { SSL . freePrivateKey ( pkey ) ; } encoded . release ( ) ; } |
public class Validate { /** * Checks if the given iterable object contains < code > null < / code > values .
* @ param coll The iterable object to validate .
* @ throws ParameterException if the given object contains any < code > null < / code > values . */
public static < O extends Object > void noNullElements ( Iterable < O > coll ) { } } | if ( ! validation ) return ; notNull ( coll ) ; for ( O o : coll ) { try { notNull ( o ) ; } catch ( ParameterException e1 ) { throw new ParameterException ( ErrorCode . NULLELEMENTS ) ; } } |
public class AvailBlankingMarshaller { /** * Marshall the given parameter object . */
public void marshall ( AvailBlanking availBlanking , ProtocolMarshaller protocolMarshaller ) { } } | if ( availBlanking == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( availBlanking . getAvailBlankingImage ( ) , AVAILBLANKINGIMAGE_BINDING ) ; protocolMarshaller . marshall ( availBlanking . getState ( ) , STATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class Hashids { /** * Encrypt hexa to string
* @ param hexa the hexa to encrypt
* @ return the encrypt string */
public String encodeHex ( String hexa ) { } } | if ( ! hexa . matches ( "^[0-9a-fA-F]+$" ) ) return "" ; List < Long > matched = new ArrayList < Long > ( ) ; Matcher matcher = Pattern . compile ( "[\\w\\W]{1,12}" ) . matcher ( hexa ) ; while ( matcher . find ( ) ) matched . add ( Long . parseLong ( "1" + matcher . group ( ) , 16 ) ) ; // conversion
long [ ] result = new long [ matched . size ( ) ] ; for ( int i = 0 ; i < matched . size ( ) ; i ++ ) result [ i ] = matched . get ( i ) ; return this . _encode ( result ) ; |
public class LemmatizerFixedTrainer { /** * Instantiate the { @ code LemmatizerFactory } according to the features
* specified in the parameters properties file .
* @ param params
* the training parameters
* @ return the factory */
private final LemmatizerFactory getTrainerFactory ( final TrainingParameters params ) { } } | LemmatizerFactory lemmatizerFactory = null ; final String featureSet = Flags . getFeatureSet ( params ) ; if ( featureSet . equalsIgnoreCase ( "chunk" ) ) { try { lemmatizerFactory = LemmatizerFactory . create ( LemmatizerFactory . class . getName ( ) ) ; } catch ( final InvalidFormatException e ) { e . printStackTrace ( ) ; } } else { try { lemmatizerFactory = LemmatizerFactory . create ( LemmatizerFactory . class . getName ( ) ) ; } catch ( final InvalidFormatException e ) { e . printStackTrace ( ) ; } } return lemmatizerFactory ; |
public class SampleRibbonApplication { /** * Use this for debugging ( or if there is no Zipkin server running on port 9411) */
@ Bean @ ConditionalOnProperty ( value = "sample.zipkin.enabled" , havingValue = "false" ) public Reporter < Span > spanReporter ( ) { } } | return new Reporter < Span > ( ) { @ Override public void report ( Span span ) { logger . info ( span ) ; } } ; |
public class ForwardingDrawable { /** * TransformationCallback methods */
protected void getParentTransform ( Matrix transform ) { } } | if ( mTransformCallback != null ) { mTransformCallback . getTransform ( transform ) ; } else { transform . reset ( ) ; } |
public class EnumJsonSerializer { /** * { @ inheritDoc } */
@ Override public void doSerialize ( JsonWriter writer , E value , JsonSerializationContext ctx , JsonSerializerParameters params ) { } } | writer . unescapeValue ( value . name ( ) ) ; |
public class DiagnosticManager { /** * Reports an error . */
public void error ( String message , String sourceString , int position , int startPosition , int endPosition , Object info ) { } } | report ( new AnnotationReport ( Kind . ERROR , message , sourceString , position , startPosition , endPosition , info ) ) ; |
public class MDict { /** * / * Encodable */
@ Override public void encodeTo ( Encoder enc ) { } } | if ( ! isMutated ( ) ) { if ( flDict == null ) { enc . beginDict ( 0 ) ; enc . endDict ( ) ; } else { enc . writeValue ( flDict ) ; } } else { enc . beginDict ( valCount ) ; for ( Map . Entry < String , MValue > entry : valueMap . entrySet ( ) ) { final MValue value = entry . getValue ( ) ; if ( ! value . isEmpty ( ) ) { enc . writeKey ( entry . getKey ( ) ) ; value . encodeTo ( enc ) ; } } if ( flDict != null ) { final FLDictIterator itr = new FLDictIterator ( ) ; try { itr . begin ( flDict ) ; String key ; while ( ( key = itr . getKeyString ( ) ) != null ) { if ( ! valueMap . containsKey ( key ) ) { enc . writeKey ( key ) ; enc . writeValue ( itr . getValue ( ) ) ; } if ( ! itr . next ( ) ) { break ; } } } finally { itr . free ( ) ; } } enc . endDict ( ) ; } |
public class H2GISFunctions { /** * Return a boolean property of the function
* @ param function
* @ param propertyKey
* @ param defaultValue
* @ return */
private static boolean getBooleanProperty ( Function function , String propertyKey , boolean defaultValue ) { } } | Object value = function . getProperty ( propertyKey ) ; return value instanceof Boolean ? ( Boolean ) value : defaultValue ; |
public class CreateSnapshotScheduleResult { /** * A list of clusters associated with the schedule . A maximum of 100 clusters is returned .
* @ return A list of clusters associated with the schedule . A maximum of 100 clusters is returned . */
public java . util . List < ClusterAssociatedToSchedule > getAssociatedClusters ( ) { } } | if ( associatedClusters == null ) { associatedClusters = new com . amazonaws . internal . SdkInternalList < ClusterAssociatedToSchedule > ( ) ; } return associatedClusters ; |
public class ItemProcessorResolverChain { /** * Returns the { @ link ItemProcessor } to use for the given item . Iterates through the chain of resolvers until one
* of them returns a non - null processor . If non of them returns a processor , the { @ code defaultProcessor } will be
* returned . */
@ Override public ItemProcessor getProcessor ( Item item ) { } } | ItemProcessor processor ; if ( CollectionUtils . isNotEmpty ( resolvers ) ) { for ( ItemProcessorResolver resolver : resolvers ) { processor = resolver . getProcessor ( item ) ; if ( processor != null ) { return processor ; } } } return defaultProcessor ; |
public class PlaintextSignatureService { /** * { @ inheritDoc } */
@ Override public String getSignature ( String baseString , String apiSecret , String tokenSecret ) { } } | try { Preconditions . checkEmptyString ( apiSecret , "Api secret cant be null or empty string" ) ; return OAuthEncoder . encode ( apiSecret ) + '&' + OAuthEncoder . encode ( tokenSecret ) ; } catch ( Exception e ) { throw new OAuthSignatureException ( baseString , e ) ; } |
public class RobustLoaderWriterResilienceStrategy { /** * Write the value to the loader - writer if it doesn ' t already exist in it . Note that the load and write pair
* is not atomic . This atomicity , if needed , should be handled by the something else .
* @ param key the key being put
* @ param value the value being put
* @ param e the triggered failure
* @ return the existing value or null if the new was set */
@ Override public V putIfAbsentFailure ( K key , V value , StoreAccessException e ) { } } | // FIXME : Should I care about useLoaderInAtomics ?
try { try { V loaded = loaderWriter . load ( key ) ; if ( loaded != null ) { return loaded ; } } catch ( Exception e1 ) { throw ExceptionFactory . newCacheLoadingException ( e1 , e ) ; } try { loaderWriter . write ( key , value ) ; } catch ( Exception e1 ) { throw ExceptionFactory . newCacheWritingException ( e1 , e ) ; } } finally { cleanup ( key , e ) ; } return null ; |
public class HttpInstanceSummary { /** * If you included any attributes when you registered the instance , the values of those attributes .
* @ param attributes
* If you included any attributes when you registered the instance , the values of those attributes .
* @ return Returns a reference to this object so that method calls can be chained together . */
public HttpInstanceSummary withAttributes ( java . util . Map < String , String > attributes ) { } } | setAttributes ( attributes ) ; return this ; |
public class FactoryEdgeDetectors { /** * Detects the edge of an object using the canny edge detector . The output can be a binary image and / or a
* graph of connected contour points .
* @ see CannyEdge
* @ see CannyEdgeDynamic
* @ param blurRadius Size of the kernel used to blur the image . Try 1 or 2
* @ param dynamicThreshold If true then the thresholds have a range from 0 to 1 and are relative to the
* maximum edge intensity , if false then they are absolute intensity values .
* @ param imageType Type of input image .
* @ param derivType Type of image derivative .
* @ return Canny edge detector */
public static < T extends ImageGray < T > , D extends ImageGray < D > > CannyEdge < T , D > canny ( int blurRadius , boolean saveTrace , boolean dynamicThreshold , Class < T > imageType , Class < D > derivType ) { } } | BlurFilter < T > blur = FactoryBlurFilter . gaussian ( ImageType . single ( imageType ) , - 1 , blurRadius ) ; ImageGradient < T , D > gradient = FactoryDerivative . three ( imageType , derivType ) ; if ( dynamicThreshold ) return new CannyEdgeDynamic < > ( blur , gradient , saveTrace ) ; else return new CannyEdge < > ( blur , gradient , saveTrace ) ; |
public class Day { /** * Find the first of a specific day in a given month . For instance
* first Tuesday of May :
* getFirstOfMonth ( Calendar . TUESDAY , Calendar . MAY , 2005 ) ;
* @ param dayOfWeek Weekday to get .
* @ param month Month of day to get .
* @ param year Year of day to get .
* @ return The requested day . */
public static Day getFirstOfMonth ( int dayOfWeek , int month , int year ) { } } | return Day . getNthOfMonth ( 1 , dayOfWeek , month , year ) ; |
public class BarrierBuffer { /** * Releases the blocks on all channels and resets the barrier count .
* Makes sure the just written data is the next to be consumed . */
private void releaseBlocksAndResetBarriers ( ) throws IOException { } } | LOG . debug ( "{}: End of stream alignment, feeding buffered data back." , inputGate . getOwningTaskName ( ) ) ; for ( int i = 0 ; i < blockedChannels . length ; i ++ ) { blockedChannels [ i ] = false ; } if ( currentBuffered == null ) { // common case : no more buffered data
currentBuffered = bufferBlocker . rollOverReusingResources ( ) ; if ( currentBuffered != null ) { currentBuffered . open ( ) ; } } else { // uncommon case : buffered data pending
// push back the pending data , if we have any
LOG . debug ( "{}: Checkpoint skipped via buffered data:" + "Pushing back current alignment buffers and feeding back new alignment data first." , inputGate . getOwningTaskName ( ) ) ; // since we did not fully drain the previous sequence , we need to allocate a new buffer for this one
BufferOrEventSequence bufferedNow = bufferBlocker . rollOverWithoutReusingResources ( ) ; if ( bufferedNow != null ) { bufferedNow . open ( ) ; queuedBuffered . addFirst ( currentBuffered ) ; numQueuedBytes += currentBuffered . size ( ) ; currentBuffered = bufferedNow ; } } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "{}: Size of buffered data: {} bytes" , inputGate . getOwningTaskName ( ) , currentBuffered == null ? 0L : currentBuffered . size ( ) ) ; } // the next barrier that comes must assume it is the first
numBarriersReceived = 0 ; if ( startOfAlignmentTimestamp > 0 ) { latestAlignmentDurationNanos = System . nanoTime ( ) - startOfAlignmentTimestamp ; startOfAlignmentTimestamp = 0 ; } |
public class ConnecClient { /** * Return the url to the instance endpoint
* @ param entity
* name
* @ param customer
* group id
* @ param entity
* id
* @ return instance url */
public String getInstanceUrl ( String entityName , String groupId , String id ) { } } | return connec . getHost ( ) + getInstanceEndpoint ( entityName , groupId , id ) ; |
public class MediaPanel { /** * Add dirty regions for all our obscurers . */
protected void addObscurerDirtyRegions ( boolean changedOnly ) { } } | if ( _obscurerList != null ) { for ( Obscurer obscurer : _obscurerList ) { Rectangle obscured = obscurer . getObscured ( changedOnly ) ; if ( obscured != null ) { Point pt = new Point ( obscured . x , obscured . y ) ; SwingUtilities . convertPointFromScreen ( pt , this ) ; addObscurerDirtyRegion ( new Rectangle ( pt . x , pt . y , obscured . width , obscured . height ) ) ; } } } |
public class StnemucodResource { /** * should read http : / / www . pixeldonor . com / 2013 / oct / 12 / concurrent - zip - compression - java - nio / */
@ GET @ javax . ws . rs . Path ( "/zip/{path : .*}" ) @ Produces ( "application/zip" ) public Response getDocumentsAsZip ( @ DefaultValue ( "" ) @ PathParam ( "path" ) final String pathAsString , @ Context final UriInfo uriInfo ) throws IOException { } } | final StreamingOutput streamingOutput = new StreamingOutput ( ) { @ Override public void write ( OutputStream output ) throws IOException , WebApplicationException { try { final List < Path > list = documentHelper . list ( pathAsString . length ( ) == 0 ? get ( "/" ) : get ( pathAsString ) ) ; try ( ZipOutputStream zipOutputStream = new ZipOutputStream ( new BufferedOutputStream ( output ) ) ) { zipOutputStream . setLevel ( 9 ) ; for ( Path path : list ) { Path relativized = documentHelper . relativize ( path ) ; zipOutputStream . putNextEntry ( new ZipEntry ( relativized . toString ( ) ) ) ; Serioulizer . Stream stream = documentHelper . getStream ( relativized ) ; if ( stream != null ) { pipe ( stream . getInputStream ( ) , zipOutputStream ) ; } zipOutputStream . closeEntry ( ) ; } } } catch ( Exception e ) { throw new WebApplicationException ( e ) ; } } } ; String filenameWithoutExtension = Joiner . on ( '-' ) . skipNulls ( ) . join ( uriInfo . getBaseUri ( ) . getHost ( ) , "stnemucod" , Strings . emptyToNull ( pathAsString . replace ( '/' , ' ' ) . replace ( '\\' , ' ' ) . trim ( ) . replace ( ' ' , '_' ) ) ) ; return ok ( streamingOutput ) . type ( "application/zip" ) . header ( "Content-Disposition" , "attachment; filename=\"" + filenameWithoutExtension + ".zip\"" ) . build ( ) ; |
public class Jzvd { /** * / / 重力感应的时候调用的函数 , 、 、 这里有重力感应的参数 , 暂时不能删除 */
public void autoFullscreen ( float x ) { } } | if ( CURRENT_JZVD != null && ( currentState == CURRENT_STATE_PLAYING || currentState == CURRENT_STATE_PAUSE ) && currentScreen != SCREEN_WINDOW_FULLSCREEN && currentScreen != SCREEN_WINDOW_TINY ) { if ( x > 0 ) { JZUtils . setRequestedOrientation ( getContext ( ) , ActivityInfo . SCREEN_ORIENTATION_LANDSCAPE ) ; } else { JZUtils . setRequestedOrientation ( getContext ( ) , ActivityInfo . SCREEN_ORIENTATION_REVERSE_LANDSCAPE ) ; } gotoScreenFullscreen ( ) ; } |
public class BasePanel { /** * Set the default button for this basepanel .
* @ param The button to default to on return . */
public void setDefaultButton ( SBaseButton button ) { } } | this . getScreenFieldView ( ) . setDefaultButton ( button == null ? null : button . getScreenFieldView ( ) ) ; |
public class DevicesManagementApi { /** * Returns the all the tasks for a device type . ( asynchronously )
* Returns the all the tasks for a device type .
* @ param dtid Device Type ID . ( required )
* @ param count Max results count . ( optional )
* @ param offset Result starting offset . ( optional )
* @ param status Status filter . Comma - separated statuses . ( optional )
* @ param order Sort results by a field . Valid fields : createdOn . ( optional )
* @ param sort Sort order . Valid values : asc or desc . ( optional )
* @ param callback The callback to be executed when the API call finishes
* @ return The request call
* @ throws ApiException If fail to process the API call , e . g . serializing the request body object */
public com . squareup . okhttp . Call getTasksAsync ( String dtid , Integer count , Integer offset , String status , String order , String sort , final ApiCallback < TaskListEnvelope > callback ) throws ApiException { } } | ProgressResponseBody . ProgressListener progressListener = null ; ProgressRequestBody . ProgressRequestListener progressRequestListener = null ; if ( callback != null ) { progressListener = new ProgressResponseBody . ProgressListener ( ) { @ Override public void update ( long bytesRead , long contentLength , boolean done ) { callback . onDownloadProgress ( bytesRead , contentLength , done ) ; } } ; progressRequestListener = new ProgressRequestBody . ProgressRequestListener ( ) { @ Override public void onRequestProgress ( long bytesWritten , long contentLength , boolean done ) { callback . onUploadProgress ( bytesWritten , contentLength , done ) ; } } ; } com . squareup . okhttp . Call call = getTasksValidateBeforeCall ( dtid , count , offset , status , order , sort , progressListener , progressRequestListener ) ; Type localVarReturnType = new TypeToken < TaskListEnvelope > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ; |
public class BatchedTimeoutManager { /** * Stop the BatchedTimeoutManager */
public void stopTimer ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "stopTimer" ) ; btmLockManager . lockExclusive ( ) ; try { // only stop if we are currently started
if ( ! isStopped ) { // set stopped to true
isStopped = true ; // iterate over the entries in the active list
LinkedListEntry entry = ( LinkedListEntry ) activeEntries . getFirst ( ) ; while ( entry != null && activeEntries . contains ( entry ) ) { // cancel the alarm for each one
if ( entry . alarm != null ) { entry . alarm . cancel ( ) ; entry . alarm = null ; } entry = ( LinkedListEntry ) entry . getNext ( ) ; } } } finally { btmLockManager . unlockExclusive ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "stopTimer" ) ; |
public class SassRubyGenerator { /** * ( non - Javadoc )
* @ see net . jawr . web . resource . bundle . generator . AbstractCachedGenerator #
* isCacheValid ( ) */
@ Override protected boolean isCacheValid ( ) { } } | String cachedUrlMode = cacheProperties . getProperty ( JawrConstant . SASS_GENERATOR_URL_MODE ) ; return super . isCacheValid ( ) && StringUtils . equals ( cachedUrlMode , config . getProperty ( JawrConstant . SASS_GENERATOR_URL_MODE , SASS_GENERATOR_DEFAULT_URL_MODE ) ) ; |
public class CmsSearchReplaceThread { /** * Search and replace function for properties . < p >
* @ param resources to be considered */
private void searchProperties ( List < CmsResource > resources ) { } } | if ( CmsSourceSearchForm . REGEX_ALL . equals ( m_settings . getSearchpattern ( ) ) ) { for ( CmsResource resource : resources ) { m_matchedResources . add ( resource ) ; getReport ( ) . println ( Messages . get ( ) . container ( Messages . RPT_SOURCESEARCH_MATCHED_0 ) , I_CmsReport . FORMAT_OK ) ; } } else { for ( CmsResource resource : resources ) { Matcher matcher ; try { CmsProperty prop = getCms ( ) . readPropertyObject ( resource , m_settings . getProperty ( ) . getName ( ) , false ) ; matcher = Pattern . compile ( m_settings . getSearchpattern ( ) ) . matcher ( prop . getValue ( ) ) ; if ( matcher . find ( ) ) { m_matchedResources . add ( resource ) ; getReport ( ) . println ( Messages . get ( ) . container ( Messages . RPT_SOURCESEARCH_MATCHED_0 ) , I_CmsReport . FORMAT_OK ) ; } else { getReport ( ) . println ( Messages . get ( ) . container ( Messages . RPT_SOURCESEARCH_NOT_MATCHED_0 ) , I_CmsReport . FORMAT_NOTE ) ; } } catch ( CmsException e ) { LOG . error ( "Ubable to read property" , e ) ; } } } if ( m_replace ) { replaceProperties ( m_matchedResources ) ; } // report results
reportResults ( resources . size ( ) ) ; |
public class FileChunker { /** * This method loops the arg baseDir and pushes the found content to the
* arg destSpace .
* @ param baseDir of content to push to DataStore
* @ param destSpaceId of content destination */
protected void addContentFrom ( File baseDir , String destSpaceId ) { } } | Collection < File > files = listFiles ( baseDir , options . getFileFilter ( ) , options . getDirFilter ( ) ) ; for ( File file : files ) { try { doAddContent ( baseDir , destSpaceId , file ) ; } catch ( Exception e ) { StringBuilder sb = new StringBuilder ( "Error: " ) ; sb . append ( "Unable to addContentFrom [" ) ; sb . append ( baseDir ) ; sb . append ( ", " ) ; sb . append ( destSpaceId ) ; sb . append ( "] : " ) ; sb . append ( e . getMessage ( ) ) ; sb . append ( "\n" ) ; sb . append ( ExceptionUtil . getStackTraceAsString ( e ) ) ; log . error ( sb . toString ( ) ) ; } } |
public class Args { /** * Produces the flag form of a path object , using { @ link Path # toAbsolutePath ( ) } as a string .
* @ return { @ code [ - - name , " / absolute / path " ] } or { @ code [ ] } if path is null or not set . */
static List < String > path ( String name , @ Nullable Path path ) { } } | if ( path != null && ! path . toString ( ) . isEmpty ( ) ) { return Arrays . asList ( "--" + name , path . toAbsolutePath ( ) . toString ( ) ) ; } return Collections . emptyList ( ) ; |
public class PropertyUpdate { /** * Add a new property with a specific value to the resource .
* @ param property
* The { @ link ElementDescriptor } of the property to add .
* @ param value
* The value of the property . */
public < T > void set ( ElementDescriptor < T > property , T value ) { } } | if ( mSet == null ) { mSet = new HashMap < ElementDescriptor < ? > , Object > ( 16 ) ; } mSet . put ( property , value ) ; if ( mRemove != null ) { mRemove . remove ( property ) ; } |
public class ArrayUtils { /** * < p > Converts an array of primitive booleans to objects . < / p >
* < p > This method returns < code > null < / code > for a < code > null < / code > input array . < / p >
* @ param array a < code > boolean < / code > array
* @ return a < code > Boolean < / code > array , < code > null < / code > if null array input */
public static Boolean [ ] toObject ( boolean [ ] array ) { } } | if ( array == null ) { return null ; } else if ( array . length == 0 ) { return EMPTY_BOOLEAN_OBJECT_ARRAY ; } final Boolean [ ] result = new Boolean [ array . length ] ; for ( int i = 0 ; i < array . length ; i ++ ) { result [ i ] = ( array [ i ] ? Boolean . TRUE : Boolean . FALSE ) ; } return result ; |
public class PersistableItemsContainer { /** * add a new element , throw a RuntimeException if the element already exists
* @ param element
* @ return the added element */
public T addElement ( T element ) { } } | // make sure that elements are initialized
getElements ( ) ; if ( ! containsElement ( this . elements , element ) ) { this . elements . add ( element ) ; element . addChangeListener ( this . elementChangeListener ) ; element . notifyState ( ) ; return element ; } throw new RuntimeException ( element . toString ( ) + " already exists" ) ; |
public class LanguageProfileReader { /** * Load profiles from the classpath in a specific directory .
* < p > This is usually used to load built - in profiles , shipped with the jar . < / p >
* @ param classLoader the ClassLoader to load the profiles from . Use { @ code MyClass . class . getClassLoader ( ) }
* @ param profileDirectory profile directory path inside the classpath . The default profiles are in " languages " .
* @ param profileFileNames for example [ " en " , " fr " , " de " ] . */
public List < LanguageProfile > read ( ClassLoader classLoader , String profileDirectory , Collection < String > profileFileNames ) throws IOException { } } | List < LanguageProfile > loaded = new ArrayList < > ( profileFileNames . size ( ) ) ; for ( String profileFileName : profileFileNames ) { String path = makePathForClassLoader ( profileDirectory , profileFileName ) ; try ( InputStream in = classLoader . getResourceAsStream ( path ) ) { if ( in == null ) { throw new IOException ( "No language file available named " + profileFileName + " at " + path + "!" ) ; } loaded . add ( read ( in ) ) ; } } return loaded ; |
public class CMinMTTR { /** * Try to place the VMs associated on the actions in a random node while trying first to stay on the current node */
private void placeVMs ( Parameters ps , List < AbstractStrategy < ? > > strategies , List < VMTransition > actions , OnStableNodeFirst schedHeuristic , Map < IntVar , VM > map ) { } } | IntValueSelector rnd = new WorstFit ( map , rp , new BiggestDimension ( ) ) ; if ( ! useResources ) { rnd = new RandomVMPlacement ( rp , map , true , ps . getRandomSeed ( ) ) ; } IntVar [ ] hosts = dSlices ( actions ) . map ( Slice :: getHoster ) . filter ( v -> ! v . isInstantiated ( ) ) . toArray ( IntVar [ ] :: new ) ; if ( hosts . length > 0 ) { strategies . add ( new IntStrategy ( hosts , new HostingVariableSelector ( rp . getModel ( ) , schedHeuristic ) , rnd ) ) ; } |
public class AbstractFFmpegStreamBuilder { /** * Builds the arguments
* @ param parent The parent FFmpegBuilder
* @ param pass The particular pass . For one - pass this value will be zero , for multi - pass , it will
* be 1 for the first pass , 2 for the second , and so on .
* @ return The arguments */
protected List < String > build ( FFmpegBuilder parent , int pass ) { } } | checkNotNull ( parent ) ; if ( pass > 0 ) { // TODO Write a test for this :
checkArgument ( format != null , "Format must be specified when using two-pass" ) ; } ImmutableList . Builder < String > args = new ImmutableList . Builder < > ( ) ; addGlobalFlags ( parent , args ) ; if ( video_enabled ) { addVideoFlags ( parent , args ) ; } else { args . add ( "-vn" ) ; } if ( audio_enabled && pass != 1 ) { addAudioFlags ( args ) ; } else { args . add ( "-an" ) ; } if ( subtitle_enabled ) { if ( ! Strings . isNullOrEmpty ( subtitle_preset ) ) { args . add ( "-spre" , subtitle_preset ) ; } } else { args . add ( "-sn" ) ; } args . addAll ( extra_args ) ; if ( filename != null && uri != null ) { throw new IllegalStateException ( "Only one of filename and uri can be set" ) ; } // Output
if ( pass == 1 ) { args . add ( DEVNULL ) ; } else if ( filename != null ) { args . add ( filename ) ; } else if ( uri != null ) { args . add ( uri . toString ( ) ) ; } else { assert ( false ) ; } return args . build ( ) ; |
public class Form { /** * Returns the parameters containing all name value params beside submit button , image button , unchecked radio
* buttons and checkboxes and without Upload information . */
private Parameters composeParameters ( SubmitButton button , SubmitImage image , int x , int y ) { } } | Parameters params = new Parameters ( ) ; for ( FormElement element : this ) { String name = element . getName ( ) ; String value = element . getValue ( ) ; if ( element instanceof Checkable ) { if ( ( ( Checkable ) element ) . isChecked ( ) ) params . add ( name , value != null ? value : "" ) ; } else if ( element instanceof Select ) { Select select = ( Select ) element ; for ( Select . Option option : select . getOptions ( ) ) if ( option . isSelected ( ) ) params . add ( select . getName ( ) , option . getValue ( ) != null ? option . getValue ( ) : "" ) ; } else if ( ! ( element instanceof SubmitButton ) && ! ( element instanceof SubmitImage ) && ! ( element instanceof Upload ) ) { if ( name != null ) params . add ( name , value != null ? value : "" ) ; } } if ( button != null && button . getName ( ) != null ) params . add ( button . getName ( ) , button . getValue ( ) != null ? button . getValue ( ) : "" ) ; if ( image != null ) { if ( image . getValue ( ) != null && image . getValue ( ) . length ( ) > 1 ) params . add ( image . getName ( ) , image . getValue ( ) ) ; params . add ( image . getName ( ) + ".x" , "" + x ) ; params . add ( image . getName ( ) + ".y" , "" + y ) ; } return params ; |
public class RequestLaunchTemplateData { /** * One or more network interfaces .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setNetworkInterfaces ( java . util . Collection ) } or { @ link # withNetworkInterfaces ( java . util . Collection ) } if
* you want to override the existing values .
* @ param networkInterfaces
* One or more network interfaces .
* @ return Returns a reference to this object so that method calls can be chained together . */
public RequestLaunchTemplateData withNetworkInterfaces ( LaunchTemplateInstanceNetworkInterfaceSpecificationRequest ... networkInterfaces ) { } } | if ( this . networkInterfaces == null ) { setNetworkInterfaces ( new com . amazonaws . internal . SdkInternalList < LaunchTemplateInstanceNetworkInterfaceSpecificationRequest > ( networkInterfaces . length ) ) ; } for ( LaunchTemplateInstanceNetworkInterfaceSpecificationRequest ele : networkInterfaces ) { this . networkInterfaces . add ( ele ) ; } return this ; |
public class FSElsewhere { @ Override public long getTotalSpace ( ) { } } | try { return Filess . getFileStore ( root ) . getTotalSpace ( ) ; } catch ( IOException e ) { throw u ( e ) ; } |
public class NumberUtil { /** * Return the string array that has been split using { @ link # RANGE _ PATTERN }
* @ param str string to split
* @ return string array that has been split using { @ link # RANGE _ PATTERN } */
private static String [ ] splitRange ( final String str ) { } } | if ( StringUtils . isBlank ( str ) ) { return new String [ ] { "" } ; } return str . split ( RANGE_PATTERN ) ; |
public class ManagementChannelHandler { /** * { @ inheritDoc } */
@ Override public < T , A > AsyncFuture < T > executeRequest ( final Integer operationId , final ManagementRequest < T , A > request ) throws IOException { } } | final ActiveOperation < T , A > operation = super . getActiveOperation ( operationId ) ; if ( operation == null ) { throw ProtocolLogger . ROOT_LOGGER . responseHandlerNotFound ( operationId ) ; } return executeRequest ( operation , request ) ; |
public class GosuFragment { /** * - - - - - Private helper methods */
private FragmentInstance createNewInstance ( ) { } } | try { return ( FragmentInstance ) getBackingClass ( ) . newInstance ( ) ; } catch ( InstantiationException e ) { throw new RuntimeException ( e ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( e ) ; } |
public class RepositoryConfiguration { /** * Obtain the names of the workspaces that were listed as being predefined . This includes the name
* { @ link # getDefaultWorkspaceName ( ) default workspace } .
* @ return the set of predefined ( non - system ) workspace names ; never null */
public Set < String > getPredefinedWorkspaceNames ( ) { } } | Set < String > names = new HashSet < String > ( ) ; Document workspaces = doc . getDocument ( FieldName . WORKSPACES ) ; if ( workspaces != null ) { List < ? > predefined = workspaces . getArray ( FieldName . PREDEFINED ) ; if ( predefined != null ) { for ( Object value : predefined ) { if ( value instanceof String ) names . add ( ( String ) value ) ; } } } names . add ( getDefaultWorkspaceName ( ) ) ; return names ; |
public class CmsContainerpageController { /** * Calls the edit handler to prepare the given content element for editing . < p >
* @ param clientId the element id
* @ param editOption the selected edit option
* @ param callback the callback to execute */
public void prepareForEdit ( final String clientId , final String editOption , final I_CmsSimpleCallback < CmsUUID > callback ) { } } | CmsRpcAction < CmsUUID > action = new CmsRpcAction < CmsUUID > ( ) { @ Override public void execute ( ) { getContainerpageService ( ) . prepareForEdit ( clientId , editOption , getData ( ) . getRpcContext ( ) . getPageStructureId ( ) , getData ( ) . getRequestParams ( ) , this ) ; } @ Override protected void onResponse ( CmsUUID result ) { callback . execute ( result ) ; } } ; action . execute ( ) ; |
public class DefaultSentryClientFactory { /** * Creates an HTTP connection to the Sentry server .
* @ param dsn Data Source Name of the Sentry server .
* @ return an { @ link HttpConnection } to the server . */
protected Connection createHttpConnection ( Dsn dsn ) { } } | URL sentryApiUrl = HttpConnection . getSentryApiUrl ( dsn . getUri ( ) , dsn . getProjectId ( ) ) ; String proxyHost = getProxyHost ( dsn ) ; String proxyUser = getProxyUser ( dsn ) ; String proxyPass = getProxyPass ( dsn ) ; int proxyPort = getProxyPort ( dsn ) ; Proxy proxy = null ; if ( proxyHost != null ) { InetSocketAddress proxyAddr = new InetSocketAddress ( proxyHost , proxyPort ) ; proxy = new Proxy ( Proxy . Type . HTTP , proxyAddr ) ; if ( proxyUser != null && proxyPass != null ) { Authenticator . setDefault ( new ProxyAuthenticator ( proxyUser , proxyPass ) ) ; } } Double sampleRate = getSampleRate ( dsn ) ; EventSampler eventSampler = null ; if ( sampleRate != null ) { eventSampler = new RandomEventSampler ( sampleRate ) ; } HttpConnection httpConnection = new HttpConnection ( sentryApiUrl , dsn . getPublicKey ( ) , dsn . getSecretKey ( ) , proxy , eventSampler ) ; Marshaller marshaller = createMarshaller ( dsn ) ; httpConnection . setMarshaller ( marshaller ) ; int timeout = getTimeout ( dsn ) ; httpConnection . setConnectionTimeout ( timeout ) ; boolean bypassSecurityEnabled = getBypassSecurityEnabled ( dsn ) ; httpConnection . setBypassSecurity ( bypassSecurityEnabled ) ; return httpConnection ; |
public class NomadScheduler { /** * Get the task spec for using the docker driver in Nomad
* In docker mode , Heron will be use in docker containers */
Task getTaskSpecDockerDriver ( Task task , String taskName , int containerIndex ) { } } | String executorBinary = Context . executorBinary ( this . clusterConfig ) ; // get arguments for heron executor command
String [ ] executorArgs = SchedulerUtils . executorCommandArgs ( this . clusterConfig , this . runtimeConfig , NomadConstants . EXECUTOR_PORTS , String . valueOf ( containerIndex ) ) ; // get complete heron executor command
String executorCmd = executorBinary + " " + String . join ( " " , executorArgs ) ; // get heron _ downloader command for downloading topology package
String topologyDownloadCmd = getFetchCommand ( this . clusterConfig , this . clusterConfig , this . runtimeConfig ) ; task . setName ( taskName ) ; // use nomad driver
task . setDriver ( NomadConstants . NomadDriver . DOCKER . getName ( ) ) ; // set docker image to use
task . addConfig ( NomadConstants . NOMAD_IMAGE , NomadContext . getHeronExecutorDockerImage ( this . localConfig ) ) ; task . addConfig ( NomadConstants . NOMAD_TASK_COMMAND , NomadConstants . SHELL_CMD ) ; task . addConfig ( NomadConstants . NETWORK_MODE , NomadContext . getHeronNomadNetworkMode ( this . localConfig ) ) ; String setMetricsPortFileCmd = getSetMetricsPortFileCmd ( ) ; String [ ] args = { "-c" , String . format ( "%s && %s && %s" , topologyDownloadCmd , setMetricsPortFileCmd , executorCmd ) } ; task . addConfig ( NomadConstants . NOMAD_TASK_COMMAND_ARGS , args ) ; Map < String , String > envVars = new HashMap < > ( ) ; envVars . put ( NomadConstants . HOST , "${attr.unique.network.ip-address}" ) ; task . setEnv ( envVars ) ; return task ; |
public class TransferThreadManager { /** * Act as the active side . Connect to the server and
* store the newly connected sockets in the socketPool . */
public void activeConnect ( HostPort hp , int connections ) { } } | for ( int i = 0 ; i < connections ; i ++ ) { SocketBox sbox = new ManagedSocketBox ( ) ; logger . debug ( "adding new empty socketBox to the socket pool" ) ; socketPool . add ( sbox ) ; logger . debug ( "connecting active socket " + i + "; total cached sockets = " + socketPool . count ( ) ) ; Task task = new GridFTPActiveConnectTask ( hp , localControlChannel , sbox , gSession ) ; runTask ( task ) ; } |
public class ReferenceElement { /** * Add a reference to another users bare jid to a stanza .
* @ param stanza stanza .
* @ param begin start index of the mention in the messages body .
* @ param end end index of the mention in the messages body .
* @ param jid referenced jid . */
public static void addMention ( Stanza stanza , int begin , int end , BareJid jid ) { } } | URI uri ; try { uri = new URI ( "xmpp:" + jid . toString ( ) ) ; } catch ( URISyntaxException e ) { throw new AssertionError ( "Cannot create URI from bareJid." ) ; } ReferenceElement reference = new ReferenceElement ( begin , end , ReferenceElement . Type . mention , null , uri ) ; stanza . addExtension ( reference ) ; |
public class ResourceHandle { /** * property access */
public < T > T getProperty ( String key , T defaultValue ) { } } | Class < T > type = PropertyUtil . getType ( defaultValue ) ; T value = getProperty ( key , type ) ; return value != null ? value : defaultValue ; |
public class TemplateList { /** * Given a target element , find the template that best
* matches in the given XSL document , according
* to the rules specified in the xsl draft .
* @ param xctxt
* @ param targetNode
* @ param mode A string indicating the display mode .
* @ param quietConflictWarnings
* @ return Rule that best matches targetElem .
* @ throws XSLProcessorException thrown if the active ProblemListener and XPathContext decide
* the error condition is severe enough to halt processing .
* @ throws TransformerException */
public ElemTemplate getTemplate ( XPathContext xctxt , int targetNode , QName mode , boolean quietConflictWarnings , DTM dtm ) throws TransformerException { } } | TemplateSubPatternAssociation head = getHead ( xctxt , targetNode , dtm ) ; if ( null != head ) { // XSLT functions , such as xsl : key , need to be able to get to
// current ElemTemplateElement via a cast to the prefix resolver .
// Setting this fixes bug idkey03.
xctxt . pushNamespaceContextNull ( ) ; xctxt . pushCurrentNodeAndExpression ( targetNode , targetNode ) ; try { do { ElemTemplate template = head . getTemplate ( ) ; xctxt . setNamespaceContext ( template ) ; if ( ( head . m_stepPattern . execute ( xctxt , targetNode ) != NodeTest . SCORE_NONE ) && head . matchMode ( mode ) ) { if ( quietConflictWarnings ) checkConflicts ( head , xctxt , targetNode , mode ) ; return template ; } } while ( null != ( head = head . getNext ( ) ) ) ; } finally { xctxt . popCurrentNodeAndExpression ( ) ; xctxt . popNamespaceContext ( ) ; } } return null ; |
public class ConfigurationsInner { /** * Updates a configuration of a server .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ param configurationName The name of the server configuration .
* @ param parameters The required parameters for updating a server configuration .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the ConfigurationInner object */
public Observable < ConfigurationInner > beginCreateOrUpdateAsync ( String resourceGroupName , String serverName , String configurationName , ConfigurationInner parameters ) { } } | return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , configurationName , parameters ) . map ( new Func1 < ServiceResponse < ConfigurationInner > , ConfigurationInner > ( ) { @ Override public ConfigurationInner call ( ServiceResponse < ConfigurationInner > response ) { return response . body ( ) ; } } ) ; |
public class Filter { /** * Splits the filter from its disjunctive normal form . Or ' ng the filters
* together produces the full disjunctive normal form .
* @ return unmodifiable list of sub filters which don ' t perform any ' or '
* operations
* @ since 1.1.1 */
public List < Filter < S > > disjunctiveNormalFormSplit ( ) { } } | final List < Filter < S > > list = new ArrayList < Filter < S > > ( ) ; disjunctiveNormalForm ( ) . accept ( new Visitor < S , Object , Object > ( ) { @ Override public Object visit ( AndFilter < S > filter , Object param ) { list . add ( filter ) ; return null ; } @ Override public Object visit ( PropertyFilter < S > filter , Object param ) { list . add ( filter ) ; return null ; } @ Override public Object visit ( ExistsFilter < S > filter , Object param ) { list . add ( filter ) ; return null ; } } , null ) ; return Collections . unmodifiableList ( list ) ; |
public class CommerceOrderPaymentLocalServiceBaseImpl { /** * Creates a new commerce order payment with the primary key . Does not add the commerce order payment to the database .
* @ param commerceOrderPaymentId the primary key for the new commerce order payment
* @ return the new commerce order payment */
@ Override @ Transactional ( enabled = false ) public CommerceOrderPayment createCommerceOrderPayment ( long commerceOrderPaymentId ) { } } | return commerceOrderPaymentPersistence . create ( commerceOrderPaymentId ) ; |
public class PageFlowContext { /** * Factory method that will return the < code > PageFlowContext < / code > object to the caller .
* @ return A < code > PageFlowContext < / code > that is associated with the tread . */
public static PageFlowContext getContext ( ) { } } | PageFlowContext ctxt = ( PageFlowContext ) pageflowContext . get ( ) ; if ( ctxt == null ) { ctxt = new PageFlowContext ( ) ; pageflowContext . set ( ctxt ) ; } return ctxt ; |
public class BuildStepCompatibilityLayer { /** * { @ inheritDoc }
* @ return Delegates to { @ link SimpleBuildStep # perform ( Run , FilePath , Launcher , TaskListener ) } if possible , always returning true or throwing an error . */
@ Override public boolean perform ( AbstractBuild < ? , ? > build , Launcher launcher , BuildListener listener ) throws InterruptedException , IOException { } } | if ( this instanceof SimpleBuildStep ) { // delegate to the overloaded version defined in SimpleBuildStep
FilePath workspace = build . getWorkspace ( ) ; if ( workspace == null ) { throw new AbortException ( "no workspace for " + build ) ; } ( ( SimpleBuildStep ) this ) . perform ( build , workspace , launcher , listener ) ; return true ; } else if ( build instanceof Build ) { // delegate to the legacy signature deprecated in 1.312
return perform ( ( Build ) build , launcher , listener ) ; } else { return true ; } |
public class SubnetworkId { /** * Returns a subnetwork identity given the region identity and the subnetwork name . The subnetwork
* name must be 1-63 characters long and comply with RFC1035 . Specifically , the name must match
* the regular expression { @ code [ a - z ] ( [ - a - z0-9 ] * [ a - z0-9 ] ) ? } which means the first character must
* be a lowercase letter , and all following characters must be a dash , lowercase letter , or digit ,
* except the last character , which cannot be a dash .
* @ see < a href = " https : / / www . ietf . org / rfc / rfc1035 . txt " > RFC1035 < / a > */
public static SubnetworkId of ( RegionId regionId , String subnetwork ) { } } | return new SubnetworkId ( regionId . getProject ( ) , regionId . getRegion ( ) , subnetwork ) ; |
public class DCacheBase { /** * This is a helper method to initialize event source for invalidation listener */
private boolean initEventSource ( ) { } } | boolean success = false ; try { eventSource = ServerCache . cacheUnit . createEventSource ( cacheConfig . useListenerContext , cacheName ) ; } catch ( Exception ex ) { com . ibm . ws . ffdc . FFDCFilter . processException ( ex , "com.ibm.ws.cache.Cache.initEventSource" , "3289" , this ) ; } if ( eventSource != null ) { success = true ; } return success ; |
public class DefaultElementProducer { /** * leaves object creation to the first styler in the list
* @ param < E >
* @ param stylers
* @ param data
* @ param clazz
* @ return
* @ throws VectorPrintException */
public < E extends Element > E createElementByStyler ( Collection < ? extends BaseStyler > stylers , Object data , Class < E > clazz ) throws VectorPrintException { } } | // pdfptable , Section and others do not have a default constructor , a styler creates it
E e = null ; return styleHelper . style ( e , data , stylers ) ; |
public class SnowflakeFileTransferAgent { /** * A callable that can be executed in a separate thread using exeuctor service .
* The callable does compression if needed and upload the result to the
* table ' s staging area .
* @ param stage information about the stage
* @ param srcFilePath source file path
* @ param metadata file metadata
* @ param client client object used to communicate with c3
* @ param connection connection object
* @ param command command string
* @ param inputStream null if upload source is file
* @ param sourceFromStream whether upload source is file or stream
* @ param parallel number of threads for parallel uploading
* @ param srcFile source file name
* @ param encMat not null if encryption is required
* @ return a callable that uploading file to the remote store */
public static Callable < Void > getUploadFileCallable ( final StageInfo stage , final String srcFilePath , final FileMetadata metadata , final SnowflakeStorageClient client , final SFSession connection , final String command , final InputStream inputStream , final boolean sourceFromStream , final int parallel , final File srcFile , final RemoteStoreFileEncryptionMaterial encMat ) { } } | return new Callable < Void > ( ) { public Void call ( ) throws Exception { logger . debug ( "Entering getUploadFileCallable..." ) ; InputStream uploadStream = inputStream ; File fileToUpload = null ; if ( uploadStream == null ) { try { uploadStream = new FileInputStream ( srcFilePath ) ; } catch ( FileNotFoundException ex ) { metadata . resultStatus = ResultStatus . ERROR ; metadata . errorDetails = ex . getMessage ( ) ; throw ex ; } } // this shouldn ' t happen
if ( metadata == null ) { throw new SnowflakeSQLException ( SqlState . INTERNAL_ERROR , ErrorCode . INTERNAL_ERROR . getMessageCode ( ) , "missing file metadata for: " + srcFilePath ) ; } String destFileName = metadata . destFileName ; long uploadSize ; String digest = null ; logger . debug ( "Dest file name={}" ) ; // Temp file that needs to be cleaned up when upload was successful
FileBackedOutputStream fileBackedOutputStream = null ; // SNOW - 16082 : we should catpure exception if we fail to compress or
// calcuate digest .
try { if ( metadata . requireCompress ) { InputStreamWithMetadata compressedSizeAndStream = ( encMat == null ? compressStreamWithGZIPNoDigest ( uploadStream ) : compressStreamWithGZIP ( uploadStream ) ) ; fileBackedOutputStream = compressedSizeAndStream . fileBackedOutputStream ; // update the size
uploadSize = compressedSizeAndStream . size ; digest = compressedSizeAndStream . digest ; if ( compressedSizeAndStream . fileBackedOutputStream . getFile ( ) != null ) { fileToUpload = compressedSizeAndStream . fileBackedOutputStream . getFile ( ) ; } logger . debug ( "New size after compression: {}" , uploadSize ) ; } else if ( stage . getStageType ( ) != StageInfo . StageType . LOCAL_FS ) { // If it ' s not local _ fs , we store our digest in the metadata
// In local _ fs , we don ' t need digest , and if we turn it on , we will consume whole uploadStream , which local _ fs uses .
InputStreamWithMetadata result = computeDigest ( uploadStream , sourceFromStream ) ; digest = result . digest ; fileBackedOutputStream = result . fileBackedOutputStream ; uploadSize = result . size ; if ( ! sourceFromStream ) { fileToUpload = srcFile ; } else if ( result . fileBackedOutputStream . getFile ( ) != null ) { fileToUpload = result . fileBackedOutputStream . getFile ( ) ; } } else { if ( ! sourceFromStream && ( srcFile != null ) ) { fileToUpload = srcFile ; } // if stage is local _ fs and upload source is stream , upload size
// does not matter since 1 ) transfer did not require size 2 ) no
// output from uploadStream api is required
uploadSize = sourceFromStream ? 0 : srcFile . length ( ) ; } logger . debug ( "Started copying file from: {} to {}:{} destName: {} " + "auto compressed? {} size={}" , srcFilePath , stage . getStageType ( ) . name ( ) , stage . getLocation ( ) , destFileName , ( metadata . requireCompress ? "yes" : "no" ) , uploadSize ) ; // Simulated failure code .
if ( connection . getInjectFileUploadFailure ( ) != null && srcFilePath . endsWith ( ( connection ) . getInjectFileUploadFailure ( ) ) ) { throw new SnowflakeSimulatedUploadFailure ( srcFile != null ? srcFile . getName ( ) : "Unknown" ) ; } // upload it
switch ( stage . getStageType ( ) ) { case LOCAL_FS : pushFileToLocal ( stage . getLocation ( ) , srcFilePath , destFileName , uploadStream , fileBackedOutputStream ) ; break ; case S3 : case AZURE : pushFileToRemoteStore ( stage , destFileName , uploadStream , fileBackedOutputStream , uploadSize , digest , metadata . destCompressionType , client , connection , command , parallel , fileToUpload , ( fileToUpload == null ) , encMat ) ; metadata . isEncrypted = encMat != null ; break ; } } catch ( SnowflakeSimulatedUploadFailure ex ) { // This code path is used for Simulated failure code in tests .
// Never happen in production
metadata . resultStatus = ResultStatus . ERROR ; metadata . errorDetails = ex . getMessage ( ) ; throw ex ; } catch ( Throwable ex ) { logger . error ( "Exception encountered during file upload" , ex ) ; metadata . resultStatus = ResultStatus . ERROR ; metadata . errorDetails = ex . getMessage ( ) ; throw ex ; } finally { if ( fileBackedOutputStream != null ) { try { fileBackedOutputStream . reset ( ) ; } catch ( IOException ex ) { logger . debug ( "failed to clean up temp file: {}" , ex ) ; } } if ( inputStream == null ) { IOUtils . closeQuietly ( uploadStream ) ; } } logger . debug ( "filePath: {}" , srcFilePath ) ; // set dest size
metadata . destFileSize = uploadSize ; // mark the file as being uploaded
metadata . resultStatus = ResultStatus . UPLOADED ; return null ; } } ; |
public class SnomedCoderService { /** * Return some docs about how to call this webservice */
@ GET @ Produces ( MediaType . TEXT_PLAIN ) public InputStream doc ( ) throws IOException { } } | // noinspection ConstantConditions
return getClass ( ) . getClassLoader ( ) . getResource ( "SnomedCoderService_help.txt" ) . openStream ( ) ; |
public class BridgeMethodResolver { /** * If the supplied { @ link Class } has a declared { @ link Method } whose signature matches
* that of the supplied { @ link Method } , then this matching { @ link Method } is returned ,
* otherwise { @ code null } is returned . */
private static Method searchForMatch ( Class < ? > type , Method bridgeMethod ) { } } | return ReflectionUtils . findMethod ( type , bridgeMethod . getName ( ) , bridgeMethod . getParameterTypes ( ) ) ; |
public class DrizzlePreparedStatement { /** * Executes the SQL statement in this < code > PreparedStatement < / code > object , which must be an SQL Data Manipulation
* Language ( DML ) statement , such as < code > INSERT < / code > , < code > UPDATE < / code > or < code > DELETE < / code > ; or an SQL
* statement that returns nothing , such as a DDL statement .
* @ return either ( 1 ) the row count for SQL Data Manipulation Language ( DML ) statements or ( 2 ) 0 for SQL statements
* that return nothing
* @ throws java . sql . SQLException if a database access error occurs ; this method is called on a closed
* < code > PreparedStatement < / code > or the SQL statement returns a
* < code > ResultSet < / code > object */
public int executeUpdate ( ) throws SQLException { } } | startTimer ( ) ; try { setQueryResult ( getProtocol ( ) . executeQuery ( dQuery ) ) ; dQuery . clearParameters ( ) ; } catch ( QueryException e ) { throw SQLExceptionMapper . get ( e ) ; } finally { stopTimer ( ) ; } if ( getQueryResult ( ) . getResultSetType ( ) != ResultSetType . MODIFY ) { throw SQLExceptionMapper . getSQLException ( "The query returned a result set" ) ; } return ( int ) ( ( ModifyQueryResult ) getQueryResult ( ) ) . getUpdateCount ( ) ; |
public class ImgUtil { /** * 将图片对象转换为Base64形式
* @ param image 图片对象
* @ param imageType 图片类型
* @ return Base64的字符串表现形式
* @ since 4.1.8 */
public static String toBase64 ( Image image , String imageType ) { } } | final ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ; write ( image , imageType , out ) ; return Base64 . encode ( out . toByteArray ( ) ) ; |
public class JDBCStorageConnection { /** * The method < code > traverseQPath < / code > implemented thanks to simple queries . It allows
* to use Simple Queries instead of Complex Queries when complex queries are much slower such
* as with HSQLDB for example . */
protected QPath traverseQPathSQ ( String cpid ) throws SQLException , InvalidItemStateException , IllegalNameException { } } | // get item by Identifier usecase
List < QPathEntry > qrpath = new ArrayList < QPathEntry > ( ) ; // reverted path
String caid = cpid ; // container ancestor id
do { ResultSet parent = findItemByIdentifier ( caid ) ; try { if ( ! parent . next ( ) ) { throw new InvalidItemStateException ( "Parent not found, uuid: " + getIdentifier ( caid ) ) ; } QPathEntry qpe = new QPathEntry ( InternalQName . parse ( parent . getString ( COLUMN_NAME ) ) , parent . getInt ( COLUMN_INDEX ) , getIdentifier ( caid ) ) ; qrpath . add ( qpe ) ; caid = parent . getString ( COLUMN_PARENTID ) ; if ( caid . equals ( parent . getString ( COLUMN_ID ) ) ) { throw new InvalidItemStateException ( "An item with id='" + getIdentifier ( caid ) + "' is its own parent" ) ; } } finally { try { parent . close ( ) ; } catch ( SQLException e ) { LOG . error ( "Can't close the ResultSet: " + e . getMessage ( ) ) ; } } } while ( ! caid . equals ( Constants . ROOT_PARENT_UUID ) ) ; QPathEntry [ ] qentries = new QPathEntry [ qrpath . size ( ) ] ; int qi = 0 ; for ( int i = qrpath . size ( ) - 1 ; i >= 0 ; i -- ) { qentries [ qi ++ ] = qrpath . get ( i ) ; } return new QPath ( qentries ) ; |
public class FetchConfFile { /** * 进行下载 */
@ Override public < T > T call ( ) throws Exception { } } | // 删除临时文件
// LOGGER . info ( " start to remove tmp download file : " + " "
// + localTmpFile . getAbsolutePath ( ) ) ;
if ( localTmpFile . exists ( ) ) { localTmpFile . delete ( ) ; } // start tp download
LOGGER . debug ( "start to download. From: " + remoteUrl + " , TO: " + localTmpFile . getAbsolutePath ( ) ) ; // 下载
FileUtils . copyURLToFile ( remoteUrl , localTmpFile ) ; // check
if ( ! OsUtil . isFileExist ( localTmpFile . getAbsolutePath ( ) ) ) { throw new Exception ( "download is ok, but cannot find downloaded file." + localTmpFile ) ; } // download success
LOGGER . debug ( "download success! " + localTmpFile . getAbsolutePath ( ) ) ; return null ; |
public class AWSStorageGatewayClient { /** * Updates a gateway ' s metadata , which includes the gateway ' s name and time zone . To specify which gateway to
* update , use the Amazon Resource Name ( ARN ) of the gateway in your request .
* < note >
* For Gateways activated after September 2 , 2015 , the gateway ' s ARN contains the gateway ID rather than the gateway
* name . However , changing the name of the gateway has no effect on the gateway ' s ARN .
* < / note >
* @ param updateGatewayInformationRequest
* @ return Result of the UpdateGatewayInformation operation returned by the service .
* @ throws InvalidGatewayRequestException
* An exception occurred because an invalid gateway request was issued to the service . For more information ,
* see the error and message fields .
* @ throws InternalServerErrorException
* An internal server error has occurred during the request . For more information , see the error and message
* fields .
* @ sample AWSStorageGateway . UpdateGatewayInformation
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / storagegateway - 2013-06-30 / UpdateGatewayInformation "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public UpdateGatewayInformationResult updateGatewayInformation ( UpdateGatewayInformationRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeUpdateGatewayInformation ( request ) ; |
public class RingbufferContainer { /** * Reads the item at the specified sequence or loads it from the ringbuffer
* store if one is enabled . The type of the returned object is equal to the
* ringbuffer format . */
private Object readOrLoadItem ( long sequence ) { } } | Object item ; if ( sequence < ringbuffer . headSequence ( ) && store . isEnabled ( ) ) { item = store . load ( sequence ) ; } else { item = ringbuffer . read ( sequence ) ; } return item ; |
public class IpcLogEntry { /** * Set the server ASG for the request . In the case of the client side this will be
* automatically filled in if the { @ link NetflixHeader # ASG } is specified on the server
* response . The ASG value must follow the
* < a href = " https : / / github . com / Netflix / iep / tree / master / iep - nflxenv # server - group - settings " >
* Frigga server group < / a > naming conventions . */
public IpcLogEntry withServerAsg ( String asg ) { } } | this . serverAsg = asg ; if ( serverApp == null || serverCluster == null ) { ServerGroup group = ServerGroup . parse ( asg ) ; serverApp = ( serverApp == null ) ? group . app ( ) : serverApp ; serverCluster = ( serverCluster == null ) ? group . cluster ( ) : serverCluster ; } return this ; |
public class InstallUtil { /** * Get the last saved Jenkins instance version .
* @ return The last saved Jenkins instance version .
* @ see # saveLastExecVersion ( ) */
public static @ Nonnull String getLastExecVersion ( ) { } } | File lastExecVersionFile = getLastExecVersionFile ( ) ; if ( lastExecVersionFile . exists ( ) ) { try { String version = FileUtils . readFileToString ( lastExecVersionFile ) ; // JENKINS - 37438 blank will force the setup
// wizard regardless of current state of the system
if ( StringUtils . isBlank ( version ) ) { return FORCE_NEW_INSTALL_VERSION . toString ( ) ; } return version ; } catch ( IOException e ) { LOGGER . log ( SEVERE , "Unexpected Error. Unable to read " + lastExecVersionFile . getAbsolutePath ( ) , e ) ; LOGGER . log ( WARNING , "Unable to determine the last running version (see error above). Treating this as a restart. No plugins will be updated." ) ; return getCurrentExecVersion ( ) ; } } else { // Backward compatibility . Use the last version stored in the top level config . xml .
// Going to read the value directly from the config . xml file Vs hoping that the
// Jenkins startup sequence has moved far enough along that it has loaded the
// global config . It can ' t load the global config until well into the startup
// sequence because the unmarshal requires numerous objects to be created e . g .
// it requires the Plugin Manager . It happens too late and it ' s too risky to
// change how it currently works .
File configFile = getConfigFile ( ) ; if ( configFile . exists ( ) ) { try { String lastVersion = XMLUtils . getValue ( "/hudson/version" , configFile ) ; if ( lastVersion . length ( ) > 0 ) { LOGGER . log ( Level . FINE , "discovered serialized lastVersion {0}" , lastVersion ) ; return lastVersion ; } } catch ( Exception e ) { LOGGER . log ( SEVERE , "Unexpected error reading global config.xml" , e ) ; } } return NEW_INSTALL_VERSION . toString ( ) ; } |
public class DeleteConfigRuleRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteConfigRuleRequest deleteConfigRuleRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( deleteConfigRuleRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteConfigRuleRequest . getConfigRuleName ( ) , CONFIGRULENAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class Processor { /** * The processor parameters .
* @ param parameters
* The processor parameters . */
public void setParameters ( java . util . Collection < ProcessorParameter > parameters ) { } } | if ( parameters == null ) { this . parameters = null ; return ; } this . parameters = new java . util . ArrayList < ProcessorParameter > ( parameters ) ; |
public class LazyDataObject { public Object put ( String key , Object o ) { } } | explode ( ) ; return delegee . put ( key , o ) ; |
public class ViolationEventMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ViolationEvent violationEvent , ProtocolMarshaller protocolMarshaller ) { } } | if ( violationEvent == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( violationEvent . getViolationId ( ) , VIOLATIONID_BINDING ) ; protocolMarshaller . marshall ( violationEvent . getThingName ( ) , THINGNAME_BINDING ) ; protocolMarshaller . marshall ( violationEvent . getSecurityProfileName ( ) , SECURITYPROFILENAME_BINDING ) ; protocolMarshaller . marshall ( violationEvent . getBehavior ( ) , BEHAVIOR_BINDING ) ; protocolMarshaller . marshall ( violationEvent . getMetricValue ( ) , METRICVALUE_BINDING ) ; protocolMarshaller . marshall ( violationEvent . getViolationEventType ( ) , VIOLATIONEVENTTYPE_BINDING ) ; protocolMarshaller . marshall ( violationEvent . getViolationEventTime ( ) , VIOLATIONEVENTTIME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class LineItemSummary { /** * Gets the deliveryIndicator value for this LineItemSummary .
* @ return deliveryIndicator * Indicates how well the line item has been performing . This
* attribute is
* readonly and is populated by Google . This will be
* { @ code null } if the
* delivery indicator information is not available due
* to one of the following
* reasons :
* < ol >
* < li > The line item is not delivering . < / li >
* < li > The line item has an unlimited goal or cap . < / li >
* < li > The line item has a percentage based goal or cap . < / li >
* < / ol > */
public com . google . api . ads . admanager . axis . v201808 . DeliveryIndicator getDeliveryIndicator ( ) { } } | return deliveryIndicator ; |
public class SQLTable { /** * This is the getter method for instance variable { @ link # types } .
* @ return value of instance variable { @ link # types }
* @ see # types
* @ throws CacheReloadException on error */
public Set < Type > getTypes ( ) throws CacheReloadException { } } | final Set < Type > ret = new HashSet < > ( ) ; for ( final Long id : this . types ) { ret . add ( Type . get ( id ) ) ; } return Collections . unmodifiableSet ( ret ) ; |
public class LoaderUtil { /** * Loads a class by name . This method respects the { @ link # IGNORE _ TCCL _ PROPERTY } Log4j property . If this property is
* specified and set to anything besides { @ code false } , then the default ClassLoader will be used .
* @ param className The class name .
* @ return the Class for the given name .
* @ throws ClassNotFoundException if the specified class name could not be found
* @ since 2.1 */
public static Class < ? > loadClass ( final String className ) throws ClassNotFoundException { } } | if ( isIgnoreTccl ( ) ) { return Class . forName ( className ) ; } try { return getThreadContextClassLoader ( ) . loadClass ( className ) ; } catch ( final Throwable ignored ) { return Class . forName ( className ) ; } |
public class SignerUtils { /** * Converts the given data to be safe for use in signed URLs for a private
* distribution by using specialized Base64 encoding . */
public static String makeBytesUrlSafe ( byte [ ] bytes ) { } } | byte [ ] encoded = Base64 . encode ( bytes ) ; for ( int i = 0 ; i < encoded . length ; i ++ ) { switch ( encoded [ i ] ) { case '+' : encoded [ i ] = '-' ; continue ; case '=' : encoded [ i ] = '_' ; continue ; case '/' : encoded [ i ] = '~' ; continue ; default : continue ; } } return new String ( encoded , UTF8 ) ; |
public class ClientAppBase { /** * The static method to print a log message to the console .
* @ param className Name of the class that prints this message .
* @ param msg The log message that needs to be printed .
* @ param args The arguments that may be needed for formatting the message . */
protected static void printLogStatic ( String className , String msg , Object ... args ) { } } | if ( args != null ) { msg = String . format ( msg , args ) ; } String header = String . format ( "%s [%s] " , ZonedDateTime . now ( ) . format ( TIME_FORMAT ) , className ) ; System . out . println ( String . format ( "%s%s" , header , msg . replaceAll ( "\n" , "\n" + header ) ) ) ; |
public class BinaryJedis { /** * Select the DB with having the specified zero - based numeric index . For default every new client
* connection is automatically selected to DB 0.
* @ param index
* @ return Status code reply */
@ Override public String select ( final int index ) { } } | checkIsInMultiOrPipeline ( ) ; client . select ( index ) ; String statusCodeReply = client . getStatusCodeReply ( ) ; client . setDb ( index ) ; return statusCodeReply ; |
/**
 * Builds the UPDATE SQL statement for the given entity.
 *
 * @param metadata     entity table metadata
 * @param type         entity type
 * @param sqlConfig    SQL configuration
 * @param addCondition whether to append a WHERE clause; {@code true} to append
 * @return the generated UPDATE SQL
 */
protected String buildUpdateSQL(final TableMetadata metadata, final Class<? extends Object> type,
        final SqlConfig sqlConfig, final boolean addCondition) {
    StringBuilder sql = new StringBuilder("UPDATE ").append("/* ")
            .append(sqlConfig.getSqlAgentFactory().getSqlIdKeyName()).append(" */").append(" ")
            .append(metadata.getTableIdentifier()).append(" SET ").append(System.lineSeparator());
    Map<String, MappingColumn> mappingColumns = MappingUtils.getMappingColumnMap(type, SqlKind.UPDATE);
    Optional<MappingColumn> versionMappingColumn = type == null ? Optional.empty()
            : MappingUtils.getVersionMappingColumn(type);
    boolean firstFlag = true;
    for (TableMetadata.Column col : metadata.getColumns()) {
        if (!mappingColumns.isEmpty()) {
            MappingColumn mappingColumn = mappingColumns.get(col.getCamelColumnName());
            if (mappingColumn == null) {
                // Skip columns annotated with @Transient.
                continue;
            } else if (mappingColumn.isId()) {
                // Skip @Id columns: they are auto-generated and must not be updated.
                continue;
            }
        }
        String camelColName = col.getCamelColumnName();
        StringBuilder parts = new StringBuilder().append("\t");
        if (firstFlag) {
            // First SET entry: nullable columns still get a leading ", " because they
            // are wrapped in an IF comment and a preceding entry may be skipped at runtime.
            if (col.isNullable()) {
                parts.append(", ");
            } else {
                parts.append(" ");
            }
            firstFlag = false;
        } else {
            parts.append(", ");
        }
        parts.append(col.getColumnIdentifier()).append(" = /*").append(camelColName).append("*/''");
        // The optimistic-lock version column is incremented on every update.
        versionMappingColumn.ifPresent(mappingColumn -> {
            if (camelColName.equals(mappingColumn.getCamelName())) {
                parts.append(" + 1");
            }
        });
        if (StringUtils.isNotEmpty(col.getRemarks())) {
            parts.append("\t").append("-- ").append(col.getRemarks());
        }
        parts.append(System.lineSeparator());
        if (col.isNullable()) {
            // Nullable columns are emitted inside an IF comment so they are only
            // updated when a value is actually bound.
            wrapIfComment(sql, parts, col);
        } else {
            sql.append(parts);
        }
    }
    if (addCondition) {
        sql.append("WHERE").append(System.lineSeparator());
        // Prefer the primary-key columns; fall back to the first column when no key exists.
        final List<? extends Column> cols = !metadata.getKeyColumns().isEmpty() ? metadata.getKeyColumns()
                : Arrays.asList(metadata.getColumns().get(0));
        firstFlag = true;
        for (final TableMetadata.Column col : cols) {
            final StringBuilder parts = new StringBuilder().append("\t");
            if (firstFlag) {
                if (col.isNullable()) {
                    parts.append("AND ");
                } else {
                    parts.append(" ");
                }
                firstFlag = false;
            } else {
                parts.append("AND ");
            }
            parts.append(col.getColumnIdentifier()).append(" = ").append("/*")
                    .append(col.getCamelColumnName()).append("*/''").append(System.lineSeparator());
            if (col.isNullable()) {
                wrapIfComment(sql, parts, col);
            } else {
                sql.append(parts);
            }
        }
        // Effectively-final copy of the flag for use inside the lambda below.
        final boolean first = firstFlag;
        // Also constrain on the current version value for optimistic locking.
        versionMappingColumn.ifPresent(mappingColumn -> {
            TableMetadata.Column col = metadata.getColumn(mappingColumn.getCamelName());
            sql.append("\t");
            if (first) {
                sql.append(" ");
            } else {
                sql.append("AND ");
            }
            sql.append(col.getColumnIdentifier()).append(" = ").append("/*")
                    .append(col.getCamelColumnName()).append("*/''").append(System.lineSeparator());
        });
    }
    return sql.toString();
}
public class GCLINERGImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eSet ( int featureID , Object newValue ) { } } | switch ( featureID ) { case AfplibPackage . GCLINERG__XPOS : setXPOS ( ( Integer ) newValue ) ; return ; case AfplibPackage . GCLINERG__YPOS : setYPOS ( ( Integer ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ; |
public class CalendarFormatterBase { /** * Format the day of the month , optionally zero - padded . */
void formatDayOfMonth ( StringBuilder b , ZonedDateTime d , int width ) { } } | int day = d . getDayOfMonth ( ) ; zeroPad2 ( b , day , width ) ; |
public class HelpSearchService { /** * Index all HTML files within the content of the help module .
* @ param helpModule Help module to be indexed . */
@ Override public void indexHelpModule ( HelpModule helpModule ) { } } | try { if ( indexTracker . isSame ( helpModule ) ) { return ; } unindexHelpModule ( helpModule ) ; log . info ( "Indexing help module " + helpModule . getLocalizedId ( ) ) ; int i = helpModule . getUrl ( ) . lastIndexOf ( '/' ) ; String pattern = "classpath:" + helpModule . getUrl ( ) . substring ( 0 , i + 1 ) + "*.htm*" ; for ( Resource resource : appContext . getResources ( pattern ) ) { indexDocument ( helpModule , resource ) ; } writer . commit ( ) ; indexTracker . add ( helpModule ) ; } catch ( Exception e ) { throw MiscUtil . toUnchecked ( e ) ; } |
public class MapComposedElement { /** * Replies the specified point at the given index in the specified group .
* @ param groupIndex is the index of the group
* @ param indexInGroup is the index of the point in the group ( 0 for the
* first point of the group . . . ) .
* @ return the point
* @ throws IndexOutOfBoundsException in case of error . */
@ Pure public Point2d getPointAt ( int groupIndex , int indexInGroup ) { } } | final int startIndex = firstInGroup ( groupIndex ) ; // Besure that the member ' s index is in the group index ' s range
final int groupMemberCount = getPointCountInGroup ( groupIndex ) ; if ( indexInGroup < 0 ) { throw new IndexOutOfBoundsException ( indexInGroup + "<0" ) ; // $ NON - NLS - 1 $
} if ( indexInGroup >= groupMemberCount ) { throw new IndexOutOfBoundsException ( indexInGroup + ">=" + groupMemberCount ) ; // $ NON - NLS - 1 $
} return new Point2d ( this . pointCoordinates [ startIndex + indexInGroup * 2 ] , this . pointCoordinates [ startIndex + indexInGroup * 2 + 1 ] ) ; |
/**
 * Returns the content-type associated with this object, resolving it lazily:
 * first from the response header, then by guessing from the stream content,
 * then from locally configured properties or the URL's file name.
 */
public String getContentType() {
    if (contentType == null)
        contentType = getHeaderField("content-type");
    if (contentType == null) {
        String ct = null;
        try {
            // Sniff the content to guess a type when no header was present.
            ct = guessContentTypeFromStream(getInputStream());
        } catch (java.io.IOException e) {
            // Best effort only: an unreadable stream just means no guess.
        }
        String ce = properties.findValue("content-encoding");
        if (ct == null) {
            // Fall back to configured properties, then to the file name/extension.
            ct = properties.findValue("content-type");
            if (ct == null)
                if (url.getFile().endsWith("/"))
                    ct = "text/html";
                else
                    ct = guessContentTypeFromName(url.getFile());
        }
        /* If the Mime header had a Content-encoding field and its value
         * was not one of the values that essentially indicate no
         * encoding, we force the content type to be unknown. This will
         * cause a save dialog to be presented to the user. It is not
         * ideal but is better than what we were previously doing, namely
         * bringing up an image tool for compressed tar files. */
        if (ct == null || ce != null && !(ce.equalsIgnoreCase("7bit")
                || ce.equalsIgnoreCase("8bit") || ce.equalsIgnoreCase("binary")))
            ct = "content/unknown";
        setContentType(ct);
    }
    return contentType;
}
public class FeatureFileParser { /** * If parsedFeature has no configurations then we can return a single item list containg the parsedFeature
* If configurations are present we need to return a list with one feature per supported configuration
* @ param configurationNames
* @ param parsedFeature
* @ return */
private List < FeatureToken > getFeaturesWithConfigurations ( List < String > configurationNames , FeatureToken parsedFeature ) { } } | List < FeatureToken > results = new ArrayList < > ( ) ; if ( parsedFeature != null ) { if ( configurationNames == null ) { results . add ( parsedFeature ) ; } else { createFeaturesWithConfigurations ( configurationNames , parsedFeature , results ) ; } } return results ; |
public class VoiceApi { /** * Remove a key / value pair from user data
* Delete data with the specified key from the call & # 39 ; s user data .
* @ param id The connection ID of the call . ( required )
* @ param keyData ( required )
* @ return ApiResponse & lt ; ApiSuccessResponse & gt ;
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public ApiResponse < ApiSuccessResponse > deleteUserDataPairWithHttpInfo ( String id , KeyData keyData ) throws ApiException { } } | com . squareup . okhttp . Call call = deleteUserDataPairValidateBeforeCall ( id , keyData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ; |
public class FctBnAccEntitiesProcessors { /** * < p > Get sales invoice good line utility . < / p >
* @ param pAddParam additional param
* @ return sales invoice good line utility
* @ throws Exception - an exception */
protected final UtlInvLine < RS , SalesInvoice , SalesInvoiceLine , SalesInvoiceTaxLine , SalesInvoiceGoodsTaxLine > lazyGetUtlSalGdLn ( final Map < String , Object > pAddParam ) throws Exception { } } | UtlInvLine < RS , SalesInvoice , SalesInvoiceLine , SalesInvoiceTaxLine , SalesInvoiceGoodsTaxLine > utlInvLn = this . utlSalInvGdLn ; if ( utlInvLn == null ) { utlInvLn = new UtlInvLine < RS , SalesInvoice , SalesInvoiceLine , SalesInvoiceTaxLine , SalesInvoiceGoodsTaxLine > ( ) ; utlInvLn . setUtlInvBase ( lazyGetUtlInvBase ( pAddParam ) ) ; utlInvLn . setInvTxMeth ( lazyGetSalInvTxMeth ( pAddParam ) ) ; utlInvLn . setIsMutable ( false ) ; utlInvLn . setNeedMkTxCat ( true ) ; utlInvLn . setLtlCl ( SalesInvoiceGoodsTaxLine . class ) ; utlInvLn . setDstTxItLnCl ( DestTaxGoodsLn . class ) ; FactoryPersistableBase < SalesInvoiceGoodsTaxLine > fctLtl = new FactoryPersistableBase < SalesInvoiceGoodsTaxLine > ( ) ; fctLtl . setObjectClass ( SalesInvoiceGoodsTaxLine . class ) ; fctLtl . setDatabaseId ( getSrvDatabase ( ) . getIdDatabase ( ) ) ; utlInvLn . setFctLineTxLn ( fctLtl ) ; // assigning fully initialized object :
this . utlSalInvGdLn = utlInvLn ; } return utlInvLn ; |
public class FilterInvoker { /** * 取得方法的特殊参数配置
* @ param methodName 方法名
* @ param paramKey 参数关键字
* @ param defaultValue 默认值
* @ return 都找不到为false boolean method param */
protected boolean getBooleanMethodParam ( String methodName , String paramKey , boolean defaultValue ) { } } | if ( CommonUtils . isEmpty ( configContext ) ) { return defaultValue ; } Boolean o = ( Boolean ) configContext . get ( buildMethodKey ( methodName , paramKey ) ) ; if ( o == null ) { o = ( Boolean ) configContext . get ( paramKey ) ; return o == null ? defaultValue : o ; } else { return o ; } |
public class TransformProcess { /** * Based on the input schema ,
* map raw string values to the appropriate
* writable
* @ param values the values to convert
* @ return the transformed values based on the schema */
public List < Writable > transformRawStringsToInputList ( List < String > values ) { } } | List < Writable > ret = new ArrayList < > ( ) ; if ( values . size ( ) != initialSchema . numColumns ( ) ) throw new IllegalArgumentException ( String . format ( "Number of values %d does not match the number of input columns %d for schema" , values . size ( ) , initialSchema . numColumns ( ) ) ) ; for ( int i = 0 ; i < values . size ( ) ; i ++ ) { switch ( initialSchema . getType ( i ) ) { case String : ret . add ( new Text ( values . get ( i ) ) ) ; break ; case Integer : ret . add ( new IntWritable ( Integer . parseInt ( values . get ( i ) ) ) ) ; break ; case Double : ret . add ( new DoubleWritable ( Double . parseDouble ( values . get ( i ) ) ) ) ; break ; case Float : ret . add ( new FloatWritable ( Float . parseFloat ( values . get ( i ) ) ) ) ; break ; case Categorical : ret . add ( new Text ( values . get ( i ) ) ) ; break ; case Boolean : ret . add ( new BooleanWritable ( Boolean . parseBoolean ( values . get ( i ) ) ) ) ; break ; case Time : break ; case Long : ret . add ( new LongWritable ( Long . parseLong ( values . get ( i ) ) ) ) ; } } return ret ; |
public class ScriptController { /** * Get a specific script
* @ param model
* @ param scriptIdentifier
* @ return
* @ throws Exception */
@ RequestMapping ( value = "/api/scripts/{scriptIdentifier}" , method = RequestMethod . GET ) public @ ResponseBody Script getScript ( Model model , @ PathVariable String scriptIdentifier ) throws Exception { } } | return ScriptService . getInstance ( ) . getScript ( Integer . parseInt ( scriptIdentifier ) ) ; |
public class CheckArg { /** * Checks that the iterator is not empty , and throws an exception if it is .
* @ param argument the iterator to check
* @ param name The name of the argument
* @ throws IllegalArgumentException If iterator is empty ( i . e . , iterator . hasNext ( ) returns false ) */
public static void isNotEmpty ( Iterator < ? > argument , String name ) { } } | isNotNull ( argument , name ) ; if ( ! argument . hasNext ( ) ) { throw new IllegalArgumentException ( CommonI18n . argumentMayNotBeEmpty . text ( name ) ) ; } |
public class VarExporter { /** * Load the dynamic variable object .
* @ param variableName name of variable
* @ param < T > The data type of the exported variable .
* @ return Null if the variable was not found . The value otherwise . */
@ Override @ SuppressWarnings ( "unchecked" ) public < T > Variable < T > getVariable ( final String variableName ) { } } | final String [ ] subTokens = getSubVariableTokens ( variableName ) ; if ( subTokens != null ) { final Variable < T > sub = getSubVariable ( subTokens [ 0 ] , subTokens [ 1 ] ) ; if ( sub != null ) { return sub ; } } final Variable < T > v ; synchronized ( variables ) { v = variables . get ( variableName ) ; } return v ; |
public class AwsSecurityFinding { /** * A list of name / value string pairs associated with the finding . These are custom , user - defined fields added to a
* finding .
* @ param userDefinedFields
* A list of name / value string pairs associated with the finding . These are custom , user - defined fields added
* to a finding .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AwsSecurityFinding withUserDefinedFields ( java . util . Map < String , String > userDefinedFields ) { } } | setUserDefinedFields ( userDefinedFields ) ; return this ; |
public class ServerWebExchangeLimiterBuilder { /** * Partition the limit by header .
* @ param name header name
* @ return Chainable builder */
public ServerWebExchangeLimiterBuilder partitionByHeader ( String name ) { } } | return partitionResolver ( exchange -> exchange . getRequest ( ) . getHeaders ( ) . getFirst ( name ) ) ; |
public class AppServiceEnvironmentsInner { /** * Get properties of a multi - role pool .
* Get properties of a multi - role pool .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param name Name of the App Service Environment .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the WorkerPoolResourceInner object if successful . */
public WorkerPoolResourceInner getMultiRolePool ( String resourceGroupName , String name ) { } } | return getMultiRolePoolWithServiceResponseAsync ( resourceGroupName , name ) . toBlocking ( ) . single ( ) . body ( ) ; |
/**
 * This algorithm is from the original paper (Guttman's quadratic split).
 * Algorithm Split: divide a set of M+1 index entries into two groups.
 * S1. [Pick first entry for each group]. Apply {@link #pickSeeds(List)} to choose
 *     two entries to be the first elements of the groups. Assign each to a group.
 * S2. [Check if done]. If all entries have been assigned, stop. If one group has so
 *     few entries that all the rest must be assigned to it in order for it to have
 *     the minimum number m, assign them and stop.
 * S3. [Select entry to assign]. Invoke {@link #pickNext(List)} to choose the next
 *     entry to assign. Add it to the group whose covering rectangle will have to be
 *     enlarged least to accommodate it. Resolve ties by adding the entry to the group
 *     with smaller area, then to the one with fewer entries, then to either.
 *     Repeat from S2.
 */
@Override
public Node[] split(Node node) {
    List<Node> children = Lists.newArrayList(node.getChildren());
    // S1: pick the two seed entries; removing them from `children` is done by pickSeeds.
    Node[] seeds = pickSeeds(children);
    // Reuse the input node as group 0, holding seed 0.
    node.clear();
    node.addChild(seeds[0]);
    node.addToBitmapIndex(seeds[0]);
    // Group 1 starts as a new node whose bounds are seed 1's bounds.
    Node group1 = new Node(seeds[1].getMinCoordinates().clone(), seeds[1].getMaxCoordinates().clone(),
            seeds[1], node.isLeaf(), node.getParent(), bf.makeEmptyMutableBitmap());
    group1.addToBitmapIndex(seeds[1]);
    if (node.getParent() != null) {
        node.getParent().addChild(group1);
    }
    Node[] groups = new Node[]{node, group1};
    RTreeUtils.enclose(groups);
    while (!children.isEmpty()) {
        // S2: if a group needs every remaining entry to reach the minimum, assign them all and stop.
        for (Node group : groups) {
            if (group.getChildren().size() + children.size() <= minNumChildren) {
                for (Node child : children) {
                    group.addToBitmapIndex(child);
                    group.addChild(child);
                }
                RTreeUtils.enclose(groups);
                return groups;
            }
        }
        // S3: assign the next entry to the group whose bounds grow the least.
        Node nextToAssign = pickNext(children);
        double group0ExpandedArea = RTreeUtils.getEnclosingArea(groups[0], nextToAssign);
        double group1ExpandedArea = RTreeUtils.getEnclosingArea(groups[1], nextToAssign);
        Node optimal;
        if (group0ExpandedArea < group1ExpandedArea) {
            optimal = groups[0];
        } else if (group0ExpandedArea == group1ExpandedArea) {
            // Tie-break on the smaller current area.
            if (groups[0].getArea() < groups[1].getArea()) {
                optimal = groups[0];
            } else {
                optimal = groups[1];
            }
        } else {
            optimal = groups[1];
        }
        optimal.addToBitmapIndex(nextToAssign);
        optimal.addChild(nextToAssign);
        optimal.enclose();
    }
    return groups;
}
public class Kv { /** * key 存在 , 并且 value 为 false , 则返回 true */
public boolean isFalse ( Object key ) { } } | Object value = get ( key ) ; return ( value instanceof Boolean && ( ( Boolean ) value == false ) ) ; |
public class Geldbetrag { /** * Wandelt den angegebenen MonetaryAmount in einen Geldbetrag um . Um die
* Anzahl von Objekten gering zu halten , wird nur dann tatsaechlich eine
* neues Objekt erzeugt , wenn es sich nicht vermeiden laesst .
* In Anlehnung an { @ link BigDecimal } heisst die Methode " valueOf " .
* @ param value Wert des andere Geldbetrags
* @ param currency Waehrung des anderen Geldbetrags
* @ return ein Geldbetrag */
public static Geldbetrag valueOf ( Number value , String currency ) { } } | return valueOf ( value , Waehrung . toCurrency ( currency ) ) ; |
public class TableProcessor { /** * Populate metadata .
* @ param < X >
* the generic type
* @ param < T >
* the generic type
* @ param metaModelBuilder
* the metaModelBuilder */
private < X , T > void onBuildMetaModelSuperClass ( Class < ? super X > clazz , MetaModelBuilder < X , T > metaModelBuilder ) { } } | if ( clazz != null && clazz . isAnnotationPresent ( javax . persistence . Entity . class ) ) { while ( clazz != null && clazz . isAnnotationPresent ( javax . persistence . Entity . class ) ) { metaModelBuilder . process ( ( Class < X > ) clazz ) ; for ( Field f : clazz . getDeclaredFields ( ) ) { if ( f != null && ! Modifier . isStatic ( f . getModifiers ( ) ) && ! Modifier . isTransient ( f . getModifiers ( ) ) && ! f . isAnnotationPresent ( Transient . class ) ) { metaModelBuilder . construct ( ( Class < X > ) clazz , f ) ; } } clazz = clazz . getSuperclass ( ) ; } } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.