signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class DeploymentMetaData { /** * The Service Name for this deployment , based on either an explicit service name property , or based
* on environmental data , such as OpenShift build name / namespace .
* @ return the service name to include on the trace context */
static String getServiceNameFromEnv ( ) { } } | String serviceName = PropertyUtil . getProperty ( PropertyUtil . HAWKULAR_APM_SERVICE_NAME ) ; if ( null == serviceName || serviceName . isEmpty ( ) ) { String buildStamp = getBuildStampFromEnv ( ) ; if ( null != buildStamp && ! buildStamp . isEmpty ( ) ) { return getServiceFromBuildName ( buildStamp ) ; } return null ; } return serviceName ; |
public class SegmentedJournal { /** * Resets and returns the first segment in the journal .
* @ param index the starting index of the journal
* @ return the first segment */
JournalSegment < E > resetSegments ( long index ) { } } | assertOpen ( ) ; // If the index already equals the first segment index , skip the reset .
JournalSegment < E > firstSegment = getFirstSegment ( ) ; if ( index == firstSegment . index ( ) ) { return firstSegment ; } for ( JournalSegment < E > segment : segments . values ( ) ) { segment . close ( ) ; segment . delete ( ) ; } segments . clear ( ) ; JournalSegmentDescriptor descriptor = JournalSegmentDescriptor . builder ( ) . withId ( 1 ) . withIndex ( index ) . withMaxSegmentSize ( maxSegmentSize ) . withMaxEntries ( maxEntriesPerSegment ) . build ( ) ; currentSegment = createSegment ( descriptor ) ; segments . put ( index , currentSegment ) ; return currentSegment ; |
public class XMLStreamCopier { /** * Reads all events from the reader and pipes them into the writer .
* @ param isFragmentMode
* if true , copies all events of the XML fragment starting at the
* current cursor position , if false copies all events of the
* document starting at the current cursor positions .
* @ throws XMLStreamException
* If an error occurs while copying the stream */
public void copy ( boolean isFragmentMode ) throws XMLStreamException { } } | int ev = isFragmentMode ? XMLStreamConstants . START_ELEMENT : XMLStreamConstants . START_DOCUMENT ; reader . require ( ev , null , null ) ; int depth = 0 ; ev = reader . getEventType ( ) ; while ( true ) { switch ( ev ) { case XMLStreamConstants . START_ELEMENT : { writer . writeStartElement ( nonNull ( reader . getPrefix ( ) ) , // fixup bug where woodstox - 3.2.7 returns null
reader . getLocalName ( ) , nonNull ( reader . getNamespaceURI ( ) ) ) ; // Saxon requires nonNull
copyAttributes ( ) ; copyNamespaces ( ) ; depth ++ ; break ; } case XMLStreamConstants . END_ELEMENT : { writer . writeEndElement ( ) ; depth -- ; if ( isFragmentMode && depth == 0 ) { writer . flush ( ) ; return ; // we ' re done
} break ; } case XMLStreamConstants . ATTRIBUTE : { // can happen as part of an XPath result sequence , or similar
copyAttribute ( 0 ) ; break ; } case XMLStreamConstants . START_DOCUMENT : { copyStartDocument ( ) ; break ; } case XMLStreamConstants . END_DOCUMENT : { writer . writeEndDocument ( ) ; writer . flush ( ) ; return ; // we ' re done
} case XMLStreamConstants . PROCESSING_INSTRUCTION : { writer . writeProcessingInstruction ( reader . getPITarget ( ) , reader . getPIData ( ) ) ; break ; } case XMLStreamConstants . COMMENT : { writer . writeComment ( reader . getText ( ) ) ; break ; } case XMLStreamConstants . CDATA : { writer . writeCData ( reader . getText ( ) ) ; break ; } case XMLStreamConstants . SPACE : case XMLStreamConstants . CHARACTERS : { copyText ( ) ; break ; } case XMLStreamConstants . ENTITY_REFERENCE : { // writer . writeEntityRef ( reader . getLocalName ( ) ) ; / / don ' t expand the ref
copyText ( ) ; // expand the ref ( safer )
break ; } case XMLStreamConstants . DTD : { copyDTD ( ) ; break ; } case XMLStreamConstants . ENTITY_DECLARATION : break ; // ignore ( handled by XMLStreamConstants . DTD )
case XMLStreamConstants . NOTATION_DECLARATION : break ; // ignore ( handled by XMLStreamConstants . DTD )
case XMLStreamConstants . NAMESPACE : { // can happen as part of an XPath result sequence , or similar
writer . writeNamespace ( reader . getPrefix ( ) , reader . getNamespaceURI ( ) ) ; break ; } default : { throw new XMLStreamException ( "Unrecognized event type: " + reader . getEventType ( ) ) ; } } ev = reader . next ( ) ; } |
public class ActionFormMapper { protected void mappingJsonBody ( ActionRuntime runtime , VirtualForm virtualForm , String json ) { } } | final JsonManager jsonManager = getJsonManager ( ) ; try { final Class < ? > formType = virtualForm . getFormMeta ( ) . getRootFormType ( ) ; // called only when root here
final Object fromJson = jsonManager . fromJson ( json , formType ) ; acceptJsonRealForm ( virtualForm , fromJson ) ; } catch ( RuntimeException e ) { throwJsonBodyParseFailureException ( runtime , virtualForm , json , e ) ; } |
public class ChainWriter { /** * This is must be used within a < code > $ ' . . . ' < / code > string .
* @ see TextInShEncoder
* @ param value the value to be encoded */
public ChainWriter encodeSh ( Object value ) throws IOException { } } | Coercion . write ( value , textInShEncoder , out ) ; return this ; |
public class AxiomAssign { /** * A property :
* - has a min : number or " * "
* - has a max : number or " * " */
public boolean assignParsedElement ( Multiplicity parsedMultiplicity , String syntaxElementName , ISyntaxElement syntaxElement ) throws ModelException { } } | if ( syntaxElementName . equalsIgnoreCase ( XMLSyntax . MULTIPLICITY_MIN ( ) ) && syntaxElement instanceof SimpleProperty ) { if ( ( ( SimpleProperty ) syntaxElement ) . getValue ( ) . toString ( ) . equalsIgnoreCase ( "*" ) ) { parsedMultiplicity . setMin ( null ) ; } else { parsedMultiplicity . setMin ( Integer . parseInt ( ( ( SimpleProperty ) syntaxElement ) . getValue ( ) . toString ( ) ) ) ; } return true ; } if ( syntaxElementName . equalsIgnoreCase ( XMLSyntax . MULTIPLICITY_MAX ( ) ) && syntaxElement instanceof SimpleProperty ) { if ( ( ( SimpleProperty ) syntaxElement ) . getValue ( ) . toString ( ) . equalsIgnoreCase ( "*" ) ) { parsedMultiplicity . setMax ( null ) ; } else { parsedMultiplicity . setMax ( Integer . parseInt ( ( ( SimpleProperty ) syntaxElement ) . getValue ( ) . toString ( ) ) ) ; } return true ; } return false ; |
public class FailoverGroupsInner { /** * Creates or updates a failover group .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server containing the failover group .
* @ param failoverGroupName The name of the failover group .
* @ param parameters The failover group parameters .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < FailoverGroupInner > beginCreateOrUpdateAsync ( String resourceGroupName , String serverName , String failoverGroupName , FailoverGroupInner parameters , final ServiceCallback < FailoverGroupInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , failoverGroupName , parameters ) , serviceCallback ) ; |
public class IntBinaryTree { /** * Gets the leaves of this tree . */
public ArrayList < IntBinaryTree > getLeaves ( ) { } } | LeafCollector leafCollector = new LeafCollector ( ) ; postOrderTraversal ( leafCollector ) ; return leafCollector . leaves ; |
public class HttpMessageSecurity { /** * Unencrypt encrypted payload .
* @ param payload
* base64url serialized JWEObject .
* @ return Unencrypted message . */
private String unprotectPayload ( String payload ) throws IOException { } } | try { JWEObject jweObject = JWEObject . deserialize ( MessageSecurityHelper . base64UrltoString ( payload ) ) ; JWEHeader jweHeader = jweObject . jweHeader ( ) ; if ( ! clientEncryptionKey . kid ( ) . equals ( jweHeader . kid ( ) ) || ! jweHeader . alg ( ) . equals ( "RSA-OAEP" ) || ! jweHeader . enc ( ) . equals ( "A128CBC-HS256" ) ) { throw new IOException ( "Invalid protected response" ) ; } byte [ ] key = MessageSecurityHelper . base64UrltoByteArray ( jweObject . encryptedKey ( ) ) ; RsaKey clientEncryptionRsaKey = new RsaKey ( clientEncryptionKey . kid ( ) , clientEncryptionKey . toRSA ( true ) ) ; byte [ ] aesKeyBytes = clientEncryptionRsaKey . decryptAsync ( key , null , null , null , "RSA-OAEP" ) . get ( ) ; SymmetricKey aesKey = new SymmetricKey ( UUID . randomUUID ( ) . toString ( ) , aesKeyBytes ) ; byte [ ] result = aesKey . decryptAsync ( MessageSecurityHelper . base64UrltoByteArray ( jweObject . cipherText ( ) ) , MessageSecurityHelper . base64UrltoByteArray ( jweObject . iv ( ) ) , jweObject . originalProtected ( ) . getBytes ( MESSAGE_ENCODING ) , MessageSecurityHelper . base64UrltoByteArray ( jweObject . tag ( ) ) , "A128CBC-HS256" ) . get ( ) ; return new String ( result , MESSAGE_ENCODING ) ; } catch ( ExecutionException e ) { // unexpected ;
return null ; } catch ( InterruptedException e ) { // unexpected ;
return null ; } catch ( NoSuchAlgorithmException e ) { // unexpected ;
return null ; } |
public class FileImportCommand { /** * ~ - - - methods - - - - - */
@ Override public void processMessage ( WebSocketMessage webSocketData ) throws FrameworkException { } } | setDoTransactionNotifications ( true ) ; final String mode = webSocketData . getNodeDataStringValue ( "mode" ) ; // default : list start | pause | resume | cancel | abort
final Long jobId = webSocketData . getNodeDataLongValue ( "jobId" ) ; final JobQueueManager mgr = JobQueueManager . getInstance ( ) ; final List < GraphObject > result = new LinkedList < > ( ) ; switch ( mode ) { case "start" : mgr . startJob ( jobId ) ; break ; case "pause" : mgr . pauseRunningJob ( jobId ) ; break ; case "resume" : mgr . resumePausedJob ( jobId ) ; break ; case "abort" : mgr . abortActiveJob ( jobId ) ; break ; case "cancel" : mgr . cancelQueuedJob ( jobId ) ; break ; case "list" : default : final GraphObjectMap importsContainer = new GraphObjectMap ( ) ; importsContainer . put ( importJobsProperty , mgr . listJobs ( ) ) ; result . add ( importsContainer ) ; } webSocketData . setResult ( result ) ; webSocketData . setRawResultCount ( 1 ) ; getWebSocket ( ) . send ( webSocketData , true ) ; |
public class GradientThree_Share { /** * Can only be used with images that are NOT sub - images . */
public static void derivX_F32 ( GrayF32 orig , GrayF32 derivX ) { } } | final float [ ] data = orig . data ; final float [ ] imgX = derivX . data ; final int width = orig . getWidth ( ) ; final int height = orig . getHeight ( ) ; for ( int y = 0 ; y < height ; y ++ ) { int index = width * y + 1 ; int endX = index + width - 2 ; int endXAlt = endX - ( width - 2 ) % 3 ; float x0 = data [ index - 1 ] ; float x1 = data [ index ] ; for ( ; index < endXAlt ; ) { float x2 = data [ index + 1 ] ; imgX [ index ++ ] = ( x2 - x0 ) * 0.5f ; x0 = data [ index + 1 ] ; imgX [ index ++ ] = ( x0 - x1 ) * 0.5f ; x1 = data [ index + 1 ] ; imgX [ index ++ ] = ( x1 - x2 ) * 0.5f ; } for ( ; index < endX ; index ++ ) { imgX [ index ] = ( data [ index + 1 ] - data [ index - 1 ] ) * 0.5f ; } } |
public class RemoteConsumerDispatcher { /** * overiding the method in the superclass , to create a RemoteQPConsumerKey */
protected DispatchableKey createConsumerKey ( DispatchableConsumerPoint consumerPoint , SelectionCriteria criteria , SIBUuid12 connectionUuid , boolean readAhead , boolean forwardScanning , JSConsumerSet consumerSet ) throws SISelectorSyntaxException , SIDiscriminatorSyntaxException , SIResourceException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "createConsumerKey" , new Object [ ] { consumerPoint , criteria , connectionUuid , new Boolean ( readAhead ) , new Boolean ( forwardScanning ) , consumerSet } ) ; RemoteQPConsumerKey consKey = new RemoteQPConsumerKey ( consumerPoint , this , criteria , connectionUuid , readAhead , forwardScanning ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "createConsumerKey" , consKey ) ; return consKey ; |
public class BlacklistGenerator2 { /** * Generates the blacklist .
* @ param model model to use
* @ return the blacklist
* @ throws IOException when there is an I / O problem */
public Blacklist generateBlacklist ( Model model ) throws IOException { } } | Map < String , String > nameMapping = readNameMapping ( ) ; if ( nameMapping == null ) { generateNameMappingFileToCurate ( model ) ; throw new RuntimeException ( "Small molecule name mapping file not found. Generated a " + "mapping file, but it needs manual curation before use.\nPlease go over some top " + "portion of this file and delete invalid lines and any uncurated bottom part.\n" + "After that, you can rerun this method." ) ; } SIFSearcher searcher = new SIFSearcher ( new Fetcher ( nameMapping ) , SIFEnum . USED_TO_PRODUCE ) ; Set < SIFInteraction > sifs = searcher . searchSIF ( model ) ; // read interactions into maps
Map < String , Set < String > > upstrMap = new HashMap < String , Set < String > > ( ) ; Map < String , Set < String > > dwstrMap = new HashMap < String , Set < String > > ( ) ; final Map < String , Set < String > > neighMap = new HashMap < String , Set < String > > ( ) ; for ( SIFInteraction sif : sifs ) { String source = sif . sourceID ; String target = sif . targetID ; if ( ! neighMap . containsKey ( source ) ) neighMap . put ( source , new HashSet < String > ( ) ) ; if ( ! neighMap . containsKey ( target ) ) neighMap . put ( target , new HashSet < String > ( ) ) ; if ( ! dwstrMap . containsKey ( source ) ) dwstrMap . put ( source , new HashSet < String > ( ) ) ; if ( ! dwstrMap . containsKey ( target ) ) dwstrMap . put ( target , new HashSet < String > ( ) ) ; if ( ! upstrMap . containsKey ( source ) ) upstrMap . put ( source , new HashSet < String > ( ) ) ; if ( ! upstrMap . containsKey ( target ) ) upstrMap . put ( target , new HashSet < String > ( ) ) ; neighMap . get ( source ) . add ( target ) ; neighMap . get ( target ) . add ( source ) ; dwstrMap . get ( source ) . add ( target ) ; upstrMap . get ( target ) . add ( source ) ; } // remove intersection of upstream and downstream
for ( String name : neighMap . keySet ( ) ) { if ( ! upstrMap . containsKey ( name ) || ! dwstrMap . containsKey ( name ) ) continue ; Set < String > upstr = upstrMap . get ( name ) ; Set < String > dwstr = dwstrMap . get ( name ) ; Set < String > temp = new HashSet < String > ( upstr ) ; upstr . removeAll ( dwstr ) ; dwstr . removeAll ( temp ) ; } // writeTheGuideRankingToTuneTheDecider ( model , nameMapping , upstrMap , dwstrMap , neighMap ) ;
// if ( true ) return null ;
Set < String > white = readWhitelist ( ) ; Blacklist blacklist = new Blacklist ( ) ; // populate the blacklist
Fetcher nameFetcher = new Fetcher ( nameMapping ) ; for ( SmallMoleculeReference smr : model . getObjects ( SmallMoleculeReference . class ) ) { Set < String > names = nameFetcher . fetchID ( smr ) ; if ( names . isEmpty ( ) ) continue ; String name = names . iterator ( ) . next ( ) ; if ( white != null && white . contains ( name ) ) continue ; int neighSize = neighMap . containsKey ( name ) ? neighMap . get ( name ) . size ( ) : 0 ; int upstrOnly = upstrMap . containsKey ( name ) ? upstrMap . get ( name ) . size ( ) : 0 ; int dwstrOnly = dwstrMap . containsKey ( name ) ? dwstrMap . get ( name ) . size ( ) : 0 ; if ( decider . isUbique ( neighSize , upstrOnly , dwstrOnly ) ) { blacklist . addEntry ( smr . getUri ( ) , decider . getScore ( neighSize , upstrOnly , dwstrOnly ) , decider . getContext ( neighSize , upstrOnly , dwstrOnly ) ) ; } } return blacklist ; |
public class GridFontParam { /** * Initialize default values and validate that config is correct . */
public void postConstruct ( ) { } } | Assert . isTrue ( this . name != null , "name parameter cannot be null" ) ; Assert . isTrue ( this . style != null , "style parameter cannot be null" ) ; Assert . isTrue ( this . size > 1 , "size must be greater than 1" ) ; Font baseFont = null ; for ( String fontName : this . name ) { try { baseFont = new Font ( fontName , this . style . styleId , this . size ) ; break ; } catch ( Exception e ) { // try next font in list
} } if ( baseFont == null ) { String [ ] legalFonts = GraphicsEnvironment . getLocalGraphicsEnvironment ( ) . getAvailableFontFamilyNames ( ) ; throw new IllegalArgumentException ( Arrays . toString ( this . name ) + " does not contain a font that can be created by this Java " + "Virtual Machine, legal options are: \n" + Arrays . toString ( legalFonts ) ) ; } |
public class ContentHandlerImporter { /** * { @ inheritDoc } */
public void startElement ( String uri , String localName , String qName , Attributes atts ) throws SAXException { } } | try { Map < String , String > attribute = new HashMap < String , String > ( ) ; for ( int i = 0 ; i < atts . getLength ( ) ; i ++ ) { attribute . put ( atts . getQName ( i ) , atts . getValue ( i ) ) ; } importer . startElement ( uri , localName , qName , attribute ) ; } catch ( RepositoryException e ) { // e . printStackTrace ( ) ;
throw new SAXException ( e ) ; } |
public class CommandLineInterface { /** * Generate Language Profile from a text file .
* < pre >
* usage : - - genprofile [ text file ] [ language name ]
* < / pre > */
public void generateProfile ( ) { } } | File directory = new File ( arglist . get ( 0 ) ) ; String lang = arglist . get ( 1 ) ; File file = searchFile ( directory , lang + "wiki-.*-abstract\\.xml.*" ) ; if ( file == null ) { System . err . println ( "Not Found text file : lang = " + lang ) ; return ; } try ( FileOutputStream outputStream = new FileOutputStream ( new File ( lang ) ) ) { LangProfile profile = GenProfile . load ( lang , file ) ; profile . omitLessFreq ( ) ; new LangProfileWriter ( ) . write ( profile , outputStream ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } |
public class DoublesSketch { /** * This returns an approximation to the value of the data item
* that would be preceded by the given fraction of a hypothetical sorted
* version of the input stream so far .
* < p > We note that this method has a fairly large overhead ( microseconds instead of nanoseconds )
* so it should not be called multiple times to get different quantiles from the same
* sketch . Instead use getQuantiles ( ) , which pays the overhead only once .
* < p > If the sketch is empty this returns Double . NaN .
* @ param fraction the specified fractional position in the hypothetical sorted stream .
* These are also called normalized ranks or fractional ranks .
* If fraction = 0.0 , the true minimum value of the stream is returned .
* If fraction = 1.0 , the true maximum value of the stream is returned .
* @ return the approximation to the value at the above fraction */
public double getQuantile ( final double fraction ) { } } | if ( isEmpty ( ) ) { return Double . NaN ; } if ( ( fraction < 0.0 ) || ( fraction > 1.0 ) ) { throw new SketchesArgumentException ( "Fraction cannot be less than zero or greater than 1.0" ) ; } if ( fraction == 0.0 ) { return getMinValue ( ) ; } else if ( fraction == 1.0 ) { return getMaxValue ( ) ; } else { final DoublesAuxiliary aux = new DoublesAuxiliary ( this ) ; return aux . getQuantile ( fraction ) ; } |
public class CustomPostgreSQLContainer { /** * Add additional configuration options that should be used for this container .
* @ param key The PostgreSQL configuration option key . For example : " max _ connections "
* @ param value The PostgreSQL configuration option value . For example : " 200"
* @ return this */
public SELF withConfigOption ( String key , String value ) { } } | if ( key == null ) { throw new java . lang . NullPointerException ( "key marked @NonNull but is null" ) ; } if ( value == null ) { throw new java . lang . NullPointerException ( "value marked @NonNull but is null" ) ; } options . put ( key , value ) ; return self ( ) ; |
public class AmazonComprehendClient { /** * Get a list of key phrase detection jobs that you have submitted .
* @ param listKeyPhrasesDetectionJobsRequest
* @ return Result of the ListKeyPhrasesDetectionJobs operation returned by the service .
* @ throws InvalidRequestException
* The request is invalid .
* @ throws TooManyRequestsException
* The number of requests exceeds the limit . Resubmit your request later .
* @ throws InvalidFilterException
* The filter specified for the < code > ListDocumentClassificationJobs < / code > operation is invalid . Specify a
* different filter .
* @ throws InternalServerException
* An internal server error occurred . Retry your request .
* @ sample AmazonComprehend . ListKeyPhrasesDetectionJobs
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / comprehend - 2017-11-27 / ListKeyPhrasesDetectionJobs "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ListKeyPhrasesDetectionJobsResult listKeyPhrasesDetectionJobs ( ListKeyPhrasesDetectionJobsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListKeyPhrasesDetectionJobs ( request ) ; |
public class FSNamesystem { /** * Check last block of the file under construction
* Replicate it if it is under replicated
* @ param src the file name
* @ param file the file ' s inode */
private void replicateLastBlock ( String src , INodeFileUnderConstruction file ) { } } | BlockInfo [ ] blks = file . getBlocks ( ) ; if ( blks == null || blks . length == 0 ) return ; BlockInfo block = blks [ blks . length - 1 ] ; DatanodeDescriptor [ ] targets = file . getValidTargets ( ) ; final int numOfTargets = targets == null ? 0 : targets . length ; NumberReplicas status = countNodes ( block ) ; int totalReplicas = status . getTotal ( ) ; if ( numOfTargets > totalReplicas ) { pendingReplications . add ( block , numOfTargets - totalReplicas ) ; } int expectedReplicas = file . getReplication ( ) ; if ( numOfTargets < expectedReplicas || status . decommissionedReplicas != 0 || status . corruptReplicas != 0 ) { LOG . info ( "Add " + block + " of " + src + " to needReplication queue: " + " numOfTargets = " + numOfTargets + " decomissionedReplicas = " + status . decommissionedReplicas + " corruptReplicas = " + status . corruptReplicas ) ; neededReplications . add ( block , status . liveReplicas , status . decommissionedReplicas , expectedReplicas ) ; } // update metrics
if ( numOfTargets < expectedReplicas ) { if ( numOfTargets == 1 ) { myFSMetrics . numNewBlocksWithOneReplica . inc ( ) ; } } else { myFSMetrics . numNewBlocksWithoutFailure . inc ( ) ; } myFSMetrics . numNewBlocks . inc ( ) ; |
public class SdkUtils { /** * Recursively delete a folder and all its subfolders and files .
* @ param f directory to be deleted .
* @ return True if the folder was deleted . */
public static boolean deleteFolderRecursive ( final File f ) { } } | if ( f . isDirectory ( ) ) { File [ ] files = f . listFiles ( ) ; if ( files == null ) { return false ; } for ( File c : files ) { deleteFolderRecursive ( c ) ; } } return f . delete ( ) ; |
public class DeviceInfo { /** * PRS : User agent is checked only from api - 17 */
private String getDefaultBrowserAgent ( Context context ) { } } | String userAgent = "" ; if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . JELLY_BEAN_MR1 ) { try { userAgent = WebSettings . getDefaultUserAgent ( context ) ; } catch ( Exception ignore ) { // A known Android issue . Webview packages are not accessible while any updates for chrome is in progress .
// https : / / bugs . chromium . org / p / chromium / issues / detail ? id = 506369
} } return userAgent ; |
public class Resolver { /** * syck _ resolver _ use _ types _ at */
@ JRubyMethod public static IRubyObject use_types_at ( IRubyObject self , IRubyObject hsh ) { } } | ( ( RubyObject ) self ) . fastSetInstanceVariable ( "@tags" , hsh ) ; return self . getRuntime ( ) . getNil ( ) ; |
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcHatchLineDistanceSelect ( ) { } } | if ( ifcHatchLineDistanceSelectEClass == null ) { ifcHatchLineDistanceSelectEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 957 ) ; } return ifcHatchLineDistanceSelectEClass ; |
public class ShardUtils { /** * Retrieve the list of all the reference names and their start = 0 / end positions for the variantSet .
* Note that start is hardcoded to zero since the referenceBounds only include the upper bound .
* @ param variantSetId - The id of the variantSet to query .
* @ param sexChromosomeFilter - An enum value indicating how sex chromosomes should be
* handled in the result .
* @ return The list of all references in the variantSet .
* @ throws IOException */
private static List < Contig > getContigsInVariantSet ( String variantSetId , SexChromosomeFilter sexChromosomeFilter , OfflineAuth auth ) throws IOException { } } | List < Contig > contigs = Lists . newArrayList ( ) ; for ( ReferenceBound bound : GenomicsUtils . getReferenceBounds ( variantSetId , auth ) ) { if ( sexChromosomeFilter == SexChromosomeFilter . EXCLUDE_XY && SEX_CHROMOSOME_REGEXP . matcher ( bound . getReferenceName ( ) ) . matches ( ) ) { // X and Y can skew some analysis results
continue ; } contigs . add ( new Contig ( bound . getReferenceName ( ) , 0 , bound . getUpperBound ( ) ) ) ; } return contigs ; |
public class Job { /** * Looks up a build by its ID .
* @ see LazyBuildMixIn # getBuild */
public RunT getBuild ( String id ) { } } | for ( RunT r : _getRuns ( ) . values ( ) ) { if ( r . getId ( ) . equals ( id ) ) return r ; } return null ; |
public class SnippetsApi { /** * Get a specific snippet as an Optional instance .
* < pre > < code > GitLab Endpoint : GET / snippets / : snippet _ id < / code > < / pre >
* @ param snippetId the ID of the snippet to get the Optional instance for
* @ param downloadContent indicating whether to download the snippet content
* @ return the specified Snippet as an Optional instance */
public Optional < Snippet > getOptionalSnippet ( Integer snippetId , boolean downloadContent ) { } } | try { return ( Optional . ofNullable ( getSnippet ( snippetId , downloadContent ) ) ) ; } catch ( GitLabApiException glae ) { return ( GitLabApi . createOptionalFromException ( glae ) ) ; } |
public class BugInstance { /** * Set value of given property .
* @ param name
* name of the property to set
* @ param value
* the value of the property
* @ return this object , so calls can be chained */
@ Nonnull public BugInstance setProperty ( String name , String value ) { } } | BugProperty prop = lookupProperty ( name ) ; if ( prop != null ) { prop . setValue ( value ) ; } else { prop = new BugProperty ( name , value ) ; addProperty ( prop ) ; } return this ; |
public class TypeRuntimeWiring { /** * This form allows a lambda to be used as the builder
* @ param typeName the name of the type to wire
* @ param builderFunction a function that will be given the builder to use
* @ return the same builder back please */
public static TypeRuntimeWiring newTypeWiring ( String typeName , UnaryOperator < Builder > builderFunction ) { } } | return builderFunction . apply ( newTypeWiring ( typeName ) ) . build ( ) ; |
public class CharSequenceUtil { /** * Trim horizontal whitespace from left side .
* @ param str The string to trim
* @ return str with whitespace from left side trimmed . */
public static CharSequence ltrim ( CharSequence str ) { } } | int len ; int i ; if ( str == null || ( len = str . length ( ) ) <= 0 ) return "" ; for ( i = 0 ; i < len ; i ++ ) if ( ! Character . isWhitespace ( str . charAt ( i ) ) ) break ; if ( i >= len ) return "" ; return str . subSequence ( i , len ) ; |
public class GeneratorXMLDatabaseConnection { /** * Load the XML configuration file .
* @ return Generator */
private GeneratorMain loadXMLSpecification ( ) { } } | try { DocumentBuilderFactory dbf = DocumentBuilderFactory . newInstance ( ) ; dbf . setFeature ( "http://apache.org/xml/features/nonvalidating/load-external-dtd" , false ) ; URL url = ClassLoader . getSystemResource ( GENERATOR_SCHEMA_FILE ) ; if ( url != null ) { try { Schema schema = SchemaFactory . newInstance ( XMLConstants . W3C_XML_SCHEMA_NS_URI ) . newSchema ( url ) ; dbf . setSchema ( schema ) ; dbf . setIgnoringElementContentWhitespace ( true ) ; } catch ( Exception e ) { LOG . warning ( "Could not set up XML Schema validation for specification file." , e ) ; } } else { LOG . warning ( "Could not set up XML Schema validation for specification file." ) ; } Document doc = dbf . newDocumentBuilder ( ) . parse ( specfile ) ; Node root = doc . getDocumentElement ( ) ; if ( TAG_DATASET . equals ( root . getNodeName ( ) ) ) { GeneratorMain gen = new GeneratorMain ( ) ; processElementDataset ( gen , root ) ; return gen ; } else { throw new AbortException ( "Experiment specification has incorrect document element: " + root . getNodeName ( ) ) ; } } catch ( FileNotFoundException e ) { throw new AbortException ( "Can't open specification file." , e ) ; } catch ( SAXException e ) { throw new AbortException ( "Error parsing specification file." , e ) ; } catch ( IOException e ) { throw new AbortException ( "IO Exception loading specification file." , e ) ; } catch ( ParserConfigurationException e ) { throw new AbortException ( "Parser Configuration Error" , e ) ; } |
public class HTMLGen { /** * Use " directive " to handle non - ended HTML tags like < meta . . . > and < link . . . >
* @ param tag
* @ param attrs
* @ return */
public HTMLGen directive ( String tag , String ... attrs ) { } } | forward . append ( '<' ) ; forward . append ( tag ) ; addAttrs ( attrs ) ; forward . append ( '>' ) ; if ( pretty ) { forward . println ( ) ; } return this ; |
public class MappeableRunContainer { /** * Checks whether the run container contains x .
* @ param buf underlying ByteBuffer
* @ param position starting position of the container in the ByteBuffer
* @ param x target 16 - bit value
* @ param numRuns number of runs
* @ return whether the run container contains x */
public static boolean contains ( ByteBuffer buf , int position , short x , final int numRuns ) { } } | int index = bufferedUnsignedInterleavedBinarySearch ( buf , position , 0 , numRuns , x ) ; if ( index >= 0 ) { return true ; } index = - index - 2 ; // points to preceding value , possibly - 1
if ( index != - 1 ) { // possible match
int offset = toIntUnsigned ( x ) - toIntUnsigned ( buf . getShort ( position + index * 2 * 2 ) ) ; int le = toIntUnsigned ( buf . getShort ( position + index * 2 * 2 + 2 ) ) ; if ( offset <= le ) { return true ; } } return false ; |
public class Shape { /** * Return the shape of the largest length array
* based on the input
* @ param inputs the inputs to get the max shape for
* @ return the largest shape based on the inputs */
public static long [ ] getMaxShape ( INDArray ... inputs ) { } } | if ( inputs == null ) return null ; else if ( inputs . length < 2 ) return inputs [ 0 ] . shape ( ) ; else { long [ ] currMax = inputs [ 0 ] . shape ( ) ; for ( int i = 1 ; i < inputs . length ; i ++ ) { if ( inputs [ i ] == null ) { continue ; } if ( ArrayUtil . prod ( currMax ) < inputs [ i ] . length ( ) ) { currMax = inputs [ i ] . shape ( ) ; } } return currMax ; } |
public class ConfigurationParseHelper { /** * Parses a string to recognize exactly either " true " or " false " .
* @ param value the string to be parsed
* @ param errorMsgOnParseFailure the message to be put in the exception if thrown
* @ return true if value is " true " , false if value is " false "
* @ throws SearchException for invalid format or values . */
public static final boolean parseBoolean ( String value , String errorMsgOnParseFailure ) { } } | // avoiding Boolean . valueOf ( ) to have more checks : makes it easy to spot wrong type in cfg .
if ( value == null ) { throw new SearchException ( errorMsgOnParseFailure ) ; } else if ( "false" . equalsIgnoreCase ( value . trim ( ) ) ) { return false ; } else if ( "true" . equalsIgnoreCase ( value . trim ( ) ) ) { return true ; } else { throw new SearchException ( errorMsgOnParseFailure ) ; } |
public class CssImageUrlRewriter {
    /**
     * Returns the rewritten image path.
     *
     * @param originalCssPath the original CSS path
     * @param newCssPath the new CSS path
     * @param url the image URL
     * @return the rewritten image path
     * @throws IOException if an IOException occurs
     */
    protected String getRewrittenImagePath(String originalCssPath, String newCssPath, String url) throws IOException {
        String imgUrl = null;
        // Determine whether the referenced image is produced by a binary
        // resource generator; generated URLs are passed through unchanged.
        boolean generatedImg = false;
        if (binaryRsHandler != null) {
            GeneratorRegistry imgRsGeneratorRegistry = binaryRsHandler.getConfig().getGeneratorRegistry();
            generatedImg = imgRsGeneratorRegistry.isGeneratedBinaryResource(url);
        }
        // Resolve the image URL relative to the CSS file that references it.
        String fullImgPath = PathNormalizer.concatWebPath(originalCssPath, url);
        if (!generatedImg) {
            // Add image servlet path in the URL, if it's defined
            if (StringUtils.isNotEmpty(binaryServletPath)) {
                fullImgPath = binaryServletPath + JawrConstant.URL_SEPARATOR + fullImgPath;
            }
            // Re-relativize against the directory of the new CSS location.
            imgUrl = PathNormalizer.getRelativeWebPath(PathNormalizer.getParentPath(newCssPath), fullImgPath);
        } else {
            imgUrl = url;
        }
        return imgUrl;
    }
}
public class AbstractConfigurableTemplateResolver {
    /**
     * Computes the resource name that will be used for resolving, from the template name and other
     * parameters configured at this <em>configurable</em> resolver.
     * <p>
     * By default, the resource name is created by first applying the <em>template aliases</em> and
     * then adding <em>prefix</em> and <em>suffix</em> to the specified template name.
     *
     * @param configuration the engine configuration in use.
     * @param ownerTemplate the owner template, if the resource being computed is a fragment. Might be null.
     * @param template the template (normally the template name, except for String templates).
     * @param prefix the prefix to be applied.
     * @param suffix the suffix to be applied.
     * @param templateAliases the template aliases map.
     * @param templateResolutionAttributes the template resolution attributes, if any. Might be null.
     * @return the resource name that should be used for resolving
     * @deprecated in 3.0.6. Use
     *             {@link #computeResourceName(IEngineConfiguration, String, String, String, String, boolean, Map, Map)}
     *             instead. Will be removed in Thymeleaf 3.2.
     */
    @Deprecated
    protected String computeResourceName(final IEngineConfiguration configuration, final String ownerTemplate, final String template, final String prefix, final String suffix, final Map<String, String> templateAliases, final Map<String, Object> templateResolutionAttributes) {
        // Delegate to the non-deprecated overload; `false` is the
        // forceSuffix flag that did not exist in this legacy signature.
        return computeResourceName(configuration, ownerTemplate, template, prefix, suffix, false, templateAliases, templateResolutionAttributes);
    }
}
public class NonBlockingPushbackReader {
    /**
     * Reads characters into a portion of an array.
     *
     * @param aBuf Destination buffer
     * @param nOfs Offset at which to start writing characters
     * @param nLen Maximum number of characters to read
     * @return The number of characters read, or -1 if the end of the stream has been reached
     * @exception IOException If an I/O error occurs
     */
    @Override
    public int read(@Nonnull final char[] aBuf, @Nonnegative final int nOfs, @Nonnegative final int nLen) throws IOException {
        ValueEnforcer.isArrayOfsLen(aBuf, nOfs, nLen);
        _ensureOpen();
        if (nLen == 0)
            return 0;
        try {
            int nRealOfs = nOfs;
            int nRealLen = nLen;
            // First drain any characters previously pushed back into m_aBuf.
            int nBufAvail = m_aBuf.length - m_nBufPos;
            if (nBufAvail > 0) {
                if (nRealLen < nBufAvail)
                    nBufAvail = nRealLen;
                System.arraycopy(m_aBuf, m_nBufPos, aBuf, nRealOfs, nBufAvail);
                m_nBufPos += nBufAvail;
                nRealOfs += nBufAvail;
                nRealLen -= nBufAvail;
            }
            // Then satisfy the remainder of the request from the wrapped reader.
            if (nRealLen > 0) {
                nRealLen = super.read(aBuf, nRealOfs, nRealLen);
                if (nRealLen == -1) {
                    // EOF from the underlying reader: report EOF only when the
                    // pushback buffer contributed nothing.
                    return (nBufAvail == 0) ? -1 : nBufAvail;
                }
                return nBufAvail + nRealLen;
            }
            return nBufAvail;
        } catch (final ArrayIndexOutOfBoundsException e) {
            // Normalize internal copy failures to the exception type the
            // Reader.read contract documents.
            throw new IndexOutOfBoundsException();
        }
    }
}
public class AVACL { /** * Get whether the given user id is * explicitly * allowed to write this object . Even if this
* returns { @ code false } , the user may still be able to write it if getPublicWriteAccess returns
* { @ code true } or a role that the user belongs to has write access . */
public boolean getWriteAccess ( String userId ) { } } | if ( StringUtil . isEmpty ( userId ) ) { return false ; } Permissions permissions = permissionsById . get ( userId ) ; return permissions != null && permissions . getWritePermission ( ) ; |
public class SnappyServer {
    /**
     * Define a Server sent events endpoint without a handler. Supports path variables.
     * Data can be broadcast to this endpoint by using {@link io.joshworks.snappy.sse.SseBroadcaster}.
     *
     * @param url The relative URL to map this endpoint to.
     */
    public static synchronized void sse(String url) {
        // Endpoints may only be registered before the server starts.
        checkStarted();
        // null handler: the endpoint exists solely as a broadcast target.
        instance().endpoints.add(HandlerUtil.sse(url, null));
    }
}
public class ModbusInputStream {
    /**
     * Read two bytes in Little Endian byte order.
     *
     * @return 16-bit value placed in the low two bytes of the returned int,
     *         or -1 if end of stream was reached on either byte
     * @throws IOException if an I/O error occurs
     * @see ModbusInputStream#readShortBE()
     */
    public int readShortLE() throws IOException {
        // Little endian: low byte arrives first.
        int l = read();
        int h = read();
        if (-1 == h || -1 == l)
            return -1;
        // NOTE(review): the -1 EOF sentinel is ambiguous with a valid 0xFFFF
        // value only if toShort can produce a negative int — presumably it
        // returns an unsigned 16-bit result; confirm against DataUtils.toShort.
        return DataUtils.toShort(h, l);
    }
}
public class NetworkEnvironment { /** * Batch release intermediate result partitions .
* @ param partitionIds partition ids to release */
public void releasePartitions ( Collection < ResultPartitionID > partitionIds ) { } } | for ( ResultPartitionID partitionId : partitionIds ) { resultPartitionManager . releasePartition ( partitionId , null ) ; } |
public class PreferenceActivity {
    /**
     * Sets the string, which should be used to format the progress, which is shown, when the
     * activity is used as a wizard.
     *
     * @param progressFormat The string, which should be set, as a {@link String}. The string may
     *                       neither be null, nor empty. It must be formatted according to the
     *                       following syntax: "*%d*%d*%s*"
     */
    public final void setProgressFormat(@NonNull final String progressFormat) {
        // Validate the contract up front so a bad format fails fast.
        Condition.INSTANCE.ensureNotNull(progressFormat, "The progress format may not be null");
        Condition.INSTANCE.ensureNotEmpty(progressFormat, "The progress format may not be empty");
        this.progressFormat = progressFormat;
        // Re-render the progress indicator with the new format immediately.
        adaptProgress();
    }
}
public class IPAddressRange { /** * Is the given ip address numericaly above the address range ?
* Is it above ipHigher ? */
public boolean aboveRange ( InetAddress ip ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "aboveRange, ip is " + ip ) ; Tr . debug ( tc , "aboveRange, ip is " + ip ) ; } return greaterThan ( ip , ipHigher ) ; |
public class AmazonCloudWatchClient { /** * Retrieves the history for the specified alarm . You can filter the results by date range or item type . If an alarm
* name is not specified , the histories for all alarms are returned .
* CloudWatch retains the history of an alarm even if you delete the alarm .
* @ param describeAlarmHistoryRequest
* @ return Result of the DescribeAlarmHistory operation returned by the service .
* @ throws InvalidNextTokenException
* The next token specified is invalid .
* @ sample AmazonCloudWatch . DescribeAlarmHistory
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / monitoring - 2010-08-01 / DescribeAlarmHistory "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribeAlarmHistoryResult describeAlarmHistory ( DescribeAlarmHistoryRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeAlarmHistory ( request ) ; |
public class ListTargetsForSecurityProfileResult { /** * The thing groups to which the security profile is attached .
* @ param securityProfileTargets
* The thing groups to which the security profile is attached . */
public void setSecurityProfileTargets ( java . util . Collection < SecurityProfileTarget > securityProfileTargets ) { } } | if ( securityProfileTargets == null ) { this . securityProfileTargets = null ; return ; } this . securityProfileTargets = new java . util . ArrayList < SecurityProfileTarget > ( securityProfileTargets ) ; |
public class MongodbPubSubHub {
    /**
     * {@inheritDoc}
     */
    @Override
    public MongodbPubSubHub<ID, DATA> init() {
        // Lazily build a client if none was injected before init.
        if (getMongoClient() == null) {
            setMongoClient(buildMongoClient(), true);
        }
        super.init();
        // Re-check after super.init(): guards against buildMongoClient()
        // having returned null — presumably super.init() does not supply one
        // either; TODO confirm why the check is repeated rather than done once.
        if (getMongoClient() == null) {
            throw new IllegalStateException("MongoDB Client is null.");
        }
        return this;
    }
}
public class WritableColumnVector { /** * Reserve a integer column for ids of dictionary . */
public WritableColumnVector reserveDictionaryIds ( int capacity ) { } } | if ( dictionaryIds == null ) { dictionaryIds = reserveNewColumn ( capacity , DataTypes . IntegerType ) ; } else { dictionaryIds . reset ( ) ; dictionaryIds . reserve ( capacity ) ; } return dictionaryIds ; |
public class Bundler {
    /**
     * Inserts an ArrayList&lt;Integer&gt; value into the mapping of the underlying Bundle,
     * replacing any existing value for the given key. Either key or value may be null.
     *
     * @param key a String, or null
     * @param value an ArrayList&lt;Integer&gt; object, or null
     * @return this bundler instance to chain method calls
     */
    public Bundler putIntegerArrayList(String key, ArrayList<Integer> value) {
        // Straight delegation to the wrapped Bundle; fluent return for chaining.
        delegate.putIntegerArrayList(key, value);
        return this;
    }
}
public class NativeGlobal {
    /**
     * The global method parseInt, as per ECMA-262 15.1.2.2.
     *
     * @param args JS arguments: args[0] is the string, args[1] the optional radix
     * @return the parsed number wrapped as a JS value, or NaN on failure
     */
    static Object js_parseInt(Object[] args) {
        String s = ScriptRuntime.toString(args, 0);
        int radix = ScriptRuntime.toInt32(args, 1);
        int len = s.length();
        if (len == 0)
            return ScriptRuntime.NaNobj;
        // Skip leading whitespace, then consume an optional sign.
        boolean negative = false;
        int start = 0;
        char c;
        do {
            c = s.charAt(start);
            if (!ScriptRuntime.isStrWhiteSpaceChar(c))
                break;
            start++;
        } while (start < len);
        if (c == '+' || (negative = (c == '-')))
            start++;
        // radix 0 means "not specified" per the spec's ToInt32 of undefined.
        final int NO_RADIX = -1;
        if (radix == 0) {
            radix = NO_RADIX;
        } else if (radix < 2 || radix > 36) {
            return ScriptRuntime.NaNobj;
        } else if (radix == 16 && len - start > 1 && s.charAt(start) == '0') {
            // Explicit radix 16 may still carry a 0x/0X prefix; strip it.
            c = s.charAt(start + 1);
            if (c == 'x' || c == 'X')
                start += 2;
        }
        if (radix == NO_RADIX) {
            // No radix given: default 10, but honor 0x (hex) and legacy
            // leading-zero (octal) prefixes.
            radix = 10;
            if (len - start > 1 && s.charAt(start) == '0') {
                c = s.charAt(start + 1);
                if (c == 'x' || c == 'X') {
                    radix = 16;
                    start += 2;
                } else if ('0' <= c && c <= '9') {
                    radix = 8;
                    start++;
                }
            }
        }
        double d = ScriptRuntime.stringPrefixToNumber(s, start, radix);
        return ScriptRuntime.wrapNumber(negative ? -d : d);
    }
}
public class MediaType { /** * Parse the given comma - separated string into a list of { @ code MediaType } objects .
* < p > This method can be used to parse an Accept or Content - Type header .
* @ param mediaTypes the string to parse .
* @ return the list of media types .
* @ throws InvalidMediaTypeException if the media type value cannot be parsed . */
public static List < MediaType > parseMediaTypes ( String mediaTypes ) { } } | if ( StringUtils . isEmpty ( mediaTypes ) ) { return Collections . emptyList ( ) ; } String [ ] tokens = StringUtils . tokenizeToStringArray ( mediaTypes , "," ) ; List < MediaType > result = new ArrayList < > ( tokens . length ) ; for ( String token : tokens ) { result . add ( parseMediaType ( token ) ) ; } return result ; |
public class CommerceNotificationTemplatePersistenceImpl {
    /**
     * Creates a new commerce notification template with the primary key. Does not add the
     * commerce notification template to the database.
     *
     * @param commerceNotificationTemplateId the primary key for the new commerce notification template
     * @return the new commerce notification template
     */
    @Override
    public CommerceNotificationTemplate create(long commerceNotificationTemplateId) {
        CommerceNotificationTemplate commerceNotificationTemplate = new CommerceNotificationTemplateImpl();
        // Mark as transient so a later update() performs an insert.
        commerceNotificationTemplate.setNew(true);
        commerceNotificationTemplate.setPrimaryKey(commerceNotificationTemplateId);
        // Pre-populate generated/derived fields the persistence layer owns.
        String uuid = PortalUUIDUtil.generate();
        commerceNotificationTemplate.setUuid(uuid);
        commerceNotificationTemplate.setCompanyId(companyProvider.getCompanyId());
        return commerceNotificationTemplate;
    }
}
public class CmsExtendedWorkflowManager {
    /**
     * Checks that the parent folders of new resources which are released are either not new or
     * are also released.<p>
     *
     * @param userCms the user CMS context
     * @param resources the resources to check
     * @throws CmsException if the check fails
     */
    protected void checkNewParentsInList(CmsObject userCms, List<CmsResource> resources) throws CmsException {
        // Index the candidate set by root path so parents that are part of the
        // same release batch are accepted without a repository lookup.
        Map<String, CmsResource> resourcesByPath = new HashMap<String, CmsResource>();
        // Use an admin context with the user's project and the root site so
        // parent folders can be read regardless of the user's site root.
        CmsObject rootCms = OpenCms.initCmsObject(m_adminCms);
        rootCms.getRequestContext().setCurrentProject(userCms.getRequestContext().getCurrentProject());
        rootCms.getRequestContext().setSiteRoot("");
        for (CmsResource resource : resources) {
            resourcesByPath.put(resource.getRootPath(), resource);
        }
        for (CmsResource resource : resources) {
            if (resource.getState().isNew()) {
                String parentPath = CmsResource.getParentFolder(resource.getRootPath());
                CmsResource parent = resourcesByPath.get(parentPath);
                if (parent == null) {
                    // Parent is not in the batch: it must already exist and
                    // not be in the "new" state, otherwise the release fails.
                    parent = rootCms.readResource(parentPath);
                    if (parent.getState().isNew()) {
                        throw new CmsNewParentNotInWorkflowException(
                            Messages.get().container(
                                Messages.ERR_NEW_PARENT_NOT_IN_WORKFLOW_1,
                                resource.getRootPath()));
                    }
                }
            }
        }
    }
}
public class ProvisionerImpl { /** * Start all previously installed bundles , but defer ( or not ) the ACTIVATION
* of those bundles based on the Bundle - ActivationPolicy value set in
* MANIFEST . MF .
* @ return BundleStartStatus object containing exceptions encountered while
* starting bundles
* @ see BundleStartStatus */
protected BundleStartStatus startBundles ( List < Bundle > installedBundles ) { } } | BundleStartStatus startStatus = new BundleStartStatus ( ) ; if ( installedBundles == null || installedBundles . size ( ) == 0 ) return startStatus ; for ( Bundle b : installedBundles ) { int state = b . getState ( ) ; // Only start bundles that aren ' t UNINSTALLED , and haven ' t already
// been started
if ( state == Bundle . UNINSTALLED || state >= org . osgi . framework . Bundle . STARTING ) continue ; try { b . start ( Bundle . START_ACTIVATION_POLICY ) ; } catch ( BundleException e ) { startStatus . addStartException ( b , e ) ; } } return startStatus ; |
public class CurrentThreadToServletContext { /** * Uses the FactoryManagerCacheKey system to find the ServletContext
* associated with the current ClassLoader , if any . */
Object getServletContextForCurrentClassLoader ( ) { } } | Object result ; FactoryManagerCacheKey key = new FactoryManagerCacheKey ( null , getClassLoader ( ) , applicationMap ) ; result = key . getContext ( ) ; return result ; |
public class LSrtSupplierBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */
@ Nonnull public static LSrtSupplier srtSupplierFrom ( Consumer < LSrtSupplierBuilder > buildingFunction ) { } } | LSrtSupplierBuilder builder = new LSrtSupplierBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ; |
public class GraphCentricQueryBuilder {
    /**
     * Query Construction: adds a (key, predicate, condition) constraint to this query.
     *
     * @param key the property key to constrain; must not be null
     * @param predicate the comparison predicate; must not be null
     * @param condition the value the predicate compares against; must be valid for the predicate
     * @return this builder, for chaining
     */
    private GraphCentricQueryBuilder has(String key, TitanPredicate predicate, Object condition) {
        Preconditions.checkNotNull(key);
        Preconditions.checkNotNull(predicate);
        // Reject conditions the predicate cannot evaluate (wrong type etc.).
        Preconditions.checkArgument(predicate.isValidCondition(condition), "Invalid condition: %s", condition);
        constraints.add(new PredicateCondition<>(key, predicate, condition));
        return this;
    }
}
public class ModelsImpl {
    /**
     * Get one entity role for a given entity.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param hEntityId The hierarchical entity extractor ID.
     * @param roleId entity role ID.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<EntityRole> getHierarchicalEntityRoleAsync(UUID appId, String versionId, UUID hEntityId, UUID roleId, final ServiceCallback<EntityRole> serviceCallback) {
        // Bridge the Observable-based implementation to the callback style.
        return ServiceFuture.fromResponse(
            getHierarchicalEntityRoleWithServiceResponseAsync(appId, versionId, hEntityId, roleId),
            serviceCallback);
    }
}
public class Sobel { /** * Expects a height mat as input
* @ param input - A grayscale height map
* @ return edges */
@ Override public ImageSource apply ( ImageSource input ) { } } | final int [ ] [ ] pixelMatrix = new int [ 3 ] [ 3 ] ; int w = input . getWidth ( ) ; int h = input . getHeight ( ) ; int [ ] [ ] output = new int [ h ] [ w ] ; for ( int j = 1 ; j < h - 1 ; j ++ ) { for ( int i = 1 ; i < w - 1 ; i ++ ) { pixelMatrix [ 0 ] [ 0 ] = input . getR ( i - 1 , j - 1 ) ; pixelMatrix [ 0 ] [ 1 ] = input . getRGB ( i - 1 , j ) ; pixelMatrix [ 0 ] [ 2 ] = input . getRGB ( i - 1 , j + 1 ) ; pixelMatrix [ 1 ] [ 0 ] = input . getRGB ( i , j - 1 ) ; pixelMatrix [ 1 ] [ 2 ] = input . getRGB ( i , j + 1 ) ; pixelMatrix [ 2 ] [ 0 ] = input . getRGB ( i + 1 , j - 1 ) ; pixelMatrix [ 2 ] [ 1 ] = input . getRGB ( i + 1 , j ) ; pixelMatrix [ 2 ] [ 2 ] = input . getRGB ( i + 1 , j + 1 ) ; int edge = ( int ) convolution ( pixelMatrix ) ; int rgb = ( edge << 16 | edge << 8 | edge ) ; output [ j ] [ i ] = rgb ; } } MatrixSource source = new MatrixSource ( output ) ; return source ; |
public class BaseDatabase {
    /**
     * Init this database and add it to the databaseOwner.
     *
     * @param databaseOwner My databaseOwner.
     * @param strDbName The database name.
     * @param iDatabaseType The database type (LOCAL/REMOTE).
     * @param properties Initial database properties.
     */
    public void init(DatabaseOwner databaseOwner, String strDbName, int iDatabaseType, Map<String, Object> properties) {
        m_databaseOwner = databaseOwner;
        m_strDbName = strDbName;
        m_iDatabaseType = iDatabaseType;
        m_vTableList = new Vector<BaseTable>();
        // Default to serving both master and slave record owners.
        m_iMasterSlave = RecordOwner.MASTER | RecordOwner.SLAVE;
        m_bAutosequenceSupport = true;
        // Merge the supplied properties into any pre-existing property table.
        if (m_properties == null)
            m_properties = new Hashtable<String, Object>();
        if (properties != null)
            m_properties.putAll(properties);
        // Register with the owner last, once this instance is fully set up.
        m_databaseOwner.addDatabase(this);
    }
}
public class AbstractRedisStorage { /** * Pause a job by pausing all of its triggers
* @ param jobKey the key of the job to be paused
* @ param jedis a thread - safe Redis connection */
public void pauseJob ( JobKey jobKey , T jedis ) throws JobPersistenceException { } } | for ( OperableTrigger trigger : getTriggersForJob ( jobKey , jedis ) ) { pauseTrigger ( trigger . getKey ( ) , jedis ) ; } |
public class JsonWriter {
    /**
     * Reset the state of the writer and flush already written content.
     */
    protected void reset() {
        // Push pending output to the sink before discarding internal state.
        writer.flush();
        stack.clear();
        // Start over in plain VALUE mode, as if freshly constructed.
        context = new JsonContext(JsonContext.Mode.VALUE);
    }
}
public class DateUtils {
    /**
     * <p>Returns the number of seconds within the fragment. All date fields greater than the
     * fragment will be ignored.</p>
     *
     * <p>Asking the seconds of any date will only return the number of seconds of the current
     * minute (resulting in a number between 0 and 59). This method will retrieve the number of
     * seconds for any fragment. For example, if you want to calculate the number of seconds past
     * today, your fragment is Calendar.DATE or Calendar.DAY_OF_YEAR. The result will be all
     * seconds of the past hour(s) and minute(s).</p>
     *
     * <p>Valid fragments are: Calendar.YEAR, Calendar.MONTH, both Calendar.DAY_OF_YEAR and
     * Calendar.DATE, Calendar.HOUR_OF_DAY, Calendar.MINUTE, Calendar.SECOND and
     * Calendar.MILLISECOND. A fragment less than or equal to a SECOND field will return 0.</p>
     *
     * <ul>
     * <li>January 1, 2008 7:15:10.538 with Calendar.MINUTE as fragment will return 10
     * (equivalent to calendar.get(Calendar.SECOND))</li>
     * <li>January 6, 2008 7:15:10.538 with Calendar.MINUTE as fragment will return 10
     * (equivalent to calendar.get(Calendar.SECOND))</li>
     * <li>January 6, 2008 7:15:10.538 with Calendar.DAY_OF_YEAR as fragment will return 26110
     * (7*3600 + 15*60 + 10)</li>
     * <li>January 16, 2008 7:15:10.538 with Calendar.MILLISECOND as fragment will return 0
     * (a millisecond cannot be split in seconds)</li>
     * </ul>
     *
     * @param calendar the calendar to work with, not null
     * @param fragment the {@code Calendar} field part of calendar to calculate
     * @return number of seconds within the fragment of date
     * @throws IllegalArgumentException if the date is <code>null</code> or fragment is not supported
     * @since 2.4
     */
    @GwtIncompatible("incompatible method")
    public static long getFragmentInSeconds(final Calendar calendar, final int fragment) {
        // All fragment variants share one implementation parameterized by unit.
        return getFragment(calendar, fragment, TimeUnit.SECONDS);
    }
}
public class GuiText {
    /**
     * Sets the font options to use to render.
     *
     * @param fontOptions the new font options; must not be null
     */
    public void setFontOptions(FontOptions fontOptions) {
        checkNotNull(fontOptions);
        // Flag a rebuild of the laid-out lines only when a metric-affecting
        // option (bold or scale) actually changed — other options only affect
        // rendering, not layout.
        buildLines = this.fontOptions.isBold() != fontOptions.isBold()
            || this.fontOptions.getFontScale() != fontOptions.getFontScale();
        this.fontOptions = fontOptions;
    }
}
public class Hash {
    /**
     * private static final int DENSITY = 5;
     */
    protected final void checkResize() {
        // Grow only when the table is completely full.
        // NOTE(review): the commented-out "size / table.length > DENSITY"
        // suggests a load-factor policy was replaced by this full-table check;
        // confirm that is intentional.
        if (size == table.length) { // size / table.length > DENSITY
            int forSize = table.length + 1; // size + 1;
            // Walk the prime table in step with doubling capacities and resize
            // to the first prime whose paired capacity exceeds the needed size.
            for (int i = 0, newCapacity = MIN_CAPA; i < PRIMES.length; i++, newCapacity <<= 1) {
                if (newCapacity > forSize) {
                    resize(PRIMES[i]);
                    return;
                }
            }
            // No suitable prime found: silently keep the current capacity.
            return;
        }
    }
}
public class CmsDialogElements {
    /**
     * Updates the enabled/disabled status of all elements of the current page.<p>
     *
     * @throws JspException if there is an error including the error page
     */
    public void actionUpdateElements() throws JspException {
        try {
            List<CmsDialogElement> elementList = computeElements();
            CmsFile file = getCms().readFile(getParamTempfile(), CmsResourceFilter.IGNORE_EXPIRATION);
            CmsXmlPage page = CmsXmlPageFactory.unmarshal(getCms(), file);
            boolean foundMandatory = false;
            m_changeElement = "";
            Iterator<CmsDialogElement> i = elementList.iterator();
            while (i.hasNext()) {
                // get the current list element
                CmsDialogElement element = i.next();
                // An element stays/becomes enabled when it is mandatory, is the
                // element currently being edited, or was checked in the form.
                if (element.isMandantory()
                    || element.getName().equals(getParamElementname())
                    || Boolean.valueOf(getJsp().getRequest().getParameter(PREFIX_PARAM_BODY + element.getName())).booleanValue()) {
                    if (!element.isExisting()) {
                        // create element in order to enable it properly
                        page.addValue(element.getName(), getElementLocale());
                    }
                    page.setEnabled(element.getName(), getElementLocale(), true);
                    // Remember the first mandatory element as the fallback
                    // element to switch the editor to.
                    if (element.isMandantory() && !foundMandatory) {
                        m_changeElement = element.getName();
                        foundMandatory = true;
                    }
                } else {
                    if (element.isExisting()) {
                        // remove element if it is already existing
                        page.removeValue(element.getName(), getElementLocale());
                    }
                }
            }
            // write the temporary file
            file.setContents(page.marshal());
            getCms().writeFile(file);
            // set the javascript functions which should be executed
            if (page.isEnabled(getParamElementname(), getElementLocale())) {
                m_changeElement = getParamElementname();
            } else if (!foundMandatory) {
                if (elementList.size() > 0) {
                    m_changeElement = elementList.get(0).getName();
                }
            }
        } catch (Throwable e) {
            // show error dialog
            setParamMessage(Messages.get().getBundle(getLocale()).key(Messages.ERR_UPDATE_ELEMENTS_0));
            includeErrorpage(this, e);
        }
    }
}
public class CassandraSchemaManager {
    /**
     * Adds column to table if not exists previously i.e. alter table.
     *
     * @param tableInfo the table info
     * @param column the column
     * @throws Exception the exception
     */
    private void addColumnToTable(TableInfo tableInfo, ColumnInfo column) throws Exception {
        CQLTranslator translator = new CQLTranslator();
        // Build: ALTER TABLE <table> ADD <column> <cql-type>
        StringBuilder addColumnQuery = new StringBuilder("ALTER TABLE ");
        translator.ensureCase(addColumnQuery, tableInfo.getTableName(), false);
        addColumnQuery.append(" ADD ");
        translator.ensureCase(addColumnQuery, column.getColumnName(), false);
        addColumnQuery.append(" " + translator.getCQLType(
            CassandraValidationClassMapper.getValidationClass(column.getType(), isCql3Enabled(tableInfo))));
        try {
            KunderaCoreUtils.printQuery(addColumnQuery.toString(), showQuery);
            cassandra_client.execute_cql3_query(
                ByteBuffer.wrap(addColumnQuery.toString().getBytes()), Compression.NONE, ConsistencyLevel.ONE);
        } catch (InvalidRequestException ireforAddColumn) {
            // Cassandra reports "column already exists" via this exact message;
            // in that case the add is a no-op (type alteration is disabled below).
            StringBuilder ireforAddColumnbBuilder = new StringBuilder("Invalid column name ");
            ireforAddColumnbBuilder.append(column.getColumnName() + " because it conflicts with an existing column");
            if (ireforAddColumn.getWhy() != null
                && ireforAddColumn.getWhy().equals(ireforAddColumnbBuilder.toString())) {
                // alterColumnType(tableInfo, translator, column);
            } else {
                log.error("Error occurred while altering column type of table {}, Caused by: .",
                    tableInfo.getTableName(), ireforAddColumn);
                throw new SchemaGenerationException(
                    "Error occurred while adding column into table " + tableInfo.getTableName(),
                    ireforAddColumn, "Cassandra", databaseName);
            }
        }
    }
}
public class PulldownFieldEditor { /** * Add the specified object as a choice . The name will be the
* toString ( ) of the object . */
public void addChoice ( Object choice ) { } } | String name = ( choice == null ) ? "null" : choice . toString ( ) ; addChoice ( new Choice ( name , choice ) ) ; |
public class AbstractSqlBuilder {
    /**
     * Sets a long-typed parameter.
     *
     * @param fieldName the parameter name
     * @param value the parameter value; must not be null
     * @throws IllegalArgumentException if value is null
     */
    public void setLong(String fieldName, Long value) {
        // Reject null eagerly with a message naming the offending field.
        // (Message text intentionally kept verbatim, including its locale.)
        if (value == null) {
            throw new IllegalArgumentException("参数值[" + fieldName + "]不能为NULL.");
        }
        // Field name and value are recorded in parallel, positionally matched.
        fieldList.add(fieldName);
        statementParameter.setLong(value);
    }
}
public class TextWriterStream {
    /**
     * Print data into the inline part of the file. Data is sanitized: newlines are replaced with
     * spaces, and text containing separators is put in quotes. Quotes and escape characters are
     * escaped.
     *
     * @param o object to print
     */
    public void inlinePrint(Object o) {
        // Separate successive inline values.
        if (inline.length() > 0) {
            inline.append(SEPARATOR);
        }
        // remove newlines
        String str = o.toString().replace(NEWLINE, " ");
        // escaping — backslashes MUST be escaped first, otherwise the
        // backslashes introduced by quote-escaping would be doubled again.
        str = str.replace("\\", "\\\\").replace("\"", "\\\"");
        // when needed, add quotes.
        if (str.contains(SEPARATOR)) {
            inline.append('"').append(str).append('"');
        } else {
            inline.append(str);
        }
    }
}
public class NodeTraversingQueryHits { /** * { @ inheritDoc } */
public ScoreNode nextScoreNode ( ) throws IOException { } } | if ( nodes . hasNext ( ) ) { NodeImpl n = ( NodeImpl ) nodes . next ( ) ; return new ScoreNode ( n . getData ( ) . getIdentifier ( ) , 1.0f ) ; } else { return null ; } |
public class SQLiteProgram { /** * Given an array of String bindArgs , this method binds all of them in one single call .
* @ param bindArgs the String array of bind args , none of which must be null . */
public void bindAllArgsAsStrings ( String [ ] bindArgs ) { } } | if ( bindArgs != null ) { for ( int i = bindArgs . length ; i != 0 ; i -- ) { bindString ( i , bindArgs [ i - 1 ] ) ; } } |
public class TurfMeasurement {
    /**
     * Takes an arbitrary {@link Geometry} and calculates a bounding box.
     *
     * @param geometry a {@link Geometry} object
     * @return a double array defining the bounding box in this order {@code [minX, minY, maxX, maxY]}
     * @since 2.0.0
     */
    public static double[] bbox(Geometry geometry) {
        // Dispatch to the type-specific bbox overloads.
        if (geometry instanceof Point) {
            return bbox((Point) geometry);
        } else if (geometry instanceof MultiPoint) {
            return bbox((MultiPoint) geometry);
        } else if (geometry instanceof LineString) {
            return bbox((LineString) geometry);
        } else if (geometry instanceof MultiLineString) {
            return bbox((MultiLineString) geometry);
        } else if (geometry instanceof Polygon) {
            return bbox((Polygon) geometry);
        } else if (geometry instanceof MultiPolygon) {
            return bbox((MultiPolygon) geometry);
        } else if (geometry instanceof GeometryCollection) {
            // Collect the four corners of each member's bbox, then compute the
            // bbox of those corner points.
            List<Point> points = new ArrayList<>();
            for (Geometry geo : ((GeometryCollection) geometry).geometries()) { // recursive
                double[] bbox = bbox(geo);
                points.add(Point.fromLngLat(bbox[0], bbox[1]));
                points.add(Point.fromLngLat(bbox[2], bbox[1]));
                points.add(Point.fromLngLat(bbox[2], bbox[3]));
                points.add(Point.fromLngLat(bbox[0], bbox[3]));
            }
            return TurfMeasurement.bbox(MultiPoint.fromLngLats(points));
        } else {
            throw new RuntimeException(("Unknown geometry class: " + geometry.getClass()));
        }
    }
}
public class DataSinkNode {
    /**
     * Computes this sink's open-branch stack from its predecessor.
     */
    @Override
    public void computeUnclosedBranchStack() {
        // Already computed — the stack is built once per node.
        if (this.openBranches != null) {
            return;
        }
        // we need to track open branches even in the sinks, because they get "closed" when
        // we build a single "root" for the data flow plan
        addClosedBranches(getPredecessorNode().closedBranchingNodes);
        this.openBranches = getPredecessorNode().getBranchesForParent(this.input);
    }
}
public class ARCReader {
    /**
     * Create new arc record.
     *
     * Encapsulates housekeeping that has to do with creating a new record.
     * <p>Call this method at end of constructor to read in the arcfile header. There will be
     * problems reading subsequent arc records if you don't, since the arcfile header has the list
     * of metadata fields for all records that follow.
     * <p>When parsing through ARCs writing out CDX info, we spend about 38% of CPU in here —
     * about 30% of which is in getTokenizedHeaderLine — of which 16% is reading.
     *
     * @param is InputStream to use.
     * @param offset Absolute offset into arc file.
     * @return An arc record.
     * @throws IOException if the record cannot be read (offset is appended to the message)
     */
    protected ARCRecord createArchiveRecord(InputStream is, long offset) throws IOException {
        try {
            String version = super.getVersion();
            ARCRecord record = new ARCRecord(is, getReaderIdentifier(), offset, isDigest(),
                isStrict(), isParseHttpHeaders(), isAlignedOnFirstRecord(), version);
            // Persist the version the first time it becomes known.
            if (version != null && super.getVersion() == null)
                super.setVersion(version);
            currentRecord(record);
        } catch (IOException e) {
            if (e instanceof RecoverableIOException) {
                // Don't mess with RecoverableIOExceptions. Let them out.
                throw e;
            }
            // Rewrap to add the file offset for diagnostics, preserving the
            // original stack trace.
            IOException newE = new IOException(e.getMessage() + " (Offset " + offset + ").");
            newE.setStackTrace(e.getStackTrace());
            throw newE;
        }
        return (ARCRecord) getCurrentRecord();
    }
}
public class Call { /** * Return an xml representation of the method call as specified in
* http : / / www . xmlrpc . com / spec . If flags have been set in the XMLRPCClient
* the returning xml does not comply strict to the standard .
* @ return The string of the xml representing this call .
* @ throws XMLRPCException Will be thrown whenever the xml representation cannot
* be build without errors .
* @ see XMLRPCClient */
public String getXML ( boolean debugMode ) throws XMLRPCException { } } | SimpleXMLCreator creator = new SimpleXMLCreator ( ) ; XmlElement methodCall = new XmlElement ( XMLRPCClient . METHOD_CALL ) ; creator . setRootElement ( methodCall ) ; XmlElement methodName = new XmlElement ( XMLRPCClient . METHOD_NAME ) ; methodName . setContent ( method ) ; methodCall . addChildren ( methodName ) ; if ( params != null && params . length > 0 ) { XmlElement callParams = new XmlElement ( XMLRPCClient . PARAMS ) ; methodCall . addChildren ( callParams ) ; for ( Object o : this . params ) { callParams . addChildren ( getXMLParam ( o ) ) ; } } String result = creator . toString ( ) ; if ( debugMode ) { System . out . println ( result ) ; } return result ; |
public class SynchroData { /** * Read data for a single table and store it .
* @ param is input stream
* @ param table table header */
private void readTable ( InputStream is , SynchroTable table ) throws IOException { } } | int skip = table . getOffset ( ) - m_offset ; if ( skip != 0 ) { StreamHelper . skip ( is , skip ) ; m_offset += skip ; } String tableName = DatatypeConverter . getString ( is ) ; int tableNameLength = 2 + tableName . length ( ) ; m_offset += tableNameLength ; int dataLength ; if ( table . getLength ( ) == - 1 ) { dataLength = is . available ( ) ; } else { dataLength = table . getLength ( ) - tableNameLength ; } SynchroLogger . log ( "READ" , tableName ) ; byte [ ] compressedTableData = new byte [ dataLength ] ; is . read ( compressedTableData ) ; m_offset += dataLength ; Inflater inflater = new Inflater ( ) ; inflater . setInput ( compressedTableData ) ; ByteArrayOutputStream outputStream = new ByteArrayOutputStream ( compressedTableData . length ) ; byte [ ] buffer = new byte [ 1024 ] ; while ( ! inflater . finished ( ) ) { int count ; try { count = inflater . inflate ( buffer ) ; } catch ( DataFormatException ex ) { throw new IOException ( ex ) ; } outputStream . write ( buffer , 0 , count ) ; } outputStream . close ( ) ; byte [ ] uncompressedTableData = outputStream . toByteArray ( ) ; SynchroLogger . log ( uncompressedTableData ) ; m_tableData . put ( table . getName ( ) , uncompressedTableData ) ; |
public class TokensApi { /** * Refresh Token
* Refresh Token
* @ param grantType Grant Type . ( required )
* @ param refreshToken Refresh Token . ( required )
* @ return ApiResponse & lt ; RefreshTokenResponse & gt ;
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public ApiResponse < RefreshTokenResponse > refreshTokenWithHttpInfo ( String grantType , String refreshToken ) throws ApiException { } } | com . squareup . okhttp . Call call = refreshTokenValidateBeforeCall ( grantType , refreshToken , null , null ) ; Type localVarReturnType = new TypeToken < RefreshTokenResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ; |
public class Project { /** * Return an exact copy of this Project . */
public Project duplicate ( ) { } } | Project dup = new Project ( ) ; dup . currentWorkingDirectoryList . addAll ( this . currentWorkingDirectoryList ) ; dup . projectName = this . projectName ; dup . analysisTargets . addAll ( this . analysisTargets ) ; dup . srcDirList . addAll ( this . srcDirList ) ; dup . auxClasspathEntryList . addAll ( this . auxClasspathEntryList ) ; dup . timestampForAnalyzedClasses = timestampForAnalyzedClasses ; dup . guiCallback = guiCallback ; return dup ; |
public class ModelsImpl {

    /**
     * Get one entity role for a given entity.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param entityId entity ID.
     * @param roleId entity role ID.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<EntityRole> getClosedListEntityRoleAsync(UUID appId, String versionId, UUID entityId, UUID roleId, final ServiceCallback<EntityRole> serviceCallback) {
        // Delegate to the WithServiceResponse variant and adapt its result to
        // a ServiceFuture, wiring the caller's callback for completion.
        return ServiceFuture.fromResponse(getClosedListEntityRoleWithServiceResponseAsync(appId, versionId, entityId, roleId), serviceCallback);
    }
}
public class TransactionLogger { /** * Log details of component processing ( including processing time ) to debug for current instance
* @ param type - of component
* @ param details - of component processing */
public static void logComponent ( final String type , final String details ) { } } | TransactionLogger instance = getInstance ( ) ; logComponent ( type , details , instance ) ; |
public class RubyDependencyResolver { /** * in such cases , look for the relevant gem file in the cache with the highest version */
private String findGemVersion ( String gemName , String pathToGems ) { } } | String version = null ; File maxVersionFile = findMaxVersionFile ( gemName , pathToGems ) ; if ( maxVersionFile != null ) { String fileName = maxVersionFile . getName ( ) ; version = getVersionFromFileName ( fileName , gemName ) ; } return version ; |
public class DrizzleResultSet { /** * Retrieves the value of the designated column in the current row of this < code > ResultSet < / code > object as a stream
* of uninterpreted bytes . The value can then be read in chunks from the stream . This method is particularly
* suitable for retrieving large < code > LONGVARBINARY < / code > values .
* < P > < B > Note : < / B > All the data in the returned stream must be read prior to getting the value of any other column .
* The next call to a getter method implicitly closes the stream . Also , a stream may return < code > 0 < / code > when the
* method < code > InputStream . available < / code > is called whether there is data available or not .
* @ param columnIndex the first column is 1 , the second is 2 , . . .
* @ return a Java input stream that delivers the database column value as a stream of uninterpreted bytes ; if the
* value is SQL < code > NULL < / code > , the value returned is < code > null < / code >
* @ throws java . sql . SQLException if the columnIndex is not valid ; if a database access error occurs or this method
* is called on a closed result set */
public InputStream getBinaryStream ( final int columnIndex ) throws SQLException { } } | if ( protocol . supportsPBMS ( ) ) { try { return getValueObject ( columnIndex ) . getPBMSStream ( protocol ) ; } catch ( QueryException e ) { throw SQLExceptionMapper . get ( e ) ; } catch ( IOException e ) { throw SQLExceptionMapper . getSQLException ( "Could not read back the data using http" , e ) ; } } return getValueObject ( columnIndex ) . getBinaryInputStream ( ) ; |
public class WebAppDescriptorImpl { /** * If not already created , a new < code > service - ref < / code > element will be created and returned .
* Otherwise , the first existing < code > service - ref < / code > element will be returned .
* @ return the instance defined for the element < code > service - ref < / code > */
public ServiceRefType < WebAppDescriptor > getOrCreateServiceRef ( ) { } } | List < Node > nodeList = model . get ( "service-ref" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new ServiceRefTypeImpl < WebAppDescriptor > ( this , "service-ref" , model , nodeList . get ( 0 ) ) ; } return createServiceRef ( ) ; |
public class CachingResourceLoaderImpl {

    /* (non-Javadoc)
     * @see org.apereo.portal.utils.cache.CachingResourceLoader#getResource(org.springframework.core.io.Resource, org.apereo.portal.utils.cache.ResourceBuilder, org.apereo.portal.utils.cache.ResourceLoaderOptions)
     */
    @Override
    public <T> CachedResource<T> getResource(Resource resource, Loader<T> builder, long checkInterval) throws IOException {
        // When aggregation is disabled (PLAIN includes), bypass the cache and
        // load the resource fresh on every call.
        if (Included.PLAIN == this.resourcesElementsProvider.getDefaultIncludedType()) {
            this.logger.trace("Resoure Aggregation Disabled, ignoring resource cache and loading '" + resource + "' directly");
            return this.loadResource(resource, builder);
        }

        // Look for the resource in the cache, since it has been wrapped with a SelfPopulatingCache
        // it should never return null.
        final GetResourceArguments<T> arguments = new GetResourceArguments<T>(resource, builder);
        final Element element = this.entryFactory.getWithData(this.resourceCache, resource, arguments);
        CachedResource<T> cachedResource = (CachedResource<T>) element.getObjectValue();
        if (this.logger.isTraceEnabled()) {
            this.logger.trace("Found " + cachedResource + " in cache");
        }

        // Found it; if the last check happened within checkInterval ms, the
        // entry is considered fresh and returned without a modification check.
        final long lastCheckTime = cachedResource.getLastCheckTime();
        if (lastCheckTime + checkInterval >= System.currentTimeMillis()) {
            if (this.logger.isTraceEnabled()) {
                this.logger.trace(cachedResource + " is within checkInterval " + checkInterval + ", returning");
            }
            return cachedResource;
        }

        if (this.logger.isTraceEnabled()) {
            this.logger.trace(cachedResource + " is older than checkInterval " + checkInterval + ", checking for modification");
        }

        // If the resource has not been modified, refresh the check timestamp
        // and return the cached resource.
        final boolean resourceModified = this.checkIfModified(cachedResource);
        if (!resourceModified) {
            cachedResource.setLastCheckTime(System.currentTimeMillis());
            this.resourceCache.put(element); // do a cache put to notify the cache the object has been modified
            return cachedResource;
        }

        // The resource has been modified, reload it.
        cachedResource = this.loadResource(resource, builder);

        // Cache the freshly loaded resource under the same key.
        this.resourceCache.put(new Element(resource, cachedResource));
        if (this.logger.isDebugEnabled()) {
            this.logger.debug("Loaded and cached " + cachedResource);
        }
        return cachedResource;
    }
}
public class TransactionTagSupport { /** * Rollbacks the transaction and rethrows the Throwable . */
public void doCatch ( Throwable t ) throws Throwable { } } | if ( conn != null ) { try { conn . rollback ( ) ; } catch ( SQLException e ) { // Ignore to not hide orignal exception
} } throw t ; |
public class RandomCompat { /** * Returns a stream producing the given { @ code streamSize } number of
* pseudorandom { @ code int } values .
* < p > A pseudorandom { @ code int } value is generated as if it ' s the result of
* calling the method { @ link Random # nextInt ( ) }
* @ param streamSize the number of values to generate
* @ return a stream of pseudorandom { @ code int } values
* @ throws IllegalArgumentException if { @ code streamSize } is
* less than zero */
@ NotNull public IntStream ints ( long streamSize ) { } } | if ( streamSize < 0L ) throw new IllegalArgumentException ( ) ; if ( streamSize == 0L ) { return IntStream . empty ( ) ; } return ints ( ) . limit ( streamSize ) ; |
public class MasterWorkerInfo { /** * Adds new worker lost storage paths .
* @ param lostStorage the lost storage to add */
public void addLostStorage ( Map < String , StorageList > lostStorage ) { } } | for ( Map . Entry < String , StorageList > entry : lostStorage . entrySet ( ) ) { List < String > paths = mLostStorage . getOrDefault ( entry . getKey ( ) , new ArrayList < > ( ) ) ; paths . addAll ( entry . getValue ( ) . getStorageList ( ) ) ; mLostStorage . put ( entry . getKey ( ) , paths ) ; } |
public class RegisteredResources { /** * Informs the caller if a single 1PC CAPABLE resource is enlisted in this unit of work . */
public boolean isOnlyAgent ( ) { } } | final boolean result = ( _resourceObjects . size ( ) == 1 && _resourceObjects . get ( 0 ) instanceof ResourceSupportsOnePhaseCommit ) ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "isOnlyAgent" , result ) ; return result ; |
public class Application { /** * Get the path to the base servlet .
* NOTE : This DOES NOT get the servlet path , it gets the path up to the servlet .
* ie . , if the servlet is http : / / www . xyz . com : 3433 / abc / tourgeek I return http : / / www . xyz . com : 3433 / abc .
* This is useful for accessing the proxy or report generator .
* @ return The servlet path ( NOT including the trailing ' / ' . */
public String getBaseServletPath ( ) { } } | URL urlCodeBase = this . getCodeBase ( null ) ; String strCodeBase = urlCodeBase . toString ( ) ; if ( strCodeBase != null ) if ( strCodeBase . endsWith ( "/" ) ) strCodeBase = strCodeBase . substring ( 0 , strCodeBase . length ( ) - 1 ) ; return strCodeBase ; |
public class OperationsClient { /** * Lists operations that match the specified filter in the request . If the server doesn ' t support
* this method , it returns ` UNIMPLEMENTED ` .
* < p > NOTE : the ` name ` binding below allows API services to override the binding to use different
* resource name schemes , such as ` users / & # 42 ; / operations ` .
* < p > Sample code :
* < pre > < code >
* try ( OperationsClient operationsClient = OperationsClient . create ( ) ) {
* String name = " " ;
* String filter = " " ;
* for ( Operation element : operationsClient . listOperations ( name , filter ) . iterateAll ( ) ) {
* / / doThingsWith ( element ) ;
* < / code > < / pre >
* @ param name The name of the operation collection .
* @ param filter The standard list filter .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final ListOperationsPagedResponse listOperations ( String name , String filter ) { } } | ListOperationsRequest request = ListOperationsRequest . newBuilder ( ) . setName ( name ) . setFilter ( filter ) . build ( ) ; return listOperations ( request ) ; |
public class FraggleManager { /** * Peeks the last fragment in the Fragment stack .
* @ return Last Fragment in the fragment stack
* @ throws java . lang . NullPointerException if there is no Fragment Added */
protected FraggleFragment peek ( ) { } } | if ( fm . getBackStackEntryCount ( ) > 0 ) { return ( ( FraggleFragment ) fm . findFragmentByTag ( fm . getBackStackEntryAt ( fm . getBackStackEntryCount ( ) - 1 ) . getName ( ) ) ) ; } else { return new EmptyFragment ( ) ; } |
public class UriProcessor { /** * < p > Accepts the { @ link InvocationContext } along with the { @ link HttpRequestBase } and forms
* the complete request URI by appending the request subpath to the root path defined on the
* endpoint . Any placeholders in the URI are replaced with their matching path parameters found
* in the request arguments annotated with @ { @ link PathParam } . < / p >
* < p > Any processors which extract information from the < i > complete < / i > request URI or those which
* seek to manipulate the URI should use this processor as a prerequisite . < / p >
* < p > See { @ link AbstractRequestProcessor # process ( InvocationContext , HttpRequestBase ) } . < / p >
* @ param context
* the { @ link InvocationContext } used to discover root and subpath information
* < br > < br >
* @ param request
* the { @ link HttpRequestBase } whose URI will be initialized to the complete URI formulated
* using the endpoint ' s root path , the request ' s subpath and any path parameters
* < br > < br >
* @ return the same instance of { @ link HttpRequestBase } which was given for processing the URI
* < br > < br >
* @ throws RequestProcessorException
* if a URI failed to be created using the information found on the endpoint definition
* < br > < br >
* @ since 1.3.0 */
@ Override protected HttpRequestBase process ( InvocationContext context , HttpRequestBase request ) { } } | try { Endpoint endpoint = context . getEndpoint ( ) . getAnnotation ( Endpoint . class ) ; String path = endpoint . value ( ) + Metadata . findPath ( context . getRequest ( ) ) ; List < Entry < PathParam , Object > > pathParams = Metadata . onParams ( PathParam . class , context ) ; for ( Entry < PathParam , Object > entry : pathParams ) { String name = entry . getKey ( ) . value ( ) ; Object value = entry . getValue ( ) ; if ( ! ( value instanceof CharSequence ) ) { StringBuilder errorContext = new StringBuilder ( ) . append ( "Path parameters can only be of type " ) . append ( CharSequence . class . getName ( ) ) . append ( ". Please consider implementing CharSequence " ) . append ( "and providing a meaningful toString() representation for the " ) . append ( "<name> of the path parameter. " ) ; throw new RequestProcessorException ( new IllegalArgumentException ( errorContext . toString ( ) ) ) ; } path = path . replaceAll ( Pattern . quote ( "{" + name + "}" ) , ( ( CharSequence ) value ) . toString ( ) ) ; } request . setURI ( URI . create ( path ) ) ; return request ; } catch ( Exception e ) { throw new RequestProcessorException ( context , getClass ( ) , e ) ; } |
public class CmsRequestUtil { /** * Parses a request of the form < code > multipart / form - data < / code > .
* The result list will contain items of type < code > { @ link FileItem } < / code > .
* If the request is not of type < code > multipart / form - data < / code > , then < code > null < / code > is returned . < p >
* @ param request the HTTP servlet request to parse
* @ return the list of < code > { @ link FileItem } < / code > extracted from the multipart request ,
* or < code > null < / code > if the request was not of type < code > multipart / form - data < / code > */
public static List < FileItem > readMultipartFileItems ( HttpServletRequest request ) { } } | return readMultipartFileItems ( request , OpenCms . getSystemInfo ( ) . getPackagesRfsPath ( ) ) ; |
public class FacebookDialog { /** * Determines whether the version of the Facebook application installed on the user ' s device is recent
* enough to support specific features of the native Open Graph action dialog , which in turn may be used to
* determine which UI , etc . , to present to the user .
* @ param context the calling Context
* @ param features zero or more features to check for ; { @ link OpenGraphActionDialogFeature # OG _ ACTION _ DIALOG } is implicitly
* checked if not explicitly specified
* @ return true if all of the specified features are supported by the currently installed version of the
* Facebook application ; false if any of the features are not supported */
public static boolean canPresentOpenGraphActionDialog ( Context context , OpenGraphActionDialogFeature ... features ) { } } | return handleCanPresent ( context , EnumSet . of ( OpenGraphActionDialogFeature . OG_ACTION_DIALOG , features ) ) ; |
public class FilterUtil { /** * Find the first " label - like " column ( matching { @ link TypeUtil # GUESSED _ LABEL }
* ) in a type information .
* @ param meta Meta data
* @ return Column number , or { @ code - 1 } . */
public static int findLabelColumn ( BundleMeta meta ) { } } | for ( int i = 0 ; i < meta . size ( ) ; i ++ ) { if ( TypeUtil . GUESSED_LABEL . isAssignableFromType ( meta . get ( i ) ) ) { return i ; } } return - 1 ; |
public class CommerceDiscountUsageEntryUtil {

    /**
     * Returns a range of all the commerce discount usage entries where
     * groupId = &#63;.
     *
     * Useful when paginating results. Returns a maximum of
     * <code>end - start</code> instances. <code>start</code> and
     * <code>end</code> are not primary keys, they are indexes in the result
     * set; thus <code>0</code> refers to the first result. Setting both to
     * {@link QueryUtil#ALL_POS} returns the full result set. When pagination
     * is required the default ORDER BY logic from
     * {@link CommerceDiscountUsageEntryModelImpl} is applied; otherwise, for
     * performance, the result set is sorted by primary key ascending.
     *
     * @param groupId the group ID
     * @param start the lower bound of the range of commerce discount usage entries
     * @param end the upper bound of the range of commerce discount usage entries (not inclusive)
     * @return the range of matching commerce discount usage entries
     */
    public static List<CommerceDiscountUsageEntry> findByGroupId(long groupId, int start, int end) {
        // Thin static facade: all query logic lives in the persistence bean.
        return getPersistence().findByGroupId(groupId, start, end);
    }
}
public class Attribute {

    /**
     * This method writes the 16 bit name constant index followed by the
     * 32 bit attribute length, followed by the attribute specific data.
     *
     * @param dout destination for the encoded attribute
     * @throws IOException if writing to the underlying DataOutput fails
     */
    public final void writeTo(DataOutput dout) throws IOException {
        // 16-bit name constant index.
        dout.writeShort(mNameConstant.getIndex());
        // 32-bit attribute length.
        dout.writeInt(getLength());
        // Attribute-specific payload, written by the subclass.
        writeDataTo(dout);
    }
}
public class ReactionSetManipulator { /** * Get all Reactions object containing a Molecule from a set of Reactions .
* @ param reactSet The set of reaction to inspect
* @ param molecule The molecule to find
* @ return The IReactionSet */
public static IReactionSet getRelevantReactions ( IReactionSet reactSet , IAtomContainer molecule ) { } } | IReactionSet newReactSet = reactSet . getBuilder ( ) . newInstance ( IReactionSet . class ) ; IReactionSet reactSetProd = getRelevantReactionsAsProduct ( reactSet , molecule ) ; for ( IReaction reaction : reactSetProd . reactions ( ) ) newReactSet . addReaction ( reaction ) ; IReactionSet reactSetReact = getRelevantReactionsAsReactant ( reactSet , molecule ) ; for ( IReaction reaction : reactSetReact . reactions ( ) ) newReactSet . addReaction ( reaction ) ; return newReactSet ; |
public class StringNumberParser {

    /**
     * Parse given text as a formatted time span and return the time in
     * milliseconds.<br><br>Formats supported:<br>milliseconds - ms<br>
     * seconds - without suffix<br>minutes - m<br>hours - h<br>days - d<br>
     * weeks - w
     *
     * @param text time text, e.g. "500ms", "2m", "7"
     * @return time in milliseconds
     * @throws NumberFormatException if the numeric part is not a valid long
     */
    static public long parseTime(final String text) throws NumberFormatException {
        // "ms" must be tested before the single-letter suffixes because a
        // value like "500ms" also ends with "s".
        if (text.endsWith("ms")) {
            return Long.parseLong(text.substring(0, text.length() - 2));
        } else if (text.endsWith("s")) {
            return Long.parseLong(text.substring(0, text.length() - 1)) * 1000L;
        } else if (text.endsWith("m")) {
            return Long.parseLong(text.substring(0, text.length() - 1)) * 60000L; // 1000 * 60
        } else if (text.endsWith("h")) {
            return Long.parseLong(text.substring(0, text.length() - 1)) * 3600000L; // 1000 * 60 * 60
        } else if (text.endsWith("d")) {
            return Long.parseLong(text.substring(0, text.length() - 1)) * 86400000L; // 1000 * 60 * 60 * 24
        } else if (text.endsWith("w")) {
            // The old comment here claimed one day; 604800000 is seven days.
            return Long.parseLong(text.substring(0, text.length() - 1)) * 604800000L; // 1000 * 60 * 60 * 24 * 7
        } else {
            // seconds by default
            return Long.parseLong(text) * 1000L;
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.