signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class POIProxy { /** * This method is used to get the pois from a category and return a list of
* { @ link JTSFeature } document with the data retrieved given a bounding box
* corners
* @ param id
* The id of the service
* @ param minX
* @ param minY
* @ param maxX
* @ param maxY
* @ return A list of { @ link JTSFeature } */
public ArrayList < JTSFeature > getFeaturesByCategory ( String category , double minX , double minY , double maxX , double maxY , List < Param > optionalParams ) throws Exception { } } | List < String > ids = serviceManager . getAvailableServices ( ) . getServicesIDByCategory ( category ) ; ArrayList < JTSFeature > features = new ArrayList < JTSFeature > ( ) ; for ( String id : ids ) { try { features . addAll ( getFeatures ( id , minX , minY , maxX , maxY , optionalParams ) ) ; } catch ( Exception e ) { logger . error ( "POIProxy" , e ) ; } } return features ; |
public class TabDemoModel { private Dockable dockable ( String modelKey ) { } } | return Dockable . create ( ) . modelKey ( Key . create ( SimpleModel . class , "Tab" + modelKey ) ) . name ( "Tab" + modelKey ) ; |
public class Result { /** * Accept Arrays and mark as empty or not
* @ param value
* @ return */
public static < R > Result < R [ ] > ok ( R value [ ] ) { } } | return new Result < R [ ] > ( value , OK , SUCCESS , null ) . emptyList ( value . length == 0 ) ; |
public class ReportGenerator { /** * Writes the dependency - check report ( s ) .
* @ param outputLocation the path where the reports should be written
* @ param format the format the report should be written in ( XML , HTML , ALL )
* @ throws ReportException is thrown if there is an error creating out the
* reports */
public void write ( String outputLocation , Format format ) throws ReportException { } } | if ( format == Format . ALL ) { for ( Format f : Format . values ( ) ) { if ( f != Format . ALL ) { write ( outputLocation , f ) ; } } } else { final File out = getReportFile ( outputLocation , format ) ; final String templateName = format . toString ( ) . toLowerCase ( ) + "Report" ; processTemplate ( templateName , out ) ; if ( format == Format . JSON ) { pretifyJson ( out . getPath ( ) ) ; } } |
public class TrueTypeFont { /** * Reads the font data .
* @ param ttfAfm the font as a < CODE > byte < / CODE > array , possibly < CODE > null < / CODE >
* @ throws DocumentException the font is invalid
* @ throws IOException the font file could not be read
* @ since2.1.5 */
void process ( byte ttfAfm [ ] , boolean preload ) throws DocumentException , IOException { } } | tables = new HashMap ( ) ; try { if ( ttfAfm == null ) rf = new RandomAccessFileOrArray ( fileName , preload , Document . plainRandomAccess ) ; else rf = new RandomAccessFileOrArray ( ttfAfm ) ; if ( ttcIndex . length ( ) > 0 ) { int dirIdx = Integer . parseInt ( ttcIndex ) ; if ( dirIdx < 0 ) throw new DocumentException ( "The font index for " + fileName + " must be positive." ) ; String mainTag = readStandardString ( 4 ) ; if ( ! mainTag . equals ( "ttcf" ) ) throw new DocumentException ( fileName + " is not a valid TTC file." ) ; rf . skipBytes ( 4 ) ; int dirCount = rf . readInt ( ) ; if ( dirIdx >= dirCount ) throw new DocumentException ( "The font index for " + fileName + " must be between 0 and " + ( dirCount - 1 ) + ". It was " + dirIdx + "." ) ; rf . skipBytes ( dirIdx * 4 ) ; directoryOffset = rf . readInt ( ) ; } rf . seek ( directoryOffset ) ; int ttId = rf . readInt ( ) ; if ( ttId != 0x00010000 && ttId != 0x4F54544F ) throw new DocumentException ( fileName + " is not a valid TTF or OTF file." ) ; int num_tables = rf . readUnsignedShort ( ) ; rf . skipBytes ( 6 ) ; for ( int k = 0 ; k < num_tables ; ++ k ) { String tag = readStandardString ( 4 ) ; rf . skipBytes ( 4 ) ; int table_location [ ] = new int [ 2 ] ; table_location [ 0 ] = rf . readInt ( ) ; table_location [ 1 ] = rf . readInt ( ) ; tables . put ( tag , table_location ) ; } checkCff ( ) ; fontName = getBaseFont ( ) ; fullName = getNames ( 4 ) ; // full name
familyName = getNames ( 1 ) ; // family name
allNameEntries = getAllNames ( ) ; if ( ! justNames ) { fillTables ( ) ; readGlyphWidths ( ) ; readCMaps ( ) ; readKerning ( ) ; readBbox ( ) ; // GlyphWidths = null ;
} } finally { if ( rf != null ) { rf . close ( ) ; if ( ! embedded ) rf = null ; } } |
public class CommerceWarehouseLocalServiceUtil { /** * Adds the commerce warehouse to the database . Also notifies the appropriate model listeners .
* @ param commerceWarehouse the commerce warehouse
* @ return the commerce warehouse that was added */
public static com . liferay . commerce . model . CommerceWarehouse addCommerceWarehouse ( com . liferay . commerce . model . CommerceWarehouse commerceWarehouse ) { } } | return getService ( ) . addCommerceWarehouse ( commerceWarehouse ) ; |
public class BasicScope { /** * Remove event listener from list of listeners
* @ param listener
* Listener to remove
* @ return true if listener is removed and false otherwise */
public boolean removeEventListener ( IEventListener listener ) { } } | log . debug ( "removeEventListener - scope: {} {}" , getName ( ) , listener ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( "Listeners - check #1: {}" , listeners ) ; } boolean removed = listeners . remove ( listener ) ; if ( ! keepOnDisconnect ) { if ( removed && keepAliveJobName == null ) { if ( ScopeUtils . isRoom ( this ) && listeners . isEmpty ( ) ) { // create job to kill the scope off if no listeners join within the delay
ISchedulingService schedulingService = ( ISchedulingService ) parent . getContext ( ) . getBean ( ISchedulingService . BEAN_NAME ) ; // by default keep a scope around for a fraction of a second
keepAliveJobName = schedulingService . addScheduledOnceJob ( ( keepDelay > 0 ? keepDelay * 1000 : 100 ) , new KeepAliveJob ( this ) ) ; } } } else { log . trace ( "Scope: {} is exempt from removal when empty" , getName ( ) ) ; } if ( log . isTraceEnabled ( ) ) { log . trace ( "Listeners - check #2: {}" , listeners ) ; } return removed ; |
public class PrimitiveTypeMapper { /** * Translate primitive type names ( such as int , long , boolean ) to Object
* type names ( such as Integer , Long , Boolean ) . If the primitive type is not
* defined this method will return the primitiveTypeName . */
public String mapPrimitiveType2ObjectTypeName ( String primitiveTypeName ) { } } | if ( primitive2ObjectTypeNameMap . containsKey ( primitiveTypeName ) ) { return primitive2ObjectTypeNameMap . get ( primitiveTypeName ) ; } else { return primitiveTypeName ; } |
public class UpdateIntegrationRequest { /** * A key - value map specifying request parameters that are passed from the method request to the backend . The key is
* an integration request parameter name and the associated value is a method request parameter value or static
* value that must be enclosed within single quotes and pre - encoded as required by the backend . The method request
* parameter value must match the pattern of method . request . { location } . { name } , where { location } is querystring ,
* path , or header ; and { name } must be a valid and unique method request parameter name .
* @ param requestParameters
* A key - value map specifying request parameters that are passed from the method request to the backend . The
* key is an integration request parameter name and the associated value is a method request parameter value
* or static value that must be enclosed within single quotes and pre - encoded as required by the backend . The
* method request parameter value must match the pattern of method . request . { location } . { name } , where
* { location } is querystring , path , or header ; and { name } must be a valid and unique method request parameter
* name .
* @ return Returns a reference to this object so that method calls can be chained together . */
public UpdateIntegrationRequest withRequestParameters ( java . util . Map < String , String > requestParameters ) { } } | setRequestParameters ( requestParameters ) ; return this ; |
public class CollectMethodsReturningImmutableCollections { /** * overrides the visitor to reset the stack for the new method , then checks if the immutability field is set to immutable and if so reports it
* @ param obj
* the context object of the currently parsed method */
@ Override public void visitCode ( Code obj ) { } } | try { String signature = SignatureUtils . getReturnSignature ( getMethod ( ) . getSignature ( ) ) ; if ( signature . startsWith ( Values . SIG_QUALIFIED_CLASS_PREFIX ) && CollectionUtils . isListSetMap ( SignatureUtils . stripSignature ( signature ) ) ) { stack . resetForMethodEntry ( this ) ; imType = ImmutabilityType . UNKNOWN ; super . visitCode ( obj ) ; if ( ( imType == ImmutabilityType . IMMUTABLE ) || ( imType == ImmutabilityType . POSSIBLY_IMMUTABLE ) ) { Method m = getMethod ( ) ; Statistics . getStatistics ( ) . addImmutabilityStatus ( clsName , m . getName ( ) , m . getSignature ( ) , imType ) ; } } } catch ( ClassNotFoundException cnfe ) { bugReporter . reportMissingClass ( cnfe ) ; } |
public class AmazonRekognitionClient { /** * Provides information about a stream processor created by < a > CreateStreamProcessor < / a > . You can get information
* about the input and output streams , the input parameters for the face recognition being performed , and the
* current status of the stream processor .
* @ param describeStreamProcessorRequest
* @ return Result of the DescribeStreamProcessor operation returned by the service .
* @ throws AccessDeniedException
* You are not authorized to perform the action .
* @ throws InternalServerErrorException
* Amazon Rekognition experienced a service issue . Try your call again .
* @ throws ThrottlingException
* Amazon Rekognition is temporarily unable to process the request . Try your call again .
* @ throws InvalidParameterException
* Input parameter violated a constraint . Validate your parameter before calling the API operation again .
* @ throws ResourceNotFoundException
* The collection specified in the request cannot be found .
* @ throws ProvisionedThroughputExceededException
* The number of requests exceeded your throughput limit . If you want to increase this limit , contact Amazon
* Rekognition .
* @ sample AmazonRekognition . DescribeStreamProcessor */
@ Override public DescribeStreamProcessorResult describeStreamProcessor ( DescribeStreamProcessorRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeStreamProcessor ( request ) ; |
public class NativeJSON { /** * # string _ id _ map # */
@ Override protected int findPrototypeId ( String s ) { } } | int id ; // # generated # Last update : 2009-05-25 16:01:00 EDT
{ id = 0 ; String X = null ; L : switch ( s . length ( ) ) { case 5 : X = "parse" ; id = Id_parse ; break L ; case 8 : X = "toSource" ; id = Id_toSource ; break L ; case 9 : X = "stringify" ; id = Id_stringify ; break L ; } if ( X != null && X != s && ! X . equals ( s ) ) id = 0 ; } // # / generated #
return id ; |
public class EmbeddedCDXServerIndex { /** * create { @ link CDXWriter } for writing capture search result .
* < p > possible future changes :
* < ul >
* < li > drop unused argument { @ code waybackAuthToken } < / li >
* < li > change return type to super class ( as far up as appropriate ) < / li >
* < / ul >
* @ param wbRequest { @ link WaybackRequest } for configuring { @ link CDXQuery }
* @ param waybackAuthToken unused
* @ param isFuzzy { @ code true } to enable fuzzy query
* @ return CDXCaptureSearchResultWriter */
protected CDXToCaptureSearchResultsWriter getCaptureSearchWriter ( WaybackRequest wbRequest , AuthToken waybackAuthToken , boolean isFuzzy ) { } } | final CDXQuery query = createQuery ( wbRequest , isFuzzy ) ; if ( isFuzzy && query == null ) { return null ; } boolean resolveRevisits = wbRequest . isReplayRequest ( ) ; // For now , not using seek single capture to allow for run time checking of additional records
// boolean seekSingleCapture = resolveRevisits & & wbRequest . isTimestampSearchKey ( ) ;
boolean seekSingleCapture = false ; // boolean seekSingleCapture = resolveRevisits & & ( wbRequest . isTimestampSearchKey ( ) | | ( wbRequest . isBestLatestReplayRequest ( ) & & ! wbRequest . hasMementoAcceptDatetime ( ) ) ) ;
CDXToCaptureSearchResultsWriter captureWriter = new CDXToCaptureSearchResultsWriter ( query , resolveRevisits , seekSingleCapture , preferContains ) ; captureWriter . setTargetTimestamp ( wbRequest . getReplayTimestamp ( ) ) ; captureWriter . setSelfRedirFilter ( selfRedirFilter ) ; if ( "true" . equals ( wbRequest . get ( REQUEST_REVISIT_LOOKUP ) ) ) { captureWriter . setIncludeBlockedCaptures ( true ) ; } return captureWriter ; |
public class Option { /** * Read options .
* @ return true , if successful */
public boolean readOptions ( ) { } } | String filename = modelDir + File . separator + optionFile ; BufferedReader fin = null ; String line ; try { fin = new BufferedReader ( new FileReader ( filename ) ) ; System . out . println ( "Reading options ..." ) ; // read option lines
while ( ( line = fin . readLine ( ) ) != null ) { String trimLine = line . trim ( ) ; if ( trimLine . startsWith ( "#" ) ) { // comment line
continue ; } // System . out . println ( line ) ;
StringTokenizer strTok = new StringTokenizer ( line , "= \t\r\n" ) ; int len = strTok . countTokens ( ) ; if ( len != 2 ) { // invalid parameter line , ignore it
continue ; } String strOpt = strTok . nextToken ( ) ; String strVal = strTok . nextToken ( ) ; if ( strOpt . compareToIgnoreCase ( "trainDataFile" ) == 0 ) { trainDataFile = strVal ; } else if ( strOpt . compareToIgnoreCase ( "testDataFile" ) == 0 ) { testDataFile = strVal ; } else if ( strOpt . compareToIgnoreCase ( "isLogging" ) == 0 ) { if ( ! ( strVal . compareToIgnoreCase ( "true" ) == 0 || strVal . compareToIgnoreCase ( "false" ) == 0 ) ) { continue ; } isLogging = Boolean . valueOf ( strVal ) . booleanValue ( ) ; } else if ( strOpt . compareToIgnoreCase ( "cpRareThreshold" ) == 0 ) { int numTemp = Integer . parseInt ( strVal ) ; cpRareThreshold = numTemp ; } else if ( strOpt . compareToIgnoreCase ( "fRareThreshold" ) == 0 ) { int numTemp = Integer . parseInt ( strVal ) ; fRareThreshold = numTemp ; } else if ( strOpt . compareToIgnoreCase ( "numIterations" ) == 0 ) { int numTemp = Integer . parseInt ( strVal ) ; numIterations = numTemp ; } else if ( strOpt . compareToIgnoreCase ( "initLambdaVal" ) == 0 ) { double numTemp = Double . parseDouble ( strVal ) ; initLambdaVal = numTemp ; } else if ( strOpt . compareToIgnoreCase ( "sigmaSquare" ) == 0 ) { double numTemp = Double . parseDouble ( strVal ) ; sigmaSquare = numTemp ; } else if ( strOpt . compareToIgnoreCase ( "epsForConvergence" ) == 0 ) { double numTemp = Double . parseDouble ( strVal ) ; epsForConvergence = numTemp ; } else if ( strOpt . compareToIgnoreCase ( "mForHessian" ) == 0 ) { int numTemp = Integer . parseInt ( strVal ) ; mForHessian = numTemp ; } else if ( strOpt . compareToIgnoreCase ( "evaluateDuringTraining" ) == 0 ) { if ( ! ( strVal . compareToIgnoreCase ( "true" ) == 0 || strVal . compareToIgnoreCase ( "false" ) == 0 ) ) { continue ; } evaluateDuringTraining = Boolean . valueOf ( strVal ) . booleanValue ( ) ; } else if ( strOpt . compareToIgnoreCase ( "saveBestModel" ) == 0 ) { if ( ! ( strVal . compareToIgnoreCase ( "true" ) == 0 || strVal . 
compareToIgnoreCase ( "false" ) == 0 ) ) { continue ; } saveBestModel = Boolean . valueOf ( strVal ) . booleanValue ( ) ; } else if ( strOpt . compareToIgnoreCase ( "trainLogFile" ) == 0 ) { trainLogFile = strVal ; // for future use
} else if ( strOpt . compareToIgnoreCase ( "modelFile" ) == 0 ) { modelFile = strVal ; } else { // for future use
} } System . out . println ( "Reading options completed!" ) ; } catch ( IOException e ) { System . out . println ( e . toString ( ) ) ; return false ; } return true ; |
public class ReflectionUtils { /** * Check if the field supplied is parameterized with a valid JCR property type .
* @ param field the field
* @ return true if the field is parameterized with a valid JCR property type , else false
* @ deprecated this class is unused in morphia and will be removed in a future release */
public static boolean isFieldParameterizedWithPropertyType ( final Field field ) { } } | if ( field . getGenericType ( ) instanceof ParameterizedType ) { final ParameterizedType genericType = ( ParameterizedType ) field . getGenericType ( ) ; for ( final Type type : genericType . getActualTypeArguments ( ) ) { if ( isPropertyType ( ( Class ) type ) ) { return true ; } } } return false ; |
public class SignatureDef { /** * < pre >
* Named output parameters .
* < / pre >
* < code > map & lt ; string , . tensorflow . TensorInfo & gt ; outputs = 2 ; < / code > */
public boolean containsOutputs ( java . lang . String key ) { } } | if ( key == null ) { throw new java . lang . NullPointerException ( ) ; } return internalGetOutputs ( ) . getMap ( ) . containsKey ( key ) ; |
public class AbstractSecureSocketHandler { /** * / * ( non - Javadoc )
* @ see org . openhealthtools . ihe . atna . nodeauth . SocketHandler # getSocket ( java . lang . String , int , boolean , org . openhealthtools . ihe . atna . nodeauth . SecurityDomain ) */
public Socket getSocket ( String host , int port , boolean useSecureSocket , SecurityDomain securityDomain ) throws Exception { } } | return getSocket ( host , port , useSecureSocket , securityDomain , null ) ; |
public class DiagramElement { /** * Sets the value of the style property .
* @ param value
* allowed object is
* { @ link JAXBElement } { @ code < } { @ link DMNStyle } { @ code > }
* { @ link JAXBElement } { @ code < } { @ link Style } { @ code > } */
public void setStyle ( org . kie . dmn . model . api . dmndi . Style value ) { } } | this . style = value ; |
public class ReceiveMessageRequest { /** * A list of s that need to be returned along with each message . These attributes include :
* < ul >
* < li >
* < code > All < / code > - Returns all values .
* < / li >
* < li >
* < code > ApproximateFirstReceiveTimestamp < / code > - Returns the time the message was first received from the queue
* ( < a href = " http : / / en . wikipedia . org / wiki / Unix _ time " > epoch time < / a > in milliseconds ) .
* < / li >
* < li >
* < code > ApproximateReceiveCount < / code > - Returns the number of times a message has been received from the queue but
* not deleted .
* < / li >
* < li >
* < code > SenderId < / code >
* < ul >
* < li >
* For an IAM user , returns the IAM user ID , for example < code > ABCDEFGHI1JKLMNOPQ23R < / code > .
* < / li >
* < li >
* For an IAM role , returns the IAM role ID , for example < code > ABCDE1F2GH3I4JK5LMNOP : i - a123b456 < / code > .
* < / li >
* < / ul >
* < / li >
* < li >
* < code > SentTimestamp < / code > - Returns the time the message was sent to the queue ( < a
* href = " http : / / en . wikipedia . org / wiki / Unix _ time " > epoch time < / a > in milliseconds ) .
* < / li >
* < li >
* < code > MessageDeduplicationId < / code > - Returns the value provided by the producer that calls the
* < code > < a > SendMessage < / a > < / code > action .
* < / li >
* < li >
* < code > MessageGroupId < / code > - Returns the value provided by the producer that calls the
* < code > < a > SendMessage < / a > < / code > action . Messages with the same < code > MessageGroupId < / code > are returned in
* sequence .
* < / li >
* < li >
* < code > SequenceNumber < / code > - Returns the value provided by Amazon SQS .
* < / li >
* < / ul >
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setAttributeNames ( java . util . Collection ) } or { @ link # withAttributeNames ( java . util . Collection ) } if you want
* to override the existing values .
* @ param attributeNames
* A list of s that need to be returned along with each message . These attributes include : < / p >
* < ul >
* < li >
* < code > All < / code > - Returns all values .
* < / li >
* < li >
* < code > ApproximateFirstReceiveTimestamp < / code > - Returns the time the message was first received from the
* queue ( < a href = " http : / / en . wikipedia . org / wiki / Unix _ time " > epoch time < / a > in milliseconds ) .
* < / li >
* < li >
* < code > ApproximateReceiveCount < / code > - Returns the number of times a message has been received from the
* queue but not deleted .
* < / li >
* < li >
* < code > SenderId < / code >
* < ul >
* < li >
* For an IAM user , returns the IAM user ID , for example < code > ABCDEFGHI1JKLMNOPQ23R < / code > .
* < / li >
* < li >
* For an IAM role , returns the IAM role ID , for example < code > ABCDE1F2GH3I4JK5LMNOP : i - a123b456 < / code > .
* < / li >
* < / ul >
* < / li >
* < li >
* < code > SentTimestamp < / code > - Returns the time the message was sent to the queue ( < a
* href = " http : / / en . wikipedia . org / wiki / Unix _ time " > epoch time < / a > in milliseconds ) .
* < / li >
* < li >
* < code > MessageDeduplicationId < / code > - Returns the value provided by the producer that calls the
* < code > < a > SendMessage < / a > < / code > action .
* < / li >
* < li >
* < code > MessageGroupId < / code > - Returns the value provided by the producer that calls the
* < code > < a > SendMessage < / a > < / code > action . Messages with the same < code > MessageGroupId < / code > are returned
* in sequence .
* < / li >
* < li >
* < code > SequenceNumber < / code > - Returns the value provided by Amazon SQS .
* < / li >
* @ return Returns a reference to this object so that method calls can be chained together .
* @ see QueueAttributeName */
public ReceiveMessageRequest withAttributeNames ( String ... attributeNames ) { } } | if ( this . attributeNames == null ) { setAttributeNames ( new com . amazonaws . internal . SdkInternalList < String > ( attributeNames . length ) ) ; } for ( String ele : attributeNames ) { this . attributeNames . add ( ele ) ; } return this ; |
public class GSMSImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eUnset ( int featureID ) { } } | switch ( featureID ) { case AfplibPackage . GSMS__LCID : setLCID ( LCID_EDEFAULT ) ; return ; } super . eUnset ( featureID ) ; |
public class WikipediaTraceReader { /** * Returns the path segment of the URL . */
private String getPath ( String url ) { } } | int index = url . indexOf ( '/' , 7 ) ; if ( index == - 1 ) { return url ; } // Replace the html entities that we want to search for inside paths
String cleansed = url . substring ( index + 1 ) ; for ( int i = 0 ; i < SEARCH_LIST . length ; i ++ ) { cleansed = StringUtils . replace ( cleansed , SEARCH_LIST [ i ] , REPLACEMENT_LIST [ i ] ) ; } return cleansed ; |
public class IonReaderBuilder { /** * Declares the catalog to use when building an { @ link IonReader } ,
* returning a new mutable builder the current one is immutable .
* @ param catalog the catalog to use in built readers .
* If null , a new { @ link SimpleCatalog } will be used .
* @ return this builder instance , if mutable ;
* otherwise a mutable copy of this builder .
* @ see # setCatalog ( IonCatalog )
* @ see # withCatalog ( IonCatalog ) */
public IonReaderBuilder withCatalog ( IonCatalog catalog ) { } } | IonReaderBuilder b = mutable ( ) ; b . setCatalog ( catalog ) ; return b ; |
public class Input { /** * Read Vector & lt ; Object & gt ; object .
* @ return Vector & lt ; Object & gt ; object */
@ SuppressWarnings ( "unchecked" ) @ Override public Vector < Object > readVectorObject ( ) { } } | log . debug ( "readVectorObject" ) ; int type = readInteger ( ) ; log . debug ( "Type: {}" , type ) ; if ( ( type & 1 ) == 0 ) { return ( Vector < Object > ) getReference ( type >> 1 ) ; } int len = type >> 1 ; log . debug ( "Length: {}" , len ) ; Vector < Object > array = new Vector < Object > ( len ) ; storeReference ( array ) ; int ref2 = readInteger ( ) ; log . debug ( "Ref2: {}" , ref2 ) ; buf . skip ( 1 ) ; Object object = null ; for ( int j = 0 ; j < len ; ++ j ) { byte objectType = buf . get ( ) ; log . debug ( "Object type: {}" , objectType ) ; switch ( objectType ) { case AMF3 . TYPE_UNDEFINED : case AMF3 . TYPE_NULL : object = null ; break ; case AMF3 . TYPE_STRING : object = readString ( ) ; break ; case AMF3 . TYPE_NUMBER : object = readNumber ( ) ; break ; case AMF3 . TYPE_INTEGER : object = readInteger ( ) ; break ; case AMF3 . TYPE_BYTEARRAY : object = readByteArray ( ) ; break ; case AMF3 . TYPE_VECTOR_INT : object = readVectorInt ( ) ; break ; case AMF3 . TYPE_VECTOR_UINT : object = readVectorUInt ( ) ; break ; case AMF3 . TYPE_VECTOR_NUMBER : object = readVectorNumber ( ) ; break ; case AMF3 . TYPE_VECTOR_OBJECT : object = readVectorObject ( ) ; break ; default : object = readObject ( ) ; } array . add ( object ) ; } log . debug ( "Vector: {}" , array ) ; return array ; |
public class StatementUpdate { /** * Marks a given list of statements for insertion into the current document .
* Inserted statements can have an id if they should update an existing
* statement , or use an empty string as id if they should be added . The
* method removes duplicates and avoids unnecessary modifications by
* checking the current content of the given document before marking
* statements for being written .
* @ param currentDocument
* the document with the current statements
* @ param addStatements
* the list of new statements to be added */
protected void markStatementsForInsertion ( StatementDocument currentDocument , List < Statement > addStatements ) { } } | for ( Statement statement : addStatements ) { addStatement ( statement , true ) ; } for ( StatementGroup sg : currentDocument . getStatementGroups ( ) ) { if ( this . toKeep . containsKey ( sg . getProperty ( ) ) ) { for ( Statement statement : sg ) { if ( ! this . toDelete . contains ( statement . getStatementId ( ) ) ) { addStatement ( statement , false ) ; } } } } |
public class B2buaHelperImpl { /** * ( non - Javadoc )
* @ see javax . servlet . sip . B2buaHelper # createRequest ( javax . servlet . sip . SipServletRequest ) */
public SipServletRequest createRequest ( SipServletRequest origRequest ) { } } | final SipServletRequestImpl newSipServletRequest = ( SipServletRequestImpl ) sipFactoryImpl . createRequest ( origRequest , false ) ; final SipServletRequestImpl origRequestImpl = ( SipServletRequestImpl ) origRequest ; final MobicentsSipSession originalSession = origRequestImpl . getSipSession ( ) ; final MobicentsSipSession session = newSipServletRequest . getSipSession ( ) ; // B2buaHelperTest . testLinkSipSessions101 assumes the sessions shouldn ' t be linked together
// sessionMap . put ( originalSession . getKey ( ) , session . getKey ( ) ) ;
// sessionMap . put ( session . getKey ( ) , originalSession . getKey ( ) ) ;
// dumpLinkedSessions ( ) ;
// linkedRequestMap . put ( newSipServletRequest , origRequestImpl ) ;
// linkedRequestMap . put ( origRequestImpl , newSipServletRequest ) ;
session . setB2buaHelper ( this ) ; originalSession . setB2buaHelper ( this ) ; setOriginalRequest ( session , newSipServletRequest ) ; dumpAppSession ( session ) ; return newSipServletRequest ; |
public class HTODDynacache { /** * close ( )
* Close all HTOD instances and then the filemanager . */
public void close ( ) { } } | try { rwLock . writeLock ( ) . lock ( ) ; closeNoRWLock ( ) ; } finally { rwLock . writeLock ( ) . unlock ( ) ; } if ( this . deleteDiskFiles ) { deleteDiskCacheFiles ( ) ; this . deleteDiskFiles = false ; } |
public class LinkedList { /** * Append a link to the end of the list .
* @ param link */
public synchronized final void append ( Link link ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "append" ) ; Link prev = _dummyTail . _getPreviousLink ( ) ; link . _link ( prev , _dummyTail , _nextPositionToIssue ++ , this ) ; prev . _setNextLink ( link ) ; _dummyTail . _setPreviousLink ( link ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "append" , _debugString ( ) ) ; |
public class EventSubscriptionManager { /** * @ param tenantId
* @ return the conditional start event subscriptions with the given tenant id */
@ SuppressWarnings ( "unchecked" ) public List < EventSubscriptionEntity > findConditionalStartEventSubscriptionByTenantId ( String tenantId ) { } } | Map < String , String > parameters = new HashMap < String , String > ( ) ; parameters . put ( "tenantId" , tenantId ) ; configureParameterizedQuery ( parameters ) ; return getDbEntityManager ( ) . selectList ( "selectConditionalStartEventSubscriptionByTenantId" , parameters ) ; |
public class MetricsController { /** * Internal API for encoding a registry that can be encoded as JSON .
* This is a helper function for the REST endpoint and to test against . */
Map < String , MetricValues > encodeRegistry ( Registry sourceRegistry , Predicate < Measurement > filter ) { } } | Map < String , MetricValues > metricMap = new HashMap < > ( ) ; /* * Flatten the meter measurements into a map of measurements keyed by
* the name and mapped to the different tag variants . */
for ( Meter meter : sourceRegistry ) { String kind = knownMeterKinds . computeIfAbsent ( meter . id ( ) , k -> meterToKind ( sourceRegistry , meter ) ) ; for ( Measurement measurement : meter . measure ( ) ) { if ( ! filter . test ( measurement ) ) { continue ; } if ( Double . isNaN ( measurement . value ( ) ) ) { continue ; } String meterName = measurement . id ( ) . name ( ) ; MetricValues have = metricMap . get ( meterName ) ; if ( have == null ) { metricMap . put ( meterName , new MetricValues ( kind , measurement ) ) ; } else { have . addMeasurement ( measurement ) ; } } } return metricMap ; |
public class XMLHTTPResponseHandler { /** * This function returns the requested value from the object data . < br >
* The path is a set of key names seperated by ' ; ' .
* @ param object
* The object holding all the data
* @ param path
* The path to the value ( elements seperated by ; )
* @ return The value ( null if not found ) */
@ Override protected String findValueImpl ( Document object , String path ) { } } | // split path to parts
String [ ] pathParts = path . split ( AbstractMappingHTTPResponseHandler . VALUES_SEPERATOR ) ; int pathPartsAmount = pathParts . length ; String pathPart = null ; StringBuilder buffer = new StringBuilder ( 500 ) ; for ( int index = 0 ; index < pathPartsAmount ; index ++ ) { // get next path part
pathPart = pathParts [ index ] ; if ( pathPart . length ( ) > 0 ) { buffer . append ( "/" ) ; buffer . append ( pathPart ) ; } } String xpathExpressionStr = buffer . toString ( ) ; XPathFactory xpathFactory = XPathFactory . newInstance ( ) ; XPath xpath = xpathFactory . newXPath ( ) ; String value = null ; try { // compile path to xpath
XPathExpression xpathExpression = xpath . compile ( xpathExpressionStr ) ; // find value
value = xpathExpression . evaluate ( object ) ; } catch ( XPathExpressionException exception ) { throw new FaxException ( "Unable to parse/evaluate xpath expression: " + path , exception ) ; } return value ; |
public class EnableHlsTaskRunner { /** * Enables HLS streaming for a space : ensures a CloudFront distribution ( open , or secured with trusted signers ) fronts the space ' s S3 bucket , applies bucket access and CORS policies , and records the streaming host in the space properties . * @ param taskParameters serialized EnableStreamingTaskParameters * @ return serialized EnableStreamingTaskResult containing the streaming host */
public String performTask ( String taskParameters ) { } } | EnableStreamingTaskParameters taskParams = EnableStreamingTaskParameters . deserialize ( taskParameters ) ; String spaceId = taskParams . getSpaceId ( ) ; boolean secure = taskParams . isSecure ( ) ; List < String > allowedOrigins = taskParams . getAllowedOrigins ( ) ; log . info ( "Performing " + TASK_NAME + " task on space " + spaceId + ". Secure streaming set to " + secure ) ; // Will throw if bucket does not exist
String bucketName = unwrappedS3Provider . getBucketName ( spaceId ) ; String domainName = null ; String distId = null ; String oaIdentityId = getOriginAccessId ( ) ; EnableStreamingTaskResult taskResult = new EnableStreamingTaskResult ( ) ; DistributionSummary existingDist = getExistingDistribution ( bucketName ) ; if ( existingDist != null ) { // There is an existing distribution
// Ensure that this is not an attempt to change the security type
// of this existing distribution ( secure <=> distribution has trusted signers )
boolean existingSecure = ! existingDist . getDefaultCacheBehavior ( ) . getTrustedSigners ( ) . getItems ( ) . isEmpty ( ) ; if ( ( secure && ! existingSecure ) || ( ! secure && existingSecure ) ) { throw new UnsupportedTaskException ( TASK_NAME , "The space " + spaceId + " is already configured to stream as " + ( secure ? "OPEN" : "SECURE" ) + " and cannot be updated to stream as " + ( secure ? "SECURE" : "OPEN" ) + ". To do this, you must first execute the " + StorageTaskConstants . DELETE_HLS_TASK_NAME + " task." ) ; } distId = existingDist . getId ( ) ; if ( ! existingDist . isEnabled ( ) ) { // Distribution is disabled , enable it
setDistributionState ( distId , true ) ; } domainName = existingDist . getDomainName ( ) ; } else { // No existing distribution , need to create one
// Create S3 Origin
S3OriginConfig s3OriginConfig = new S3OriginConfig ( ) . withOriginAccessIdentity ( S3_ORIGIN_OAI_PREFIX + oaIdentityId ) ; Origin s3Origin = new Origin ( ) . withDomainName ( bucketName + S3_ORIGIN_SUFFIX ) . withS3OriginConfig ( s3OriginConfig ) . withId ( "S3-" + bucketName ) ; // Only include trusted signers on secure distributions
TrustedSigners signers = new TrustedSigners ( ) ; if ( secure ) { signers . setItems ( Collections . singletonList ( cfAccountId ) ) ; signers . setEnabled ( true ) ; signers . setQuantity ( 1 ) ; } else { signers . setEnabled ( false ) ; signers . setQuantity ( 0 ) ; } DefaultCacheBehavior defaultCacheBehavior = new DefaultCacheBehavior ( ) ; defaultCacheBehavior . setTrustedSigners ( signers ) ; defaultCacheBehavior . setViewerProtocolPolicy ( ViewerProtocolPolicy . RedirectToHttps ) ; // Forwarding headers to support CORS , see :
// https : / / docs . aws . amazon . com / AmazonCloudFront / latest / DeveloperGuide / header - caching . html # header - caching - web - cors
defaultCacheBehavior . setAllowedMethods ( new AllowedMethods ( ) . withItems ( Method . GET , Method . HEAD , Method . OPTIONS ) . withQuantity ( 3 ) ) ; defaultCacheBehavior . setForwardedValues ( new ForwardedValues ( ) . withQueryString ( false ) . withCookies ( new CookiePreference ( ) . withForward ( ItemSelection . None ) ) . withHeaders ( new Headers ( ) . withItems ( "Origin" , "Access-Control-Request-Headers" , "Access-Control-Request-Method" ) . withQuantity ( 3 ) ) ) ; // Setting other cache behaviors required by the client
defaultCacheBehavior . setMinTTL ( 0l ) ; defaultCacheBehavior . setTargetOriginId ( s3Origin . getId ( ) ) ; // Create origins list
Origins origins ; CacheBehaviors cacheBehaviors = new CacheBehaviors ( ) ; if ( secure ) { // Create Origin to allow signed cookies to be set through a CloudFront call
CustomOriginConfig cookiesOriginConfig = new CustomOriginConfig ( ) . withOriginProtocolPolicy ( OriginProtocolPolicy . HttpsOnly ) . withHTTPPort ( 80 ) . withHTTPSPort ( 443 ) ; String getCookiesPath = "/durastore/aux" ; String cookiesOriginId = "Custom origin - " + dcHost + getCookiesPath ; Origin cookiesOrigin = new Origin ( ) . withDomainName ( dcHost ) . withOriginPath ( getCookiesPath ) . withId ( cookiesOriginId ) . withCustomOriginConfig ( cookiesOriginConfig ) ; origins = new Origins ( ) . withItems ( s3Origin , cookiesOrigin ) . withQuantity ( 2 ) ; // Create behavior for cookies origin
CookiePreference cookiePreference = new CookiePreference ( ) . withForward ( ItemSelection . All ) ; CacheBehavior cookiesCacheBehavior = new CacheBehavior ( ) . withPathPattern ( "/cookies" ) . withTargetOriginId ( cookiesOriginId ) . withViewerProtocolPolicy ( ViewerProtocolPolicy . RedirectToHttps ) . withAllowedMethods ( new AllowedMethods ( ) . withItems ( Method . GET , Method . HEAD ) . withQuantity ( 2 ) ) . withForwardedValues ( new ForwardedValues ( ) . withQueryString ( true ) . withCookies ( cookiePreference ) ) . withTrustedSigners ( new TrustedSigners ( ) . withEnabled ( false ) . withQuantity ( 0 ) ) . withMinTTL ( 0l ) ; cacheBehaviors = cacheBehaviors . withItems ( cookiesCacheBehavior ) . withQuantity ( 1 ) ; } else { origins = new Origins ( ) . withItems ( s3Origin ) . withQuantity ( 1 ) ; } // Build distribution
DistributionConfig distributionConfig = new DistributionConfig ( ) . withCallerReference ( "" + System . currentTimeMillis ( ) ) . withOrigins ( origins ) . withEnabled ( true ) . withComment ( "HLS streaming for space: " + spaceId ) . withDefaultCacheBehavior ( defaultCacheBehavior ) ; if ( secure ) { distributionConfig . setCacheBehaviors ( cacheBehaviors ) ; } Distribution dist = cfClient . createDistribution ( new CreateDistributionRequest ( distributionConfig ) ) . getDistribution ( ) ; domainName = dist . getDomainName ( ) ; } // Set bucket policy to accept origin access identity
setBucketAccessPolicy ( bucketName , oaIdentityId ) ; // Set CORS policy on bucket
setCorsPolicy ( bucketName , allowedOrigins , dcHost ) ; // Update bucket tags to include streaming host
Map < String , String > spaceProps = s3Provider . getSpaceProperties ( spaceId ) ; spaceProps . put ( HLS_STREAMING_HOST_PROP , domainName ) ; spaceProps . put ( HLS_STREAMING_TYPE_PROP , secure ? STREAMING_TYPE . SECURE . name ( ) : STREAMING_TYPE . OPEN . name ( ) ) ; unwrappedS3Provider . setNewSpaceProperties ( spaceId , spaceProps ) ; taskResult . setResult ( TASK_NAME + " task completed successfully" ) ; // Return results
taskResult . setStreamingHost ( domainName ) ; String toReturn = taskResult . serialize ( ) ; log . info ( "Result of " + TASK_NAME + " task: " + toReturn ) ; return toReturn ;
public class CommerceOrderItemPersistenceImpl { /** * Removes all the commerce order items where CPInstanceId = & # 63 ; from the database .
* @ param CPInstanceId the cp instance ID */
@ Override public void removeByCPInstanceId ( long CPInstanceId ) { } } | // Load every matching row (ALL_POS/ALL_POS = no pagination, null = default order)
// and delete each through remove() so listeners and cache eviction still fire.
List<CommerceOrderItem> matches =
    findByCPInstanceId(CPInstanceId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
for (CommerceOrderItem match : matches) {
    remove(match);
}
public class ReflectionUtils { /** * Determines if the class or interface represented by first Class parameter is either the same as , or is a superclass or
* superinterface of , the class or interface represented by the second Class parameter .
* @ param c1 Class to check .
* @ param c2 Class to check against .
* @ return { @ code true } if the first class parameter is either the same as or a superinterface of the second class parameter . */
public static boolean isAssignableFrom ( Class < ? > c1 , Class < ? > c2 ) { } } | assertReflectionAccessor ( ) ; return accessor . isAssignableFrom ( c1 , c2 ) ; // delegates to the installed accessor; assertReflectionAccessor() fails fast when none is configured
public class WorkClassLoader { /** * Looks the class up in the resource adapter's loader first (when one is set); any failure there, including linkage errors, falls back to the parent lookup. { @ inheritDoc } */
@ Override public Class < ? > findClass ( String name ) throws ClassNotFoundException { } } | if (trace) {
    log.tracef("%s: findClass(%s)", Integer.toHexString(System.identityHashCode(this)), name);
}
// No resource adapter loader configured: go straight to the parent.
if (resourceAdapterClassLoader == null) {
    return super.findClass(name);
}
try {
    return resourceAdapterClassLoader.findClass(name);
} catch (Throwable ignored) {
    // Not resolvable via the resource adapter loader; default to parent.
    return super.findClass(name);
}
public class CompensatedSum { /** * Increments the Kahan sum by adding two sums , and updating the correction term for reducing numeric errors . */
public CompensatedSum add ( CompensatedSum other ) { } } | // Kahan-style combine: fold the other sum together with both correction terms,
// then recover the low-order bits lost when adding onto this sum's value.
double increment = other.value() + (delta + other.delta());
double newValue = value + increment;
double newDelta = increment - (newValue - value);
return new CompensatedSum(newValue, newDelta);
public class MethodConfig { /** * Sets parameter .
* @ param key the key
* @ param value the value
* @ return this config , for call chaining */
public MethodConfig setParameter ( String key , String value ) { } } | // The original unsynchronized check-then-act could let two racing threads each
// install a fresh map, silently dropping one thread's entries. A short
// synchronized block makes the one-time initialization safe; once non-null the
// field is never reassigned here, so the subsequent put is race-free via the
// ConcurrentHashMap itself.
// NOTE(review): assumes 'parameters' is only assigned in this method — confirm at the field declaration.
synchronized (this) {
    if (parameters == null) {
        parameters = new ConcurrentHashMap<String, String>();
    }
}
parameters.put(key, value);
return this;
public class ParagraphVectors { /** * Get top N elements
* @ param vec the vec to extract the top elements from
* @ param N the number of elements to extract
* @ return the indices ( stored as doubles ) of the top N elements , ordered from highest value to lowest */
private List < Double > getTopN ( INDArray vec , int N ) { } } | BasicModelUtils . ArrayComparator comparator = new BasicModelUtils . ArrayComparator ( ) ; PriorityQueue < Double [ ] > queue = new PriorityQueue < > ( vec . rows ( ) , comparator ) ; // queue holds [value, index] pairs; once full, only pairs beating the current head are kept
for ( int j = 0 ; j < vec . length ( ) ; j ++ ) { final Double [ ] pair = new Double [ ] { vec . getDouble ( j ) , ( double ) j } ; if ( queue . size ( ) < N ) { queue . add ( pair ) ; } else { Double [ ] head = queue . peek ( ) ; if ( comparator . compare ( pair , head ) > 0 ) { queue . poll ( ) ; queue . add ( pair ) ; } } } // drain indices in queue order, then reverse so the best element comes first
List < Double > lowToHighSimLst = new ArrayList < > ( ) ; while ( ! queue . isEmpty ( ) ) { double ind = queue . poll ( ) [ 1 ] ; lowToHighSimLst . add ( ind ) ; } return Lists . reverse ( lowToHighSimLst ) ;
public class AutomationExecution { /** * The combination of AWS Regions and / or AWS accounts where you want to run the Automation .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setTargetLocations ( java . util . Collection ) } or { @ link # withTargetLocations ( java . util . Collection ) } if you
* want to override the existing values .
* @ param targetLocations
* The combination of AWS Regions and / or AWS accounts where you want to run the Automation .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AutomationExecution withTargetLocations ( TargetLocation ... targetLocations ) { } } | if ( this . targetLocations == null ) { setTargetLocations ( new com . amazonaws . internal . SdkInternalList < TargetLocation > ( targetLocations . length ) ) ; } for ( TargetLocation ele : targetLocations ) { this . targetLocations . add ( ele ) ; } return this ; // generated SDK varargs appender: lazily creates the backing list, then appends each element
public class JavaXmlProcessor { /** * Returns the string value for the node : the raw node value for attribute / text nodes , or the node serialized to XML ( without an XML declaration ) for anything else .
* @ param node
* the node
* @ return the string value for the node , or null if serialization fails */
private String getStringValue ( Node node ) { } } | switch ( node . getNodeType ( ) ) { case Node . ATTRIBUTE_NODE : case Node . TEXT_NODE : return node . getNodeValue ( ) ; default : { try { Transformer transformer = TRANSFORMER_FACTORY . newTransformer ( ) ; StringWriter buffer = new StringWriter ( ) ; transformer . setOutputProperty ( OutputKeys . OMIT_XML_DECLARATION , "yes" ) ; transformer . transform ( new DOMSource ( node ) , new StreamResult ( buffer ) ) ; return buffer . toString ( ) ; } catch ( Exception e ) { // deliberate best-effort: any transformer failure yields null instead of propagating
} return null ; } }
public class ByteArray { /** * 写入ByteBuffer指定长度的数据
* @ param buffer 数据
* @ param len 指定长度 */
public void write ( ByteBuffer buffer , int len ) { } } | if ( len < 1 ) return ; if ( count >= content . length - len ) { byte [ ] ns = new byte [ content . length + len ] ; System . arraycopy ( content , 0 , ns , 0 , count ) ; this . content = ns ; } buffer . get ( content , count , len ) ; count += len ; |
public class ViewSelectorAssertions { /** * Fluent assertion entry point for a selection of views from the given view
* based on the given selector . It may be helpful to statically import this rather
* than { @ link # assertThat ( ViewSelection ) } to avoid conflicts with other statically
* imported { @ code assertThat ( ) } methods . */
public static ViewSelectionAssert assertThatSelection ( String selector , View view ) { } } | return assertThat ( selection ( selector , view ) ) ; // convenience alias: builds the selection, then delegates to assertThat(ViewSelection)
public class EllipseClustersIntoHexagonalGrid { /** * Combines the inner and outer grid into one grid for output . See { @ link Grid } for a discussion
* on how elements are ordered internally . Cells whose parity does not match the row parity are filled with null . */
void saveResults ( List < List < NodeInfo > > graph ) { } } | Grid g = foundGrids . grow ( ) ; g . reset ( ) ; g . columns = graph . get ( 0 ) . size ( ) + graph . get ( 1 ) . size ( ) ; g . rows = graph . size ( ) ; for ( int row = 0 ; row < g . rows ; row ++ ) { List < NodeInfo > list = graph . get ( row ) ; for ( int i = 0 ; i < g . columns ; i ++ ) { if ( ( i % 2 ) == ( row % 2 ) ) g . ellipses . add ( list . get ( i / 2 ) . ellipse ) ; else g . ellipses . add ( null ) ; } } // NOTE(review): reads graph.get(1) unconditionally — assumes at least two rows; confirm the caller guarantees this
public class DNSInput { /** * Reads a byte array of a specified length from the stream into an existing
* array .
* @ param b The array to read into .
* @ param off The offset of the array to start copying data into .
* @ param len The number of bytes to copy .
* @ throws WireParseException The end of the stream was reached . */
public void readByteArray ( byte [ ] b , int off , int len ) throws WireParseException { } } | require ( len ) ; System . arraycopy ( array , pos , b , off , len ) ; pos += len ; // require(len) throws WireParseException when fewer than len bytes remain; pos advances past the copied bytes
public class Synthetic { /** * Returns a sequence of exactly { @ code count } items drawn from the generator , in generation order . */
private static LongStream generate ( NumberGenerator generator , long count ) { } } | return LongStream . range ( 0 , count ) . map ( ignored -> generator . nextValue ( ) . longValue ( ) ) ; // the range index is discarded; range(0, count) only fixes the stream length
public class AnnisUI { /** * Get a cached version of the { @ link CorpusConfig } for a corpus . On a cache miss the config is fetched ( default or per - corpus ) and cached ; with no cache available an empty config is returned .
* @ param corpus the corpus name , or DEFAULT _ CONFIG for the default configuration
* @ return the corpus configuration ( never null ) */
public CorpusConfig getCorpusConfigWithCache ( String corpus ) { } } | // Without a cache, fall back to an empty configuration (original behavior).
if (corpusConfigCache == null) {
    return new CorpusConfig();
}
CorpusConfig config = corpusConfigCache.getIfPresent(corpus);
if (config == null) {
    // Cache miss: resolve either the global default or the per-corpus config.
    config = corpus.equals(DEFAULT_CONFIG)
        ? Helper.getDefaultCorpusConfig()
        : Helper.getCorpusConfig(corpus);
    corpusConfigCache.put(corpus, config);
}
return config;
public class P2sVpnGatewaysInner { /** * Generates VPN profile for P2S client of the P2SVpnGateway in the specified resource group .
* @ param resourceGroupName The name of the resource group .
* @ param gatewayName The name of the P2SVpnGateway .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the VpnProfileResponseInner object */
public Observable < ServiceResponse < VpnProfileResponseInner > > beginGenerateVpnProfileWithServiceResponseAsync ( String resourceGroupName , String gatewayName ) { } } | if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( gatewayName == null ) { throw new IllegalArgumentException ( "Parameter gatewayName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } final String apiVersion = "2018-08-01" ; final AuthenticationMethod authenticationMethod = null ; P2SVpnProfileParameters parameters = new P2SVpnProfileParameters ( ) ; parameters . withAuthenticationMethod ( null ) ; return service . beginGenerateVpnProfile ( resourceGroupName , gatewayName , this . client . subscriptionId ( ) , apiVersion , this . client . acceptLanguage ( ) , parameters , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < VpnProfileResponseInner > > > ( ) { @ Override public Observable < ServiceResponse < VpnProfileResponseInner > > call ( Response < ResponseBody > response ) { try { ServiceResponse < VpnProfileResponseInner > clientResponse = beginGenerateVpnProfileDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ; // NOTE(review): generated code — local 'authenticationMethod' is declared but unused; withAuthenticationMethod(null) is passed directly
public class ChaincodeEvent { /** * Binary data associated with this event .
* @ return binary data set by the chaincode for this event . This may return null . */
public byte [ ] getPayload ( ) { } } | ByteString ret = getChaincodeEvent ( ) . getPayload ( ) ; if ( null == ret ) { return null ; } return ret . toByteArray ( ) ; |
public class Metadata { /** * Adds a metadata value to the dictionary , creating the named entry on first use .
* @ param name the name
* @ param value the value
* @ param isDC whether a newly created entry is marked as Dublin Core
* @ param path the path recorded for a newly created entry */
public void add ( String name , TiffObject value , boolean isDC , String path ) { } } | // Single lookup instead of the original's five map accesses; behavior is
// identical: isDC and path are only recorded when the entry is first created.
MetadataObject entry = metadata.get(name);
if (entry == null) {
    entry = new MetadataObject();
    entry.setIsDublinCore(isDC);
    entry.setPath(path);
    metadata.put(name, entry);
}
entry.getObjectList().add(value);
public class PrometheusExportUtils { /** * Converts a point value in Metric to a list of Prometheus Samples . Scalar values map to a single sample ; Distribution values expand into cumulative per - bucket " le " samples plus _ count and _ sum ; Summary values expand into quantile samples plus optional _ count and _ sum . */
@ VisibleForTesting static List < Sample > getSamples ( final String name , final List < String > labelNames , List < LabelValue > labelValuesList , Value value ) { } } | Preconditions . checkArgument ( labelNames . size ( ) == labelValuesList . size ( ) , "Keys and Values don't have same size." ) ; final List < Sample > samples = Lists . newArrayList ( ) ; final List < String > labelValues = new ArrayList < String > ( labelValuesList . size ( ) ) ; for ( LabelValue labelValue : labelValuesList ) { String val = labelValue == null ? "" : labelValue . getValue ( ) ; labelValues . add ( val == null ? "" : val ) ; } return value . match ( new Function < Double , List < Sample > > ( ) { @ Override public List < Sample > apply ( Double arg ) { samples . add ( new Sample ( name , labelNames , labelValues , arg ) ) ; return samples ; } } , new Function < Long , List < Sample > > ( ) { @ Override public List < Sample > apply ( Long arg ) { samples . add ( new Sample ( name , labelNames , labelValues , arg ) ) ; return samples ; } } , new Function < Distribution , List < Sample > > ( ) { @ Override public List < Sample > apply ( final Distribution arg ) { BucketOptions bucketOptions = arg . getBucketOptions ( ) ; List < Double > boundaries = new ArrayList < > ( ) ; if ( bucketOptions != null ) { boundaries = bucketOptions . match ( new Function < ExplicitOptions , List < Double > > ( ) { @ Override public List < Double > apply ( ExplicitOptions arg ) { return arg . getBucketBoundaries ( ) ; } } , Functions . < List < Double > > throwIllegalArgumentException ( ) ) ; } List < String > labelNamesWithLe = new ArrayList < String > ( labelNames ) ; labelNamesWithLe . add ( LABEL_NAME_BUCKET_BOUND ) ; long cumulativeCount = 0 ; for ( int i = 0 ; i < arg . getBuckets ( ) . size ( ) ; i ++ ) { List < String > labelValuesWithLe = new ArrayList < String > ( labelValues ) ; // The label value of " le " is the upper inclusive bound .
// For the last bucket , it should be " + Inf " .
String bucketBoundary = doubleToGoString ( i < boundaries . size ( ) ? boundaries . get ( i ) : Double . POSITIVE_INFINITY ) ; labelValuesWithLe . add ( bucketBoundary ) ; cumulativeCount += arg . getBuckets ( ) . get ( i ) . getCount ( ) ; samples . add ( new MetricFamilySamples . Sample ( name + SAMPLE_SUFFIX_BUCKET , labelNamesWithLe , labelValuesWithLe , cumulativeCount ) ) ; } samples . add ( new MetricFamilySamples . Sample ( name + SAMPLE_SUFFIX_COUNT , labelNames , labelValues , arg . getCount ( ) ) ) ; samples . add ( new MetricFamilySamples . Sample ( name + SAMPLE_SUFFIX_SUM , labelNames , labelValues , arg . getSum ( ) ) ) ; return samples ; } } , new Function < Summary , List < Sample > > ( ) { @ Override public List < Sample > apply ( Summary arg ) { Long count = arg . getCount ( ) ; if ( count != null ) { samples . add ( new MetricFamilySamples . Sample ( name + SAMPLE_SUFFIX_COUNT , labelNames , labelValues , count ) ) ; } Double sum = arg . getSum ( ) ; if ( sum != null ) { samples . add ( new MetricFamilySamples . Sample ( name + SAMPLE_SUFFIX_SUM , labelNames , labelValues , sum ) ) ; } List < ValueAtPercentile > valueAtPercentiles = arg . getSnapshot ( ) . getValueAtPercentiles ( ) ; List < String > labelNamesWithQuantile = new ArrayList < String > ( labelNames ) ; labelNamesWithQuantile . add ( LABEL_NAME_QUANTILE ) ; for ( ValueAtPercentile valueAtPercentile : valueAtPercentiles ) { List < String > labelValuesWithQuantile = new ArrayList < String > ( labelValues ) ; labelValuesWithQuantile . add ( doubleToGoString ( valueAtPercentile . getPercentile ( ) / 100 ) ) ; samples . add ( new MetricFamilySamples . Sample ( name , labelNamesWithQuantile , labelValuesWithQuantile , valueAtPercentile . getValue ( ) ) ) ; } return samples ; } } , Functions . < List < Sample > > throwIllegalArgumentException ( ) ) ;
public class BELScriptLexer { /** * $ ANTLR start " T _ _ 111" — generated lexer rule matching the literal token ' causesNoChange ' ; do not hand - edit */
public final void mT__111 ( ) throws RecognitionException { } } | try { int _type = T__111 ; int _channel = DEFAULT_TOKEN_CHANNEL ; // BELScript . g : 98:8 : ( ' causesNoChange ' )
// BELScript . g : 98:10 : ' causesNoChange '
{ match ( "causesNoChange" ) ; } state . type = _type ; state . channel = _channel ; } finally { }
public class KeyVaultClientBaseImpl { /** * Merges a certificate or a certificate chain with a key pair existing on the server .
* The MergeCertificate operation performs the merging of a certificate or certificate chain with a key pair currently available in the service . This operation requires the certificates / create permission .
* @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net .
* @ param certificateName The name of the certificate .
* @ param x509Certificates The certificate or the certificate chain to merge .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < CertificateBundle > mergeCertificateAsync ( String vaultBaseUrl , String certificateName , List < byte [ ] > x509Certificates , final ServiceCallback < CertificateBundle > serviceCallback ) { } } | return ServiceFuture . fromResponse ( mergeCertificateWithServiceResponseAsync ( vaultBaseUrl , certificateName , x509Certificates ) , serviceCallback ) ; // thin async adapter over mergeCertificateWithServiceResponseAsync
public class DiagnosticModule { /** * Check the the ThreadLocal to see if we should continue processing this
* FFDC exception
* @ return */
private boolean continueProcessing ( ) { } } | Boolean currentValue = _continueProcessing . get ( ) ; if ( currentValue != null ) return currentValue . booleanValue ( ) ; return true ; |
public class PlanNodeStatsEstimateMath { /** * Subtracts subset stats from supersets stats : row count , per - symbol null counts , NDVs and ranges .
* It is assumed that each NDV from subset has a matching NDV in superset . Returns unknown when either input row count is unknown . */
public static PlanNodeStatsEstimate subtractSubsetStats ( PlanNodeStatsEstimate superset , PlanNodeStatsEstimate subset ) { } } | if ( superset . isOutputRowCountUnknown ( ) || subset . isOutputRowCountUnknown ( ) ) { return PlanNodeStatsEstimate . unknown ( ) ; } double supersetRowCount = superset . getOutputRowCount ( ) ; double subsetRowCount = subset . getOutputRowCount ( ) ; double outputRowCount = max ( supersetRowCount - subsetRowCount , 0 ) ; // everything will be filtered out after applying negation
if ( outputRowCount == 0 ) { return createZeroStats ( superset ) ; } PlanNodeStatsEstimate . Builder result = PlanNodeStatsEstimate . builder ( ) ; result . setOutputRowCount ( outputRowCount ) ; superset . getSymbolsWithKnownStatistics ( ) . forEach ( symbol -> { SymbolStatsEstimate supersetSymbolStats = superset . getSymbolStatistics ( symbol ) ; SymbolStatsEstimate subsetSymbolStats = subset . getSymbolStatistics ( symbol ) ; SymbolStatsEstimate . Builder newSymbolStats = SymbolStatsEstimate . builder ( ) ; // for simplicity keep the average row size the same as in the input
// in most cases the average row size doesn ' t change after applying filters
newSymbolStats . setAverageRowSize ( supersetSymbolStats . getAverageRowSize ( ) ) ; // nullsCount
double supersetNullsCount = supersetSymbolStats . getNullsFraction ( ) * supersetRowCount ; double subsetNullsCount = subsetSymbolStats . getNullsFraction ( ) * subsetRowCount ; double newNullsCount = max ( supersetNullsCount - subsetNullsCount , 0 ) ; newSymbolStats . setNullsFraction ( min ( newNullsCount , outputRowCount ) / outputRowCount ) ; // distinctValuesCount
double supersetDistinctValues = supersetSymbolStats . getDistinctValuesCount ( ) ; double subsetDistinctValues = subsetSymbolStats . getDistinctValuesCount ( ) ; double newDistinctValuesCount ; if ( isNaN ( supersetDistinctValues ) || isNaN ( subsetDistinctValues ) ) { newDistinctValuesCount = NaN ; } else if ( supersetDistinctValues == 0 ) { newDistinctValuesCount = 0 ; } else if ( subsetDistinctValues == 0 ) { newDistinctValuesCount = supersetDistinctValues ; } else { double supersetNonNullsCount = supersetRowCount - supersetNullsCount ; double subsetNonNullsCount = subsetRowCount - subsetNullsCount ; double supersetValuesPerDistinctValue = supersetNonNullsCount / supersetDistinctValues ; double subsetValuesPerDistinctValue = subsetNonNullsCount / subsetDistinctValues ; if ( supersetValuesPerDistinctValue <= subsetValuesPerDistinctValue ) { newDistinctValuesCount = max ( supersetDistinctValues - subsetDistinctValues , 0 ) ; } else { newDistinctValuesCount = supersetDistinctValues ; } } newSymbolStats . setDistinctValuesCount ( newDistinctValuesCount ) ; // range
newSymbolStats . setLowValue ( supersetSymbolStats . getLowValue ( ) ) ; newSymbolStats . setHighValue ( supersetSymbolStats . getHighValue ( ) ) ; result . addSymbolStatistics ( symbol , newSymbolStats . build ( ) ) ; } ) ; return result . build ( ) ;
public class ArgumentDefinition { /** * Initialize a collection value for this field . If the collection can ' t be instantiated directly
* because its the underlying type is not a concrete type , an attempt assign an ArrayList will be made .
* @ param annotationType the type of annotation used for ths argument , for error reporting purposes */
protected void intializeCollection ( final String annotationType ) { } } | final Field field = getUnderlyingField ( ) ; final Object callerArguments = containingObject ; try { if ( field . get ( containingObject ) == null ) { field . set ( callerArguments , field . getType ( ) . newInstance ( ) ) ; } } catch ( final Exception ex ) { // The declared collection type could not be instantiated directly (e.g. it is an
// interface or abstract type), so fall back to assigning a plain ArrayList.
try { field . set ( callerArguments , new ArrayList < > ( ) ) ; } catch ( final IllegalArgumentException e ) { throw new CommandLineException . CommandLineParserInternalException ( String . format ( "Collection member %s of type %s must be explicitly initialized. " + "It cannot be constructed or auto-initialized with ArrayList." , field . getName ( ) , annotationType ) ) ; } catch ( final IllegalAccessException e ) { throw new CommandLineException . ShouldNeverReachHereException ( "We should not have reached here because we set accessible to true" , e ) ; } }
public class AWSServerlessApplicationRepositoryClient { /** * Sets the permission policy for an application . For the list of actions supported for this operation , see < a href =
* " https : / / docs . aws . amazon . com / serverlessrepo / latest / devguide / access - control - resource - based . html # application - permissions "
* > Application Permissions < / a > .
* @ param putApplicationPolicyRequest
* @ return Result of the PutApplicationPolicy operation returned by the service .
* @ throws NotFoundException
* The resource ( for example , an access policy statement ) specified in the request doesn ' t exist .
* @ throws TooManyRequestsException
* The client is sending more than the allowed number of requests per unit of time .
* @ throws BadRequestException
* One of the parameters in the request is invalid .
* @ throws InternalServerErrorException
* The AWS Serverless Application Repository service encountered an internal error .
* @ throws ForbiddenException
* The client is not authenticated .
* @ sample AWSServerlessApplicationRepository . PutApplicationPolicy
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / serverlessrepo - 2017-09-08 / PutApplicationPolicy "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public PutApplicationPolicyResult putApplicationPolicy ( PutApplicationPolicyRequest request ) { } } | request = beforeClientExecution ( request ) ; return executePutApplicationPolicy ( request ) ; // generated client method: run the pre-execution hook, then dispatch
public class AESHelper { /** * Encrypts a string with the given hex - encoded AES key .
* @ param c The string to encrypt .
* @ param key The AES key as a hex string .
* @ return The encrypted string in HEX , or null if encryption fails . */
public static String encrypt ( String c , String key ) { } } | // SECURITY(review): Cipher.getInstance("AES") resolves to the provider's default
// mode/padding (ECB/PKCS5Padding on SunJCE) — ECB leaks plaintext patterns; and
// c.getBytes() uses the platform default charset. Both should be made explicit,
// but changing either breaks compatibility with data already encrypted — confirm
// a migration path before fixing.
try { SecretKeySpec skeySpec = new SecretKeySpec ( Hex . decodeHex ( key . toCharArray ( ) ) , "AES" ) ; Cipher cipher = Cipher . getInstance ( "AES" ) ; cipher . init ( Cipher . ENCRYPT_MODE , skeySpec ) ; byte [ ] encoded = cipher . doFinal ( c . getBytes ( ) ) ; return new String ( Hex . encodeHex ( encoded ) ) ; } catch ( final Exception e ) { logger . warn ( "Could not encrypt string" , e ) ; return null ; }
public class IndirectJndiLookupObjectFactory { /** * Try to get an object instance by looking in the OSGi service registry
* similar to how / com . ibm . ws . jndi / implements the default namespace .
* @ return the object instance , or null if an object could not be found */
@ FFDCIgnore ( PrivilegedActionException . class ) private Object getJNDIServiceObjectInstance ( final String className , final String bindingName , final Hashtable < ? , ? > envmt ) throws Exception { } } | try { return AccessController . doPrivileged ( new PrivilegedExceptionAction < Object > ( ) { @ Override public Object run ( ) throws Exception { return getJNDIServiceObjectInstancePrivileged ( className , bindingName , envmt ) ; } } ) ; } catch ( PrivilegedActionException paex ) { Throwable cause = paex . getCause ( ) ; if ( cause instanceof Exception ) { throw ( Exception ) cause ; } throw new Error ( cause ) ; } // unwraps PrivilegedActionException: checked causes are rethrown as-is, anything else is wrapped in an Error
public class PromiseCombiner { /** * Adds a new future to be combined . New futures may be added until an aggregate promise is added via the
* { @ link PromiseCombiner # finish ( Promise ) } method .
* @ param future the future to add to this promise combiner */
@ SuppressWarnings ( { } } | "unchecked" , "rawtypes" } ) public void add ( Future future ) { checkAddAllowed ( ) ; checkInEventLoop ( ) ; ++ expectedCount ; future . addListener ( listener ) ; // NOTE(review): this row splits the @SuppressWarnings annotation across the signature/implementation columns; the body bumps the expected-completion count and registers the shared listener on the future
public class TridiagonalDecompositionHouseholder_ZDRM { /** * Computes and performs the similar ( Householder ) transform for submatrix k , zeroing the column below the first sub - diagonal while preserving symmetry . */
private void similarTransform ( int k ) { } } | double t [ ] = QT . data ; // find the largest value in this column
// this is used to normalize the column and mitigate overflow / underflow
double max = QrHelperFunctions_ZDRM . computeRowMax ( QT , k , k + 1 , N ) ; if ( max > 0 ) { double gamma = QrHelperFunctions_ZDRM . computeTauGammaAndDivide ( k * N + k + 1 , k * N + N , t , max , tau ) ; gammas [ k ] = gamma ; // divide u by u _ 0
double real_u_0 = t [ ( k * N + k + 1 ) * 2 ] + tau . real ; double imag_u_0 = t [ ( k * N + k + 1 ) * 2 + 1 ] + tau . imaginary ; QrHelperFunctions_ZDRM . divideElements ( k + 2 , N , t , k * N , real_u_0 , imag_u_0 ) ; // A column is zeroed first . However a row is being used to store because it reduces
// cache misses . Need to compute the conjugate to have the correct householder operation
for ( int i = k + 2 ; i < N ; i ++ ) { t [ ( k * N + i ) * 2 + 1 ] = - t [ ( k * N + i ) * 2 + 1 ] ; } t [ ( k * N + k + 1 ) * 2 ] = 1.0 ; t [ ( k * N + k + 1 ) * 2 + 1 ] = 0 ; // - - - - - Specialized householder that takes advantage of the symmetry
// QrHelperFunctions _ ZDRM . rank1UpdateMultR ( QT , QT . data , k * N , gamma , k + 1 , k + 1 , N , w ) ;
// QrHelperFunctions _ ZDRM . rank1UpdateMultL ( QT , QT . data , k * N , gamma , k + 1 , k + 1 , N ) ;
householderSymmetric ( k , gamma ) ; // since the first element in the householder vector is known to be 1
// store the full upper hessenberg
t [ ( k * N + k + 1 ) * 2 ] = - tau . real * max ; t [ ( k * N + k + 1 ) * 2 + 1 ] = - tau . imaginary * max ; } else { gammas [ k ] = 0 ; } // a zero column (max == 0) needs no reflection; record gamma = 0
public class JmolSymmetryScriptGeneratorH { /** * Orients layer lines so that , in the standard orientation ( helix axis aligned with the y - axis ) , each line runs from its lowest y - axis value to its largest y - axis value . Reverses a line in place when needed . */
private List < List < Integer > > orientLayerLines ( List < List < Integer > > layerLines ) { } } | Matrix4d transformation = helixAxisAligner . getTransformation ( ) ; List < Point3d > centers = helixAxisAligner . getSubunits ( ) . getOriginalCenters ( ) ; for ( int i = 0 ; i < layerLines . size ( ) ; i ++ ) { List < Integer > layerLine = layerLines . get ( i ) ; // get center of first subunit in layerline and transform to standard orientation ( helix axis aligned with y - axis )
int first = layerLine . get ( 0 ) ; Point3d firstSubunit = new Point3d ( centers . get ( first ) ) ; transformation . transform ( firstSubunit ) ; // get center of last subunit in layerline and transform to standard orientation ( helix axis aligned with y - axis )
int last = layerLine . get ( layerLine . size ( ) - 1 ) ; Point3d lastSubunit = new Point3d ( centers . get ( last ) ) ; transformation . transform ( lastSubunit ) ; // a layerline should start at the lowest y - value , so all layerlines have a consistent direction from - y value to + y value
if ( firstSubunit . y > lastSubunit . y ) { // System . out . println ( " reorienting layer line : " + layerLine ) ;
Collections . reverse ( layerLine ) ; } } return layerLines ;
public class SQLiteQueryBuilder { /** * Build an SQL query string from the given clauses .
* @ param distinct
* true if you want each row to be unique , false otherwise .
* @ param table
* The table name to compile the query against .
* @ param columns
* A list of which columns to return . Passing null will return
* all columns , which is discouraged to prevent reading data from
* storage that isn ' t going to be used .
* @ param columnsAs
* A list of values to return the corresponding columns as
* @ param where
* A filter declaring which rows to return , formatted as an SQL
* WHERE clause ( excluding the WHERE itself ) . Passing null will
* return all rows for the given URL .
* @ param groupBy
* A filter declaring how to group rows , formatted as an SQL
* GROUP BY clause ( excluding the GROUP BY itself ) . Passing null
* will cause the rows to not be grouped .
* @ param having
* A filter declare which row groups to include in the cursor , if
* row grouping is being used , formatted as an SQL HAVING clause
* ( excluding the HAVING itself ) . Passing null will cause all row
* groups to be included , and is required when row grouping is
* not being used .
* @ param orderBy
* How to order the rows , formatted as an SQL ORDER BY clause
* ( excluding the ORDER BY itself ) . Passing null will use the
* default sort order , which may be unordered .
* @ param limit
* Limits the number of rows returned by the query , formatted as
* LIMIT clause . Passing null denotes no LIMIT clause .
* @ return the SQL query string */
public static String buildQueryString ( boolean distinct , String table , String [ ] columns , String [ ] columnsAs , String where , String groupBy , String having , String orderBy , String limit ) { } } | return buildQueryString ( distinct , new String [ ] { table } , columns , columnsAs , where , groupBy , having , orderBy , limit ) ; |
public class DomUtils {

    /**
     * Returns the first direct child element of {@code parent} whose tag name
     * equals {@code name}, or {@code null} if no such child exists.
     * Non-element child nodes (text, comments, ...) are skipped.
     *
     * @param parent parent element
     * @param name name of the child element
     * @return the matching child element, or {@code null} if not found
     */
    public static Element getChildElementByName(Element parent, String name) {
        final NodeList childNodes = parent.getChildNodes();
        final int count = childNodes.getLength();
        for (int i = 0; i < count; i++) {
            final Node child = childNodes.item(i);
            if (child.getNodeType() != Node.ELEMENT_NODE) {
                continue;
            }
            final Element candidate = (Element) child;
            if (candidate.getTagName().equals(name)) {
                return candidate;
            }
        }
        return null;
    }
}
public class BatchedJmsTemplate { /** * { @ inheritDoc } */
@ Override public Message receiveSelected ( String messageSelector ) throws JmsException { } } | Destination defaultDestination = getDefaultDestination ( ) ; if ( defaultDestination != null ) { return receiveSelected ( defaultDestination , messageSelector ) ; } else { return receiveSelected ( getRequiredDefaultDestinationName ( ) , messageSelector ) ; } |
public class WriteMethodUtil { /** * Performs the validation of keys .
* The key ( s ) in the ' OData URI ' should match the existing key ( s ) in the passed entity .
* @ param entity The passed entity .
* @ param type The entity type of the passed entity .
* @ throws com . sdl . odata . api . ODataClientException
* @ throws com . sdl . odata . api . processor . ODataProcessorException */
public static void validateKeys ( Object entity , EntityType type , ODataUri oDataUri , EntityDataModel entityDataModel ) throws ODataClientException , ODataProcessorException { } } | final Map < String , Object > oDataUriKeyValues = asJavaMap ( getEntityKeyMap ( oDataUri , entityDataModel ) ) ; final Map < String , Object > keyValues = getKeyValues ( entity , type ) ; if ( oDataUriKeyValues . size ( ) != keyValues . size ( ) ) { throw new ODataClientException ( PROCESSOR_ERROR , "Number of keys don't match" ) ; } for ( Map . Entry < String , Object > oDataUriEntry : oDataUriKeyValues . entrySet ( ) ) { String oDataUriKey = oDataUriEntry . getKey ( ) ; Object value = keyValues . get ( oDataUriKey ) ; if ( value == null || ! normalize ( value ) . equals ( normalize ( oDataUriEntry . getValue ( ) ) ) ) { throw new ODataClientException ( PROCESSOR_ERROR , "Key/Values in OData URI and the entity don't match" ) ; } } |
public class DescribeQueriesResult { /** * The list of queries that match the request .
* @ return The list of queries that match the request . */
public java . util . List < QueryInfo > getQueries ( ) { } } | if ( queries == null ) { queries = new com . amazonaws . internal . SdkInternalList < QueryInfo > ( ) ; } return queries ; |
public class UserDataHelper {

    /**
     * Returns the text content of the <em>last</em> occurrence of
     * {@code tagName} in the XML document at {@code filePath}, or the empty
     * string when the tag is absent (this matches the historical behavior of
     * the original loop, which overwrote the result on every match).
     *
     * FIXME: there must be a shorter way with XPath...
     *
     * @param filePath path of the XML file to parse
     * @param tagName tag whose text content is wanted
     * @return text content of the last matching element, or "" if none
     * @throws ParserConfigurationException if a DocumentBuilder cannot be created
     * @throws SAXException if the file is not well-formed XML
     * @throws IOException if the file cannot be read
     */
    private static String getValueOfTagInXMLFile(String filePath, String tagName)
            throws ParserConfigurationException, SAXException, IOException {
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(new File(filePath));
        // Optional, but recommended; see
        // http://stackoverflow.com/questions/13786607/normalization-in-dom-parsing-with-java-how-does-it-work
        doc.getDocumentElement().normalize();
        NodeList matches = doc.getElementsByTagName(tagName);
        int count = matches.getLength();
        // Take the last match directly instead of iterating over all matches
        // and discarding every value but the final one.
        return count == 0 ? "" : matches.item(count - 1).getTextContent();
    }
}
public class UrlConnectionGetTransport {

    /**
     * {@inheritDoc}
     *
     * Sends the request over a plain {@code HttpURLConnection} GET, requires a
     * 200 OK response, and hands the raw response bytes plus content type to
     * the supplied handler. Every I/O failure is wrapped in a
     * {@code TransportException} with a phase-specific message.
     */
    @Override
    public <T> T sendRequest(final Request msg, final ScepResponseHandler<T> handler) throws TransportException {
        URL url = getUrl(msg.getOperation(), msg.getMessage());
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Sending {} to {}", msg, url);
        }
        HttpURLConnection conn;
        try {
            conn = (HttpURLConnection) url.openConnection();
            // Install the custom socket factory only for TLS connections, and
            // only when one was configured.
            if (conn instanceof HttpsURLConnection && sslSocketFactory != null) {
                ((HttpsURLConnection) conn).setSSLSocketFactory(sslSocketFactory);
            }
        } catch (IOException e) {
            throw new TransportException(e);
        }
        try {
            int responseCode = conn.getResponseCode();
            String responseMessage = conn.getResponseMessage();
            LOGGER.debug("Received '{} {}' when sending {} to {}",
                    varargs(responseCode, responseMessage, msg, url));
            // Anything other than 200 OK is treated as a transport failure.
            if (responseCode != HttpURLConnection.HTTP_OK) {
                throw new TransportException(responseCode + " " + responseMessage);
            }
        } catch (IOException e) {
            throw new TransportException("Error connecting to server", e);
        }
        byte[] response;
        try {
            response = IOUtils.toByteArray(conn.getInputStream());
        } catch (IOException e) {
            throw new TransportException("Error reading response stream", e);
        }
        return handler.getResponse(response, conn.getContentType());
    }
}
public class ThriftService {
    // ----- Public DBService methods: Queries

    /**
     * Returns the columns of a single row whose column names fall in the range
     * [startColumn, endColumn], limited to at most {@code count} columns.
     *
     * @param storeName store (column family) to read from
     * @param rowKey key of the row to read
     * @param startColumn first column name of the slice (inclusive)
     * @param endColumn last column name of the slice
     * @param count maximum number of columns to return
     * @return the matching columns as {@code DColumn} name/value pairs
     */
    @Override
    public List<DColumn> getColumns(String storeName, String rowKey, String startColumn,
            String endColumn, int count) {
        DBConn dbConn = getDBConnection();
        try {
            List<ColumnOrSuperColumn> columns = dbConn.getSlice(
                    CassandraDefs.columnParent(storeName),
                    CassandraDefs.slicePredicateStartEndCol(
                            Utils.toBytes(startColumn), Utils.toBytes(endColumn), count),
                    Utils.toByteBuffer(rowKey));
            List<DColumn> result = new ArrayList<>(columns.size());
            for (ColumnOrSuperColumn column : columns) {
                result.add(new DColumn(column.getColumn().getName(), column.getColumn().getValue()));
            }
            return result;
        } finally {
            // Always return the pooled connection, even if the slice query throws.
            returnDBConnection(dbConn);
        }
    }
}
public class HandshakeUtil { /** * Prepare for upgrade */
public static void prepareUpgrade ( final ServerEndpointConfig config , final WebSocketHttpExchange exchange ) { } } | ExchangeHandshakeRequest request = new ExchangeHandshakeRequest ( exchange ) ; ExchangeHandshakeResponse response = new ExchangeHandshakeResponse ( exchange ) ; ServerEndpointConfig . Configurator c = config . getConfigurator ( ) ; c . modifyHandshake ( config , request , response ) ; response . update ( ) ; |
public class Logger {

    /**
     * Log a message at the supplied level according to the specified format and
     * (optional) parameters. The message should contain a pair of empty curly
     * braces for each parameter, passed in the correct order. This method is
     * efficient and avoids superfluous object creation when the logger is
     * disabled for the desired level.
     *
     * @param level the level at which to log
     * @param message the (localized) message string; a {@code null} message is ignored
     * @param params the parameter values that replace the variables in the format string
     */
    public void log(Level level, I18nResource message, Object... params) {
        if (message == null) return;
        // NOTE(review): DEBUG and TRACE localize the message eagerly via
        // message.text(...), while ERROR/INFO/WARNING pass the I18nResource
        // through to the delegate overloads -- presumably those localize
        // lazily; confirm this asymmetry is intentional.
        switch (level) {
            case DEBUG:
                debug(message.text(LOGGING_LOCALE.get(), params));
                break;
            case ERROR:
                error(message, params);
                break;
            case INFO:
                info(message, params);
                break;
            case TRACE:
                trace(message.text(LOGGING_LOCALE.get(), params));
                break;
            case WARNING:
                warn(message, params);
                break;
            case OFF:
                // Logging switched off: deliberately do nothing.
                break;
        }
    }
}
public class TaskSession {

    /**
     * Make a table for this database.
     *
     * Creates the record from its class name, configures the (base) database it
     * belongs to as a slave, wraps the record in a (possibly custom)
     * {@code TableSession}, and copies the supplied remote properties onto it.
     * The session's own properties and master/slave mode are restored before
     * returning.
     *
     * @param strRecordClassName the record class name
     * @param strTableSessionClassName the (optional) session class name for the table
     * @param properties the properties for the remote table
     * @param propDatabase the properties for the remote db to add this new table to
     * @return the new remote table, or {@code null} if the record class could not
     *         be instantiated or the table session failed to initialize
     */
    public RemoteTable makeRemoteTable(String strRecordClassName, String strTableSessionClassName,
            Map<String, Object> properties, Map<String, Object> propDatabase) throws RemoteException {
        Record record = (Record) ClassServiceUtility.getClassService().makeObjectFromClassName(strRecordClassName);
        if (record == null)
            return null;
        if (strTableSessionClassName == null)
            this.setMasterSlave(RecordOwner.SLAVE);     // If no table is specified, TableSession is a slave
        // Optional override of the record's table name from the remote properties.
        if (properties != null)
            if (properties.get(RecordMessageConstants.TABLE_NAME) != null)
                record.setTableNames((String) properties.get(RecordMessageConstants.TABLE_NAME));
        Map<String, Object> propOld = this.getProperties();
        this.setProperties(propDatabase);   // This will cause the database owner to be correct (App by default, this if AUTO_COMMIT is off)
        Map<String, Object> propDBOld = this.addDatabaseProperties(propDatabase, this.getDatabaseOwner());
        BaseDatabase database = null;
        if (this.getDatabaseOwner() != null) {
            // Always - Make sure the database is set up correctly BEFORE I create the record.
            database = (BaseDatabase) this.getDatabaseOwner().getDatabase(
                    record.getDatabaseName(), record.getDatabaseType(), null);
            if (database != null) {
                // Rare - only on querytables, then the database will have already been set up.
                database.setMasterSlave(RecordOwner.SLAVE);     // Don't create client behaviors
                if (propDatabase != null) {
                    propDatabase.remove(DBParams.MESSAGES_TO_REMOTE);   // Don't copy these down
                    propDatabase.remove(DBParams.CREATE_REMOTE_FILTER);
                    propDatabase.remove(DBParams.UPDATE_REMOTE_FILTER);
                    database.getProperties().putAll(propDatabase);  // Add these properties to the current db properties.
                }
            }
        }
        record.init(this);
        if (database == null) {
            // Better late than never: resolve the database via the record's own table.
            database = record.getTable().getDatabase();
            if (database != null) {
                // Rare - only on querytables, then the database will have already been set up.
                database.setMasterSlave(RecordOwner.SLAVE);     // Don't create client behaviors
                if (propDatabase != null)
                    database.getProperties().putAll(propDatabase);  // Add these properties to the current db properties.
            }
        }
        // Restore the session state changed above.
        this.getDatabaseOwner().setProperties(propDBOld);
        this.setProperties(propOld);
        this.setMasterSlave(-1);    // Back to default
        RemoteTable remoteTable = null;
        if (strTableSessionClassName != null)
            remoteTable = (TableSession) ClassServiceUtility.getClassService()
                    .makeObjectFromClassName(strTableSessionClassName);
        try {
            if (remoteTable == null)
                remoteTable = new TableSession();
            this.removeRecord(record);  // Record should belong to the TableSessionObject. (I just added it to this, so it could access the environment)
            ((TableSession) remoteTable).init(this, record, null);
        } catch (Exception ex) {
            // NOTE(review): any init failure is silently converted into a null
            // return -- the cause is discarded; consider logging it.
            remoteTable = null;
        }
        if (remoteTable != null)
            if (properties != null) {
                for (String strName : properties.keySet()) {
                    String strValue = (String) properties.get(strName);
                    try {
                        remoteTable.setRemoteProperty(strName, strValue);
                    } catch (RemoteException ex) {  // Never
                    }
                }
            }
        return remoteTable;
    }
}
public class VLDockingUtils { /** * Transforms a UI object key into an activation aware key name .
* @ param key
* the key .
* @ param active
* < code > true < / code > for < em > active < / em > UI keys and < code > false < / code > for inactive .
* @ return the transformed key . */
public static String activationKey ( String key , Boolean active ) { } } | Assert . notNull ( key , "key" ) ; Assert . notNull ( active , "active" ) ; final int index = key . lastIndexOf ( VLDockingUtils . DOT ) ; final String overlay = active ? VLDockingUtils . ACTIVE_INFIX : VLDockingUtils . INACTIVE_INFIX ; // return StringUtils . overlay ( key , overlay , index , index ) ;
return key ; // TODO |
public class BigtableTableAdminClient { /** * Helper method to transform ApiFuture < Empty > to ApiFuture < Void > */
private static ApiFuture < Void > transformToVoid ( ApiFuture < Empty > future ) { } } | return ApiFutures . transform ( future , new ApiFunction < Empty , Void > ( ) { @ Override public Void apply ( Empty empty ) { return null ; } } , MoreExecutors . directExecutor ( ) ) ; |
public class CPOptionCategoryUtil {

    /**
     * Returns a range of all the cp option categories where companyId = &#63;.
     *
     * Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are not primary keys, they
     * are indexes in the result set; thus <code>0</code> refers to the first result.
     * Setting both to {@link QueryUtil#ALL_POS} returns the full result set. With no
     * comparator the default ORDER BY logic from {@link CPOptionCategoryModelImpl}
     * applies when paginating; otherwise results are sorted by primary key ascending.
     *
     * @param companyId the company ID
     * @param start the lower bound of the range of cp option categories
     * @param end the upper bound of the range of cp option categories (not inclusive)
     * @return the range of matching cp option categories
     */
    public static List<CPOptionCategory> findByCompanyId(long companyId, int start, int end) {
        // Thin static facade over the persistence bean.
        return getPersistence().findByCompanyId(companyId, start, end);
    }
}
public class UnnecessaryStoreBeforeReturn {

    /**
     * Implements the visitor to make sure the method returns a value, and then
     * resets the detector state (scan state, branch targets, catch handler
     * targets, opcode stack) before scanning the method body. Void methods are
     * skipped entirely since they cannot store a value before a return.
     *
     * @param obj the context object of the currently parsed code block
     */
    @Override
    public void visitCode(Code obj) {
        Method m = getMethod();
        String sig = m.getSignature();
        // Only methods that return a value are interesting for this detector.
        if (!Values.SIG_VOID.equals(SignatureUtils.getReturnSignature(sig))) {
            state = State.SEEN_NOTHING;
            branchTargets.clear();
            CodeException[] ces = obj.getExceptionTable();
            catchTargets.clear();
            stack.resetForMethodEntry(this);
            // Record the handler PCs of typed catch blocks (catch type 0 means
            // an "any" handler, e.g. finally) so stores there aren't flagged.
            for (CodeException ce : ces) {
                if (ce.getCatchType() != 0) {
                    catchTargets.set(ce.getHandlerPC());
                }
            }
            super.visitCode(obj);
        }
    }
}
public class JarFile { /** * Synchronized method for adding a new conent
* @ param record
* @ param filename */
protected synchronized void addContent ( Artifact record , String filename ) { } } | if ( record != null ) { if ( filename . endsWith ( ".jar" ) ) { // this is an embedded archive
embedded . add ( record ) ; } else { contents . add ( record ) ; } } |
public class TransactionImpl { /** * { @ inheritDoc } */
public void commit ( ) throws RollbackException , HeuristicMixedException , HeuristicRollbackException , SecurityException , IllegalStateException , SystemException { } } | if ( status == Status . STATUS_UNKNOWN ) throw new IllegalStateException ( "Status unknown" ) ; if ( status == Status . STATUS_MARKED_ROLLBACK ) throw new IllegalStateException ( "Status marked rollback" ) ; finish ( true ) ; |
public class HSTreeNode { /** * Update the mass profile of this node .
* @ param inst the instance being passed through the HSTree .
* @ param referenceWindow if the HSTree is in the initial reference window : < b > true < / b > , else : < b > false < / b > */
public void updateMass ( Instance inst , boolean referenceWindow ) { } } | if ( referenceWindow ) r ++ ; else l ++ ; if ( internalNode ) { if ( inst . value ( this . splitAttribute ) > this . splitValue ) right . updateMass ( inst , referenceWindow ) ; else left . updateMass ( inst , referenceWindow ) ; } |
public class HashIntSet {

    /**
     * Find the position of the integer in {@link #cells} by open-addressed
     * probing.
     *
     * @param element element to search; must be non-negative
     * @return the index of the element if present; otherwise, if a REMOVED cell
     *         was passed while probing, the encoded value
     *         {@code -(removedIndex + 1)} (so the removed index is
     *         {@code -returnedValue - 1}); otherwise the non-negative index of
     *         the EMPTY cell that ended the probe.
     *         NOTE(review): a non-negative return therefore does NOT by itself
     *         mean the element was found -- callers apparently must inspect
     *         {@code cells[index]}; confirm against call sites. The original
     *         javadoc's "-(returned value - 1)" decoding was incorrect.
     */
    private int findElementOrRemoved(int element) {
        assert element >= 0;
        int index = toIndex(IntHashCode.hashCode(element));
        int offset = 1;
        int removed = -1;
        while (cells[index] != EMPTY) {
            // element found!
            if (cells[index] == element) {
                return index;
            }
            // remember the last removed cell if we don't find the element
            if (cells[index] == REMOVED) {
                removed = index;
            }
            index = toIndex(index + offset);
            // Probe step grows as offset = offset*2 + 1; reset to 2 on int overflow.
            offset <<= 1;
            offset++;
            if (offset < 0) {
                offset = 2;
            }
        }
        if (removed >= 0) {
            return -(removed + 1);
        }
        return index;
    }
}
public class MongoDBClient { /** * Method to find entity for given association name and association value .
* @ param colName
* the col name
* @ param colValue
* the col value
* @ param entityClazz
* the entity clazz
* @ return the list */
public List < Object > findByRelation ( String colName , Object colValue , Class entityClazz ) { } } | EntityMetadata m = KunderaMetadataManager . getEntityMetadata ( kunderaMetadata , entityClazz ) ; // you got column name and column value .
DBCollection dbCollection = mongoDb . getCollection ( m . getTableName ( ) ) ; BasicDBObject query = new BasicDBObject ( ) ; query . put ( colName , MongoDBUtils . populateValue ( colValue , colValue . getClass ( ) ) ) ; KunderaCoreUtils . printQuery ( "Find by relation:" + query , showQuery ) ; DBCursor cursor = dbCollection . find ( query ) ; DBObject fetchedDocument = null ; List < Object > results = new ArrayList < Object > ( ) ; while ( cursor . hasNext ( ) ) { fetchedDocument = cursor . next ( ) ; populateEntity ( m , results , fetchedDocument ) ; } return results . isEmpty ( ) ? null : results ; |
public class BuiltinAuthorizationService { /** * Check if the subject has a WScredential , is authenticated , and is not a basic auth credential .
* @ param subject
* the subject to check
* @ return true if the subject has a WSCredential that is not marked as
* unauthenticated and is not marked as basic auth , otherwise false */
private boolean isSubjectValid ( Subject subject ) { } } | final WSCredential wsCred = getWSCredentialFromSubject ( subject ) ; if ( wsCred == null ) { return false ; } else { // TODO revisit this when EJBs are supported add additional
// checks would be required
return ! wsCred . isUnauthenticated ( ) && ! wsCred . isBasicAuth ( ) ; } |
public class ListModelPackagesResult { /** * An array of < code > ModelPackageSummary < / code > objects , each of which lists a model package .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setModelPackageSummaryList ( java . util . Collection ) } or
* { @ link # withModelPackageSummaryList ( java . util . Collection ) } if you want to override the existing values .
* @ param modelPackageSummaryList
* An array of < code > ModelPackageSummary < / code > objects , each of which lists a model package .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListModelPackagesResult withModelPackageSummaryList ( ModelPackageSummary ... modelPackageSummaryList ) { } } | if ( this . modelPackageSummaryList == null ) { setModelPackageSummaryList ( new java . util . ArrayList < ModelPackageSummary > ( modelPackageSummaryList . length ) ) ; } for ( ModelPackageSummary ele : modelPackageSummaryList ) { this . modelPackageSummaryList . add ( ele ) ; } return this ; |
public class JTrees { /** * Returns the parent of the given node in the given tree model .
* This parent may be < code > null < / code > , if the given node is
* the root node ( or not contained in the tree model at all ) .
* @ param treeModel The tree model
* @ param node The node
* @ param potentialParent The potential parent
* @ return The parent */
private static Object getParent ( TreeModel treeModel , Object node , Object potentialParent ) { } } | List < Object > children = getChildren ( treeModel , potentialParent ) ; for ( Object child : children ) { if ( child == node ) { return potentialParent ; } Object parent = getParent ( treeModel , node , child ) ; if ( parent != null ) { return parent ; } } return null ; |
public class BitcoinSerializer {

    /**
     * Make a block from the payload. Extension point for alternative
     * serialization format support.
     *
     * @param payloadBytes raw serialized block bytes
     * @param offset start of the block data within {@code payloadBytes}
     * @param length number of bytes belonging to the block
     * @return the deserialized block, parsed with this serializer's params
     * @throws ProtocolException if the payload cannot be parsed
     */
    @Override
    public Block makeBlock(final byte[] payloadBytes, final int offset, final int length) throws ProtocolException {
        return new Block(params, payloadBytes, offset, this, length);
    }
}
public class SecretListEntry {

    /**
     * A list of all of the currently assigned <code>SecretVersionStage</code>
     * staging labels and the <code>SecretVersionId</code> that each is attached
     * to. Staging labels are used to keep track of the different versions
     * during the rotation process.
     * <note>
     * A version that does not have any <code>SecretVersionStage</code> is
     * considered deprecated and subject to deletion. Such versions are not
     * included in this list.
     * </note>
     *
     * @param secretVersionsToStages map from staging label to the version ids it
     *        is attached to; stored as-is (no defensive copy)
     */
    public void setSecretVersionsToStages(java.util.Map<String, java.util.List<String>> secretVersionsToStages) {
        this.secretVersionsToStages = secretVersionsToStages;
    }
}
public class TransformerIdentityImpl {

    /**
     * Get an output property that is in effect for the transformation. The
     * property specified may be a property that was set with setOutputProperty,
     * or it may be a property specified in the stylesheet.
     *
     * @param name A non-null String that specifies an output property name,
     *        which may be namespace qualified.
     * @return The string value of the output property, or null if no property
     *         was found.
     * @throws IllegalArgumentException If the property is not supported.
     * @see javax.xml.transform.OutputKeys
     */
    public String getOutputProperty(String name) throws IllegalArgumentException {
        String value = null;
        OutputProperties props = m_outputFormat;
        value = props.getProperty(name);
        if (null == value) {
            // A legal-but-unset key simply yields null; only names that are not
            // legal output property keys at all are rejected.
            if (!OutputProperties.isLegalPropertyKey(name))
                throw new IllegalArgumentException(XSLMessages.createMessage(
                        XSLTErrorResources.ER_OUTPUT_PROPERTY_NOT_RECOGNIZED, new Object[] { name }));
                // "output property not recognized: " + name
        }
        return value;
    }
}
public class TransliteratorIDParser { /** * Parse a filter ID , that is , an ID of the general form
* " [ f1 ] s1 - t1 / v1 " , with the filters optional , and the variants optional .
* @ param id the id to be parsed
* @ param pos INPUT - OUTPUT parameter . On input , the position of
* the first character to parse . On output , the position after
* the last character parsed .
* @ return a SingleID object or null if the parse fails */
public static SingleID parseFilterID ( String id , int [ ] pos ) { } } | int start = pos [ 0 ] ; Specs specs = parseFilterID ( id , pos , true ) ; if ( specs == null ) { pos [ 0 ] = start ; return null ; } // Assemble return results
SingleID single = specsToID ( specs , FORWARD ) ; single . filter = specs . filter ; return single ; |
public class DataStorage {

    /**
     * Analyze storage directories.
     * Recover from previous transitions if required.
     * Perform fs state transition if necessary depending on the namespace info.
     * Read storage info.
     *
     * @param datanode the data-node whose port seeds a new storage id if none is set
     * @param nsInfo namespace information
     * @param dataDirs array of data storage directories
     * @param startOpt startup option
     * @throws IOException if the layout versions differ or a directory cannot be transitioned
     */
    synchronized void recoverTransitionRead(DataNode datanode, NamespaceInfo nsInfo,
            Collection<File> dataDirs, StartupOption startOpt) throws IOException {
        if (initialized) {
            // DN storage has been initialized, no need to do anything
            return;
        }
        // Data-node and name-node must agree on the on-disk layout version.
        if (FSConstants.LAYOUT_VERSION != nsInfo.getLayoutVersion()) {
            throw new IOException("Data-node and name-node layout versions must be the same. Namenode LV: "
                    + nsInfo.getLayoutVersion() + ", current LV: " + FSConstants.LAYOUT_VERSION);
        }
        // 1. For each data directory calculate its state and
        // check whether all is consistent before transitioning.
        // Format and recover.
        analyzeStorageDirs(nsInfo, dataDirs, startOpt);
        // 2. Do transitions
        // Each storage directory is treated individually.
        // During startup some of them can upgrade or rollback
        // while others could be uptodate for the regular startup.
        doTransition(storageDirs, nsInfo, startOpt);
        // 3. make sure we have storage id set - if not - generate new one
        createStorageID(datanode.getPort());
        // 4. Update all storages. Some of them might have just been formatted.
        this.writeAll();
        this.initialized = true;
    }
}
public class ScalingPlanMarshaller {

    /**
     * Marshall the given parameter object field-by-field using the generated
     * binding descriptors.
     *
     * @param scalingPlan the object to marshall; must not be null
     * @param protocolMarshaller the target protocol marshaller
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ScalingPlan scalingPlan, ProtocolMarshaller protocolMarshaller) {
        if (scalingPlan == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(scalingPlan.getScalingPlanName(), SCALINGPLANNAME_BINDING);
            protocolMarshaller.marshall(scalingPlan.getScalingPlanVersion(), SCALINGPLANVERSION_BINDING);
            protocolMarshaller.marshall(scalingPlan.getApplicationSource(), APPLICATIONSOURCE_BINDING);
            protocolMarshaller.marshall(scalingPlan.getScalingInstructions(), SCALINGINSTRUCTIONS_BINDING);
            protocolMarshaller.marshall(scalingPlan.getStatusCode(), STATUSCODE_BINDING);
            protocolMarshaller.marshall(scalingPlan.getStatusMessage(), STATUSMESSAGE_BINDING);
            protocolMarshaller.marshall(scalingPlan.getStatusStartTime(), STATUSSTARTTIME_BINDING);
            protocolMarshaller.marshall(scalingPlan.getCreationTime(), CREATIONTIME_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ElementWithOptions {

    /**
     * Adds an option to the group of this element with the given id. If the
     * group is not found, it's created with null text. A {@code null} value is
     * stored as the empty string.
     *
     * @param value unique value in this element
     * @param text option text
     * @param groupId id of the option group
     * @return this element
     */
    public ElementWithOptions addOptionToGroup(Object value, String text, String groupId) {
        final String valueString = (value == null) ? "" : value.toString();
        return addOptionToGroup(valueString, text, groupId);
    }
}
public class ActivationImpl { /** * Get the beanValidationGroups .
* @ return the beanValidationGroups . */
@ Override public List < String > getBeanValidationGroups ( ) { } } | return beanValidationGroups == null ? null : Collections . unmodifiableList ( beanValidationGroups ) ; |
public class DbxClientV1 {

    /**
     * Create a file or folder at {@code toPath} based on the given copy ref
     * (created with {@link #createCopyRef}).
     *
     * @param copyRef the copy ref; must be non-null and non-empty
     * @param toPath the destination path; must be a non-root Dropbox path
     * @return the created entry, or {@code null} if the server response carried
     *         no entry
     */
    public /*@Nullable*/ DbxEntry copyFromCopyRef(String copyRef, String toPath) throws DbxException {
        if (copyRef == null) throw new IllegalArgumentException("'copyRef' can't be null");
        if (copyRef.length() == 0) throw new IllegalArgumentException("'copyRef' can't be empty");
        DbxPathV1.checkArgNonRoot("toPath", toPath);
        // Wire parameters for the v1 fileops/copy endpoint.
        String[] params = { "root", "auto", "from_copy_ref", copyRef, "to_path", toPath, };
        return doPost(host.getApi(), "1/fileops/copy", params, null,
                new DbxRequestUtil.ResponseHandler</*@Nullable*/ DbxEntry>() {
            @Override
            public /*@Nullable*/ DbxEntry handle(HttpRequestor.Response response) throws DbxException {
                if (response.getStatusCode() != 200) throw DbxRequestUtil.unexpectedStatus(response);
                DbxEntry.WithChildren dwc =
                        DbxRequestUtil.readJsonFromResponse(DbxEntry.WithChildren.Reader, response);
                if (dwc == null) return null;  // TODO: When can this happen?
                return dwc.entry;
            }
        });
    }
}
public class PojoDataParser {

    /**
     * {@inheritDoc}
     *
     * Returns an Rx transformer that maps each {@code ParseComponentsOp}
     * through {@link #parseComponent} (invoked with the op's three arguments)
     * to produce the parsed cell list.
     */
    @NonNull
    @Override
    public ObservableTransformer<ParseComponentsOp, List<BaseCell>> getComponentTransformer() {
        return new ObservableTransformer<ParseComponentsOp, List<BaseCell>>() {
            @Override
            public ObservableSource<List<BaseCell>> apply(Observable<ParseComponentsOp> upstream) {
                // One-to-one mapping: every op yields exactly one cell list.
                return upstream.map(new Function<ParseComponentsOp, List<BaseCell>>() {
                    @Override
                    public List<BaseCell> apply(ParseComponentsOp parseComponentsOp) throws Exception {
                        return parseComponent(parseComponentsOp.getArg1(),
                                parseComponentsOp.getArg2(), parseComponentsOp.getArg3());
                    }
                });
            }
        };
    }
}
public class CheckJSDoc { /** * Check that a parameter with a default value is marked as optional .
* TODO ( bradfordcsmith ) : This is redundant . We shouldn ' t require it . */
private void validateDefaultValue ( Node n ) { } } | if ( n . isDefaultValue ( ) && n . getParent ( ) . isParamList ( ) ) { Node targetNode = n . getFirstChild ( ) ; JSDocInfo info = targetNode . getJSDocInfo ( ) ; if ( info == null ) { return ; } JSTypeExpression typeExpr = info . getType ( ) ; if ( typeExpr == null ) { return ; } Node typeNode = typeExpr . getRoot ( ) ; if ( typeNode . getToken ( ) != Token . EQUALS ) { report ( typeNode , DEFAULT_PARAM_MUST_BE_MARKED_OPTIONAL ) ; } } |
public class OntRelationMention { /** * getter for range - gets
* @ generated
* @ return value of the feature */
public Annotation getRange ( ) { } } | if ( OntRelationMention_Type . featOkTst && ( ( OntRelationMention_Type ) jcasType ) . casFeat_range == null ) jcasType . jcas . throwFeatMissing ( "range" , "de.julielab.jules.types.OntRelationMention" ) ; return ( Annotation ) ( jcasType . ll_cas . ll_getFSForRef ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( OntRelationMention_Type ) jcasType ) . casFeatCode_range ) ) ) ; |
public class CIFReader { /** * Process double in the format : ' . 071(1 ) ' . */
private double parseIntoDouble ( String value ) { } } | double returnVal = 0.0 ; if ( value . charAt ( 0 ) == '.' ) value = "0" + value ; int bracketIndex = value . indexOf ( '(' ) ; if ( bracketIndex != - 1 ) { value = value . substring ( 0 , bracketIndex ) ; } try { returnVal = Double . parseDouble ( value ) ; } catch ( Exception exception ) { logger . error ( "Could not parse double string: " , value ) ; } return returnVal ; |
public class MMDCfgPanel { /** * GEN - LAST : event _ colorChooserRootTextActionPerformed */
private void colorChooser1stBackgroundActionPerformed ( java . awt . event . ActionEvent evt ) { } } | // GEN - FIRST : event _ colorChooser1stBackgroundActionPerformed
if ( this . colorChooser1stBackground . isLastOkPressed ( ) && changeNotificationAllowed ) { this . controller . changed ( ) ; } |
public class HttpTools { /** * POST content to the URL with the specified body
* @ param url URL to use in the request
* @ param jsonBody Body to use in the request
* @ return String content
* @ throws MovieDbException exception */
public String postRequest ( final URL url , final String jsonBody ) throws MovieDbException { } } | try { HttpPost httpPost = new HttpPost ( url . toURI ( ) ) ; httpPost . addHeader ( HTTP . CONTENT_TYPE , APPLICATION_JSON ) ; httpPost . addHeader ( HttpHeaders . ACCEPT , APPLICATION_JSON ) ; StringEntity params = new StringEntity ( jsonBody , ContentType . APPLICATION_JSON ) ; httpPost . setEntity ( params ) ; return validateResponse ( DigestedResponseReader . postContent ( httpClient , httpPost , CHARSET ) , url ) ; } catch ( URISyntaxException | IOException ex ) { throw new MovieDbException ( ApiExceptionType . CONNECTION_ERROR , null , url , ex ) ; } |
public class GlobalUsersInner { /** * List Environments for the user .
* @ param userName The name of the user .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < ListEnvironmentsResponseInner > listEnvironmentsAsync ( String userName , final ServiceCallback < ListEnvironmentsResponseInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( listEnvironmentsWithServiceResponseAsync ( userName ) , serviceCallback ) ; |
public class InstanceGroup { /** * The EBS block devices that are mapped to this instance group .
* @ return The EBS block devices that are mapped to this instance group . */
public java . util . List < EbsBlockDevice > getEbsBlockDevices ( ) { } } | if ( ebsBlockDevices == null ) { ebsBlockDevices = new com . amazonaws . internal . SdkInternalList < EbsBlockDevice > ( ) ; } return ebsBlockDevices ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.