signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class BaseMojo { /** * Assembles the archive file . Optionally , copies to the war application directory if the * packaging type is " war " . * @ throws Exception Unspecified exception . */ protected void assembleArchive ( ) throws Exception { } }
getLog ( ) . info ( "Assembling " + classifier + " archive" ) ; if ( resources != null && ! resources . isEmpty ( ) ) { getLog ( ) . info ( "Copying additional resources." ) ; new ResourceProcessor ( this , moduleBase , resources ) . transform ( ) ; } if ( configTemplate != null ) { getLog ( ) . info ( "Creating config file." ) ; configTemplate . addEntry ( "info" , pluginDescriptor . getName ( ) , pluginDescriptor . getVersion ( ) , new Date ( ) . toString ( ) ) ; configTemplate . createFile ( stagingDirectory ) ; } try { File archive = createArchive ( ) ; if ( "war" . equalsIgnoreCase ( mavenProject . getPackaging ( ) ) && this . warInclusion ) { webappLibDirectory . mkdirs ( ) ; File webappLibArchive = new File ( this . webappLibDirectory , archive . getName ( ) ) ; Files . copy ( archive , webappLibArchive ) ; } } catch ( Exception e ) { throw new RuntimeException ( "Exception occurred assembling archive." , e ) ; }
public class CmsPriorityResourceCollector { /** * Returns a list of all resource from specified folder that have been mapped to * the currently requested uri , sorted by priority , then date ascending or descending . < p > * @ param cms the current OpenCms user context * @ param param the folder name to use * @ param asc if true , the date sort order is ascending , otherwise descending * @ param numResults the number of results * @ return all resources in the folder matching the given criteria * @ throws CmsException if something goes wrong */ protected List < CmsResource > allMappedToUriPriorityDate ( CmsObject cms , String param , boolean asc , int numResults ) throws CmsException { } }
CmsCollectorData data = new CmsCollectorData ( param ) ; String foldername = CmsResource . getFolderPath ( data . getFileName ( ) ) ; CmsResourceFilter filter = CmsResourceFilter . DEFAULT . addRequireType ( data . getType ( ) ) . addExcludeFlags ( CmsResource . FLAG_TEMPFILE ) ; if ( data . isExcludeTimerange ( ) && ! cms . getRequestContext ( ) . getCurrentProject ( ) . isOnlineProject ( ) ) { // include all not yet released and expired resources in an offline project filter = filter . addExcludeTimerange ( ) ; } List < CmsResource > result = cms . readResources ( foldername , filter , true ) ; List < CmsResource > mapped = new ArrayList < CmsResource > ( ) ; // sort out the resources mapped to the current page Iterator < CmsResource > i = result . iterator ( ) ; while ( i . hasNext ( ) ) { CmsResource res = i . next ( ) ; // read all properties - reason : comparator will do this later anyway , so we just prefill the cache CmsProperty prop = cms . readPropertyObject ( res , PROPERTY_CHANNEL , false ) ; if ( ! prop . isNullProperty ( ) ) { if ( CmsProject . isInsideProject ( prop . getValueList ( ) , cms . getRequestContext ( ) . getSiteRoot ( ) + cms . getRequestContext ( ) . getUri ( ) ) ) { mapped . add ( res ) ; } } } if ( mapped . isEmpty ( ) ) { // nothing was mapped , no need for further processing return mapped ; } // create priority comparator to use to sort the resources CmsPriorityDateResourceComparator comparator = new CmsPriorityDateResourceComparator ( cms , asc ) ; Collections . sort ( mapped , comparator ) ; return shrinkToFit ( mapped , data . getCount ( ) , numResults ) ;
public class CommerceTaxFixedRateLocalServiceUtil { /** * Updates the commerce tax fixed rate in the database or adds it if it does not yet exist . Also notifies the appropriate model listeners . * @ param commerceTaxFixedRate the commerce tax fixed rate * @ return the commerce tax fixed rate that was updated */ public static com . liferay . commerce . tax . engine . fixed . model . CommerceTaxFixedRate updateCommerceTaxFixedRate ( com . liferay . commerce . tax . engine . fixed . model . CommerceTaxFixedRate commerceTaxFixedRate ) { } }
return getService ( ) . updateCommerceTaxFixedRate ( commerceTaxFixedRate ) ;
public class Permission { /** * Turn permissions into text . */ public static List < String > transformText ( Context context , String ... permissions ) { } }
return transformText ( context , Arrays . asList ( permissions ) ) ;
public class Splitter { /** * Calls either of two op methods depending on start + length > size . If so * calls op with 5 parameters , otherwise op with 3 parameters . * @ param obj * @ param start * @ param length * @ return * @ throws IOException */ public int split ( T obj , int start , int length ) throws IOException { } }
int count = 0 ; if ( length > 0 ) { int end = ( start + length ) % size ; if ( start < end ) { count = op ( obj , start , end ) ; } else { if ( end > 0 ) { count = op ( obj , start , size , 0 , end ) ; } else { count = op ( obj , start , size ) ; } } } assert count <= length ; return count ;
public class BinaryTokenizer { /** * @ see org . apache . hadoop . hdfs . tools . offlineEditsViewer . Tokenizer # read * @ param t a Token to read * @ return token that was just read */ @ Override public Token read ( Token t ) throws IOException { } }
t . offset = is . getChannel ( ) . position ( ) ; t . fromBinary ( in ) ; return t ;
public class Db { /** * Save record . * < pre > * Example : * Record userRole = new Record ( ) . set ( " user _ id " , 123 ) . set ( " role _ id " , 456 ) ; * Db . save ( " user _ role " , " user _ id , role _ id " , userRole ) ; * < / pre > * @ param tableName the table name of the table * @ param primaryKey the primary key of the table , composite primary key is separated by comma character : " , " * @ param record the record will be saved * @ param true if save succeed otherwise false */ public static boolean save ( String tableName , String primaryKey , Record record ) { } }
return MAIN . save ( tableName , primaryKey , record ) ;
public class ReportInstanceStatusRequest { /** * The reason codes that describe the health state of your instance . * < ul > * < li > * < code > instance - stuck - in - state < / code > : My instance is stuck in a state . * < / li > * < li > * < code > unresponsive < / code > : My instance is unresponsive . * < / li > * < li > * < code > not - accepting - credentials < / code > : My instance is not accepting my credentials . * < / li > * < li > * < code > password - not - available < / code > : A password is not available for my instance . * < / li > * < li > * < code > performance - network < / code > : My instance is experiencing performance problems that I believe are network * related . * < / li > * < li > * < code > performance - instance - store < / code > : My instance is experiencing performance problems that I believe are * related to the instance stores . * < / li > * < li > * < code > performance - ebs - volume < / code > : My instance is experiencing performance problems that I believe are related * to an EBS volume . * < / li > * < li > * < code > performance - other < / code > : My instance is experiencing performance problems . * < / li > * < li > * < code > other < / code > : [ explain using the description parameter ] * < / li > * < / ul > * @ param reasonCodes * The reason codes that describe the health state of your instance . < / p > * < ul > * < li > * < code > instance - stuck - in - state < / code > : My instance is stuck in a state . * < / li > * < li > * < code > unresponsive < / code > : My instance is unresponsive . * < / li > * < li > * < code > not - accepting - credentials < / code > : My instance is not accepting my credentials . * < / li > * < li > * < code > password - not - available < / code > : A password is not available for my instance . * < / li > * < li > * < code > performance - network < / code > : My instance is experiencing performance problems that I believe are * network related . 
* < / li > * < li > * < code > performance - instance - store < / code > : My instance is experiencing performance problems that I believe * are related to the instance stores . * < / li > * < li > * < code > performance - ebs - volume < / code > : My instance is experiencing performance problems that I believe are * related to an EBS volume . * < / li > * < li > * < code > performance - other < / code > : My instance is experiencing performance problems . * < / li > * < li > * < code > other < / code > : [ explain using the description parameter ] * < / li > * @ see ReportInstanceReasonCodes */ public void setReasonCodes ( java . util . Collection < String > reasonCodes ) { } }
if ( reasonCodes == null ) { this . reasonCodes = null ; return ; } this . reasonCodes = new com . amazonaws . internal . SdkInternalList < String > ( reasonCodes ) ;
public class Fn { /** * Only for temporary use in sequential stream / single thread , not for parallel stream / multiple threads . * The returned Collection will clean up before it ' s returned every time when { @ code get } is called . * Don ' t save the returned Collection object or use it to save objects . * @ param supplier * @ return * @ see { @ code Stream . split / sliding } ; */ public static < T , C extends Collection < T > > IntFunction < ? extends C > reuse ( final IntFunction < ? extends C > supplier ) { } }
return new IntFunction < C > ( ) { private C c ; @ Override public C apply ( int size ) { if ( c == null ) { c = supplier . apply ( size ) ; } else if ( c . size ( ) > 0 ) { c . clear ( ) ; } return c ; } } ;
public class ConversionSchemas { /** * A ConversionSchema builder that defaults to building { @ link # V1 } . */ public static Builder v1Builder ( String name ) { } }
return new Builder ( name , V1MarshallerSet . marshallers ( ) , V1MarshallerSet . setMarshallers ( ) , StandardUnmarshallerSet . unmarshallers ( ) , StandardUnmarshallerSet . setUnmarshallers ( ) ) ;
public class OAuth20Utils { /** * Check the client secret . * @ param registeredService the registered service * @ param clientSecret the client secret * @ return whether the secret is valid */ public static boolean checkClientSecret ( final OAuthRegisteredService registeredService , final String clientSecret ) { } }
LOGGER . debug ( "Found: [{}] in secret check" , registeredService ) ; if ( StringUtils . isBlank ( registeredService . getClientSecret ( ) ) ) { LOGGER . debug ( "The client secret is not defined for the registered service [{}]" , registeredService . getName ( ) ) ; return true ; } if ( ! StringUtils . equals ( registeredService . getClientSecret ( ) , clientSecret ) ) { LOGGER . error ( "Wrong client secret for service: [{}]" , registeredService . getServiceId ( ) ) ; return false ; } return true ;
public class IfcObjectDefinitionImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) @ Override public EList < IfcRelAggregates > getDecomposes ( ) { } }
return ( EList < IfcRelAggregates > ) eGet ( Ifc4Package . Literals . IFC_OBJECT_DEFINITION__DECOMPOSES , true ) ;
public class DeviceAttribute_3DAODefaultImpl { public int getType ( ) throws DevFailed { } }
int type = - 1 ; try { final TypeCode tc = attrval . value . type ( ) ; // Special case for test if ( tc . kind ( ) . value ( ) == TCKind . _tk_enum ) { return TangoConst . Tango_DEV_STATE ; } final TypeCode tc_alias = tc . content_type ( ) ; final TypeCode tc_seq = tc_alias . content_type ( ) ; final TCKind kind = tc_seq . kind ( ) ; switch ( kind . value ( ) ) { case TCKind . _tk_void : type = TangoConst . Tango_DEV_VOID ; break ; case TCKind . _tk_boolean : type = TangoConst . Tango_DEV_BOOLEAN ; break ; case TCKind . _tk_char : type = TangoConst . Tango_DEV_CHAR ; break ; case TCKind . _tk_octet : type = TangoConst . Tango_DEV_UCHAR ; break ; case TCKind . _tk_short : type = TangoConst . Tango_DEV_SHORT ; break ; case TCKind . _tk_ushort : type = TangoConst . Tango_DEV_USHORT ; break ; case TCKind . _tk_long : type = TangoConst . Tango_DEV_LONG ; break ; case TCKind . _tk_ulong : type = TangoConst . Tango_DEV_ULONG ; break ; case TCKind . _tk_longlong : type = TangoConst . Tango_DEV_LONG64 ; break ; case TCKind . _tk_ulonglong : type = TangoConst . Tango_DEV_ULONG64 ; break ; case TCKind . _tk_float : type = TangoConst . Tango_DEV_FLOAT ; break ; case TCKind . _tk_double : type = TangoConst . Tango_DEV_DOUBLE ; break ; case TCKind . _tk_string : type = TangoConst . Tango_DEV_STRING ; break ; case TCKind . _tk_enum : type = TangoConst . Tango_DEV_STATE ; break ; default : Except . throw_exception ( "AttributeTypeNotSupported" , "Attribute Type (" + kind . value ( ) + ") Not Supported" , "DeviceAttribute.getType()" ) ; } } catch ( final org . omg . CORBA . TypeCodePackage . BadKind e ) { Except . throw_exception ( "Api_TypeCodePackage.BadKind" , "Bad or unknown type " , "DeviceAttribute.getType()" ) ; } return type ;
public class ComponentImpl { /** * return element that has at least given access or null * @ param access * @ param name * @ return matching value * @ throws PageException */ public Object get ( int access , String name ) throws PageException { } }
return get ( access , KeyImpl . init ( name ) ) ;
public class Oauth2LoginConfigImpl { /** * { @ inheritDoc } */ @ Override public AuthenticationFilter getAuthFilter ( ) { } }
if ( this . authFilter == null ) { this . authFilter = SocialLoginTAI . getAuthFilter ( this . authFilterRef ) ; } return this . authFilter ;
public class CommerceOrderItemUtil { /** * Returns a range of all the commerce order items where CProductId = & # 63 ; . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceOrderItemModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param CProductId the c product ID * @ param start the lower bound of the range of commerce order items * @ param end the upper bound of the range of commerce order items ( not inclusive ) * @ return the range of matching commerce order items */ public static List < CommerceOrderItem > findByCProductId ( long CProductId , int start , int end ) { } }
return getPersistence ( ) . findByCProductId ( CProductId , start , end ) ;
public class BasicBinder { /** * Initialises standard bindings for Java built in types */ private void initJdkBindings ( ) { } }
registerBinding ( AtomicBoolean . class , String . class , new AtomicBooleanStringBinding ( ) ) ; registerBinding ( AtomicInteger . class , String . class , new AtomicIntegerStringBinding ( ) ) ; registerBinding ( AtomicLong . class , String . class , new AtomicLongStringBinding ( ) ) ; registerBinding ( BigDecimal . class , String . class , new BigDecimalStringBinding ( ) ) ; registerBinding ( BigInteger . class , String . class , new BigIntegerStringBinding ( ) ) ; registerBinding ( Boolean . class , String . class , new BooleanStringBinding ( ) ) ; registerBinding ( Byte . class , String . class , new ByteStringBinding ( ) ) ; registerBinding ( Calendar . class , String . class , new CalendarStringBinding ( ) ) ; registerBinding ( Character . class , String . class , new CharacterStringBinding ( ) ) ; registerBinding ( CharSequence . class , String . class , new CharSequenceStringBinding ( ) ) ; registerBinding ( Class . class , String . class , new ClassStringBinding ( ) ) ; registerBinding ( Currency . class , String . class , new CurrencyStringBinding ( ) ) ; registerBinding ( Date . class , String . class , new DateStringBinding ( ) ) ; registerBinding ( Double . class , String . class , new DoubleStringBinding ( ) ) ; registerBinding ( File . class , String . class , new FileStringBinding ( ) ) ; registerBinding ( Float . class , String . class , new FloatStringBinding ( ) ) ; registerBinding ( InetAddress . class , String . class , new InetAddressStringBinding ( ) ) ; registerBinding ( Integer . class , String . class , new IntegerStringBinding ( ) ) ; registerBinding ( Locale . class , String . class , new LocaleStringBinding ( ) ) ; registerBinding ( Long . class , String . class , new LongStringBinding ( ) ) ; registerBinding ( Package . class , String . class , new PackageStringBinding ( ) ) ; registerBinding ( Short . class , String . class , new ShortStringBinding ( ) ) ; registerBinding ( StringBuffer . class , String . 
class , new StringBufferStringBinding ( ) ) ; registerBinding ( StringBuilder . class , String . class , new StringBuilderStringBinding ( ) ) ; registerBinding ( String . class , String . class , new StringStringBinding ( ) ) ; registerBinding ( TimeZone . class , String . class , new TimeZoneStringBinding ( ) ) ; registerBinding ( URI . class , String . class , new URIStringBinding ( ) ) ; registerBinding ( URL . class , String . class , new URLStringBinding ( ) ) ; registerBinding ( UUID . class , String . class , new UUIDStringBinding ( ) ) ;
public class SourceStreamSetControl { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . runtime . SIMPPtoPOutboundTransmitControllable # getTransmitMessagesIterator ( ) */ public SIMPIterator getTransmitMessagesIterator ( int maxMsgs ) throws SIMPControllableNotFoundException , SIMPRuntimeOperationFailedException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getTransmitMessagesIterator" ) ; assertValidControllable ( ) ; SIMPIterator msgItr = null ; try { msgItr = new SourceStreamSetXmitMessageIterator ( maxMsgs ) ; } catch ( SIResourceException e ) { FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.runtime.SourceStreamSetControl.getTransmitMessagesIterator" , "1:283:1.39" , this ) ; SIMPRuntimeOperationFailedException finalE = new SIMPRuntimeOperationFailedException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "SourceStreamSetControl.getTransmitMessagesIterator" , "1:291:1.39" , e } , null ) , e ) ; SibTr . exception ( tc , finalE ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getTransmitMessagesIterator" , finalE ) ; throw finalE ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getTransmitMessagesIterator" , msgItr ) ; return msgItr ;
public class Table { /** * Create a PdfPTable based on this Table object . * @ return a PdfPTable object * @ throws BadElementException */ public PdfPTable createPdfPTable ( ) throws BadElementException { } }
if ( ! convert2pdfptable ) { throw new BadElementException ( "No error, just an old style table" ) ; } setAutoFillEmptyCells ( true ) ; complete ( ) ; PdfPTable pdfptable = new PdfPTable ( widths ) ; pdfptable . setComplete ( complete ) ; if ( isNotAddedYet ( ) ) pdfptable . setSkipFirstHeader ( true ) ; SimpleTable t_evt = new SimpleTable ( ) ; t_evt . cloneNonPositionParameters ( this ) ; t_evt . setCellspacing ( cellspacing ) ; pdfptable . setTableEvent ( t_evt ) ; pdfptable . setHeaderRows ( lastHeaderRow + 1 ) ; pdfptable . setSplitLate ( cellsFitPage ) ; pdfptable . setKeepTogether ( tableFitsPage ) ; if ( ! Float . isNaN ( offset ) ) { pdfptable . setSpacingBefore ( offset ) ; } pdfptable . setHorizontalAlignment ( alignment ) ; if ( locked ) { pdfptable . setTotalWidth ( width ) ; pdfptable . setLockedWidth ( true ) ; } else { pdfptable . setWidthPercentage ( width ) ; } for ( Row row : this . rows ) { Element cell ; PdfPCell pcell ; for ( int i = 0 ; i < row . getColumns ( ) ; i ++ ) { if ( ( cell = ( Element ) row . getCell ( i ) ) != null ) { if ( cell instanceof Table ) { pcell = new PdfPCell ( ( ( Table ) cell ) . createPdfPTable ( ) ) ; } else if ( cell instanceof Cell ) { pcell = ( ( Cell ) cell ) . createPdfPCell ( ) ; pcell . setPadding ( cellpadding + cellspacing / 2f ) ; SimpleCell c_evt = new SimpleCell ( SimpleCell . CELL ) ; c_evt . cloneNonPositionParameters ( ( Cell ) cell ) ; c_evt . setSpacing ( cellspacing * 2f ) ; pcell . setCellEvent ( c_evt ) ; } else { pcell = new PdfPCell ( ) ; } pdfptable . addCell ( pcell ) ; } } } return pdfptable ;
public class CqlNativeStorage { /** * set the value to the position of the tuple */ private void setTupleValue ( Tuple tuple , int position , Object value , AbstractType < ? > validator ) throws ExecException { } }
if ( validator instanceof CollectionType ) setCollectionTupleValues ( tuple , position , value , validator ) ; else setTupleValue ( tuple , position , value ) ;
public class LogUtil { /** * 打印请求报文 * @ param reqParam */ public static void printRequestLog ( Map < String , String > reqParam ) { } }
writeMessage ( LOG_STRING_REQ_MSG_BEGIN ) ; Iterator < Entry < String , String > > it = reqParam . entrySet ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { Entry < String , String > en = it . next ( ) ; writeMessage ( "[" + en . getKey ( ) + "] = [" + en . getValue ( ) + "]" ) ; } writeMessage ( LOG_STRING_REQ_MSG_END ) ;
public class CmsJspTagFormatter { /** * Sets the name for the optional attribute that provides direct access to the content value map . < p > * @ param val the name for the optional attribute that provides direct access to the content value map */ public void setVal ( String val ) { } }
if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( val ) ) { m_value = val . trim ( ) ; } else { m_value = val ; }
public class ExampleSegmentSuperpixels { /** * Visualizes results three ways . 1 ) Colorized segmented image where each region is given a random color . * 2 ) Each pixel is assigned the mean color through out the region . 3 ) Black pixels represent the border * between regions . */ public static < T extends ImageBase < T > > void visualize ( GrayS32 pixelToRegion , T color , int numSegments ) { } }
// Computes the mean color inside each region ImageType < T > type = color . getImageType ( ) ; ComputeRegionMeanColor < T > colorize = FactorySegmentationAlg . regionMeanColor ( type ) ; FastQueue < float [ ] > segmentColor = new ColorQueue_F32 ( type . getNumBands ( ) ) ; segmentColor . resize ( numSegments ) ; GrowQueue_I32 regionMemberCount = new GrowQueue_I32 ( ) ; regionMemberCount . resize ( numSegments ) ; ImageSegmentationOps . countRegionPixels ( pixelToRegion , numSegments , regionMemberCount . data ) ; colorize . process ( color , pixelToRegion , regionMemberCount , segmentColor ) ; // Draw each region using their average color BufferedImage outColor = VisualizeRegions . regionsColor ( pixelToRegion , segmentColor , null ) ; // Draw each region by assigning it a random color BufferedImage outSegments = VisualizeRegions . regions ( pixelToRegion , numSegments , null ) ; // Make region edges appear red BufferedImage outBorder = new BufferedImage ( color . width , color . height , BufferedImage . TYPE_INT_RGB ) ; ConvertBufferedImage . convertTo ( color , outBorder , true ) ; VisualizeRegions . regionBorders ( pixelToRegion , 0xFF0000 , outBorder ) ; // Show the visualization results ListDisplayPanel gui = new ListDisplayPanel ( ) ; gui . addImage ( outColor , "Color of Segments" ) ; gui . addImage ( outBorder , "Region Borders" ) ; gui . addImage ( outSegments , "Regions" ) ; ShowImages . showWindow ( gui , "Superpixels" , true ) ;
public class PeerManager { /** * Initializes this peer manager and initiates the process of connecting to its peer nodes . * This will also reconfigure the ConnectionManager and ClientManager with peer related bits , * so this should not be called until < em > after < / em > the main server has set up its client * factory and authenticator . * @ param nodeName this node ' s unique name . * @ param sharedSecret a shared secret used to allow the peers to authenticate with one * another . * @ param hostName the DNS name of the server running this node . * @ param publicHostName if non - null , a separate public DNS hostname by which the node is to * be known to normal clients ( we may want inter - peer communication to take place over a * different network than the communication between real clients and the various peer * servers ) . * @ param region the region in which the node lives , which may be null . Nodes in different * regions must connect to each other through the public host name . * @ param port the port on which other nodes should connect to us . * @ param nodeNamespace The namespace for nodes to peer with . This node will connect to other * nodes with the same prefix from the NODES table . */ public void init ( String nodeName , String sharedSecret , String hostName , String publicHostName , String region , int port , String nodeNamespace ) { } }
init ( nodeName , sharedSecret , hostName , publicHostName , region , port , nodeNamespace , false ) ;
public class SearchCollectorExample { /** * set up the query options for the collecting search */ public static void configureQueryOptions ( DatabaseClient client ) throws FailedRequestException , ForbiddenUserException , ResourceNotFoundException , ResourceNotResendableException { } }
// create a manager for writing query options QueryOptionsManager optionsMgr = client . newServerConfigManager ( ) . newQueryOptionsManager ( ) ; // create the query options String options = "<search:options " + "xmlns:search='http://marklogic.com/appservices/search'>" + "<search:constraint name='industry'>" + "<search:value>" + "<search:element name='industry' ns=''/>" + "</search:value>" + "</search:constraint>" + "<search:return-aggregates>false</search:return-aggregates>" + "<search:return-constraints>false</search:return-constraints>" + "<search:return-facets>false</search:return-facets>" + "<search:return-frequencies>false</search:return-frequencies>" + "<search:return-metrics>false</search:return-metrics>" + "<search:return-plan>false</search:return-plan>" + "<search:return-qtext>false</search:return-qtext>" + "<search:return-query>false</search:return-query>" + "<search:return-similar>true</search:return-similar>" + "<search:return-values>false</search:return-values>" + "<search:return-results>false</search:return-results>" + "<search:transform-results apply='empty-snippet'/>" + "</search:options>" ; // create a handle to send the query options StringHandle writeHandle = new StringHandle ( options ) ; // write the query options to the database optionsMgr . writeOptions ( OPTIONS_NAME , writeHandle ) ; System . out . println ( "Configured the query options on the server" ) ;
public class ConsumerSessionImpl { /** * ( non - Javadoc ) * @ see com . ibm . wsspi . sib . core . ConsumerSession # unlockSet ( long [ ] ) */ @ Override public void unlockSet ( SIMessageHandle [ ] msgHandles ) throws SIMPMessageNotLockedException , SISessionUnavailableException , SIConnectionLostException , SIIncorrectCallException , SIResourceException , SIErrorException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConsumerSession . tc . isEntryEnabled ( ) ) SibTr . entry ( CoreSPIConsumerSession . tc , "unlockSet" , new Object [ ] { this , SIMPUtils . messageHandleArrayToString ( msgHandles ) } ) ; // pass the unlockSet call on to the LCP _localConsumerPoint . processMsgSet ( msgHandles , null , null , true , false , false , true ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPIConsumerSession . tc . isEntryEnabled ( ) ) SibTr . exit ( CoreSPIConsumerSession . tc , "unlockSet" ) ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcStructuralLoad ( ) { } }
if ( ifcStructuralLoadEClass == null ) { ifcStructuralLoadEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 640 ) ; } return ifcStructuralLoadEClass ;
public class DistributedCache { /** * To delete the caches which have a refcount of zero */ private static void deleteCache ( Configuration conf , MRAsyncDiskService asyncDiskService ) throws IOException { } }
List < CacheStatus > deleteSet = new LinkedList < CacheStatus > ( ) ; // try deleting cache Status with refcount of zero synchronized ( cachedArchives ) { for ( Iterator < String > it = cachedArchives . keySet ( ) . iterator ( ) ; it . hasNext ( ) ; ) { String cacheId = ( String ) it . next ( ) ; CacheStatus lcacheStatus = cachedArchives . get ( cacheId ) ; if ( lcacheStatus . refcount == 0 ) { // delete this cache entry from the global list // and mark the localized file for deletion deleteSet . add ( lcacheStatus ) ; it . remove ( ) ; } } } // do the deletion asynchronously , after releasing the global lock Thread cacheFileCleaner = new Thread ( new CacheFileCleanTask ( asyncDiskService , FileSystem . getLocal ( conf ) , deleteSet ) ) ; cacheFileCleaner . start ( ) ;
public class ClientConversationState { /** * Returns the SICoreConnection in use with this conversation * @ return SICoreConnection */ public SICoreConnection getSICoreConnection ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getSICoreConnection" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getSICoreConnection" , siCoreConnection ) ; return siCoreConnection ;
public class ReferrerURLCookieHandler { /** * Retrieve the referrer URL from the HttpServletRequest ' s cookies . * This will decode the URL and restore the host name if it was removed . * @ param req * @ return referrerURL */ @ Sensitive public String getReferrerURLFromCookies ( HttpServletRequest req , String cookieName ) { } }
Cookie [ ] cookies = req . getCookies ( ) ; String referrerURL = CookieHelper . getCookieValue ( cookies , cookieName ) ; if ( referrerURL != null ) { StringBuffer URL = req . getRequestURL ( ) ; referrerURL = decodeURL ( referrerURL ) ; referrerURL = restoreHostNameToURL ( referrerURL , URL . toString ( ) ) ; } return referrerURL ;
public class SREsPreferencePage { /** * Edit the selected SRE . */ protected void editSRE ( ) { } }
final IStructuredSelection selection = ( IStructuredSelection ) this . sresList . getSelection ( ) ; final ISREInstall sre = ( ISREInstall ) selection . getFirstElement ( ) ; if ( sre == null ) { return ; } final EditSREInstallWizard wizard = new EditSREInstallWizard ( sre , this . sreArray . toArray ( new ISREInstall [ this . sreArray . size ( ) ] ) ) ; final WizardDialog dialog = new WizardDialog ( getShell ( ) , wizard ) ; if ( dialog . open ( ) == Window . OK ) { this . sresList . setSelection ( new StructuredSelection ( sre ) ) ; this . sresList . refresh ( true ) ; updateUI ( ) ; }
public class CmsSitemapTreeItem { /** * Updates the in navigation properties of the displayed entry . < p > * @ param entry the sitemap entry */ public void updateInNavigation ( CmsClientSitemapEntry entry ) { } }
if ( entry . isInNavigation ( ) ) { m_inNavigationStyle . setValue ( null ) ; getListItemWidget ( ) . setTitleEditable ( true ) ; } else { m_inNavigationStyle . setValue ( CSS . notInNavigationEntry ( ) ) ; getListItemWidget ( ) . setTitleEditable ( false ) ; }
public class GeometryColumnsUtils { /** * Indicates if the < code > GEOMETRY _ COLUMNS < / code > table or view exists . * @ param database * the database to check . * @ return < code > true < / code > if the table or view exists . */ public static boolean geometryColumnsExists ( final Database database ) { } }
String geometryColumnsName = database . correctObjectName ( "geometry_columns" , Table . class ) ; DatabaseObject example = null ; if ( database instanceof DerbyDatabase || database instanceof H2Database ) { final Table tableExample = new Table ( ) ; tableExample . setName ( geometryColumnsName ) ; tableExample . setSchema ( database . getDefaultCatalogName ( ) , database . getDefaultSchemaName ( ) ) ; example = tableExample ; } else if ( database instanceof PostgresDatabase ) { final View viewExample = new View ( ) ; viewExample . setName ( geometryColumnsName ) ; viewExample . setSchema ( database . getDefaultCatalogName ( ) , "public" ) ; example = viewExample ; } try { return example != null && SnapshotGeneratorFactory . getInstance ( ) . has ( example , database ) ; } catch ( final LiquibaseException e ) { throw new UnexpectedLiquibaseException ( "Failed to determine if the geometry_columns table or view exists" , e ) ; }
public class HtmlReport {

    /**
     * Generates the report code lines for the body of the given root test method,
     * including one level of sub-method expansion.
     *
     * <p>Pass 1 walks the root method's code body, emitting a {@link ReportCodeLine}
     * per line (with tree-table ids "i" for root lines and "i_j" for sub-method
     * lines) and tracking the enclosing {@code TestStepLabel} so non-label lines are
     * parented under it. Pass 2 propagates error / not-yet-run status from child
     * lines back onto their {@code TestStepLabel} line.
     *
     * @param rootMethod the method whose code body is reported
     * @param runFailure the failure of this run; set null if no error
     * @param executed whether the method was actually executed
     * @return the flattened, ordered list of report code lines
     * @throws IllegalTestScriptException if the script cannot be reported
     */
    private List<ReportCodeLine> generateReportCodeBody(TestMethod rootMethod, RunFailure runFailure, boolean executed) throws IllegalTestScriptException {
        // ttId of the TestStepLabel currently in scope; lines after a label are parented under it.
        String currentStepLabelTtId = null;
        List<ReportCodeLine> result = new ArrayList<>(rootMethod.getCodeBody().size());
        for (int i = 0; i < rootMethod.getCodeBody().size(); i++) {
            CodeLine codeLine = rootMethod.getCodeBody().get(i);
            String rootTtId = Integer.toString(i);
            String parentTtIdForRoot = null;
            if (codeLine.getCode() instanceof TestStepLabel) {
                // A label starts a new group: it has no parent itself.
                parentTtIdForRoot = null;
                currentStepLabelTtId = rootTtId;
            } else if (codeLine.getCode() instanceof TestStep) {
                throw new RuntimeException("not supported");
            } else {
                parentTtIdForRoot = currentStepLabelTtId;
            }
            StackLine rootStackLine = generateStackLine(rootMethod, rootMethod.getKey(), i, codeLine.getStartLine());
            List<StackLine> rootStackLines = new ArrayList<>(1);
            rootStackLines.add(rootStackLine);
            ReportCodeLine reportCodeLine = generateReportCodeLine(codeLine, rootMethod.getArgVariables(), rootStackLines, runFailure, executed, rootTtId, parentTtIdForRoot);
            result.add(reportCodeLine);
            // add direct child to HTML report
            if (codeLine.getCode() instanceof SubMethodInvoke) {
                SubMethodInvoke invoke = (SubMethodInvoke) codeLine.getCode();
                // don't add child HTML report for childInvoke
                // since the code body for the sub method is not the code body of
                // the actually invoked method.
                // TODO consider about this behavior
                if (!invoke.isChildInvoke()) {
                    List<String> parentMethodArgTestDocs = reportCodeLine.getMethodArgTestDocs();
                    List<CodeLine> codeBody = invoke.getSubMethod().getCodeBody();
                    for (int j = 0; j < codeBody.size(); j++) {
                        CodeLine childCodeLine = codeBody.get(j);
                        if (childCodeLine.getCode() instanceof TestStepLabel) {
                            throw new RuntimeException("nested TestStepLabel is not supported yet");
                        } else if (childCodeLine.getCode() instanceof TestStep) {
                            throw new RuntimeException("not supported");
                        }
                        StackLine childStackLine = generateStackLine(invoke.getSubMethod(), invoke.getSubMethodKey(), j, childCodeLine.getStartLine());
                        // Child stack is [child frame, root frame].
                        List<StackLine> childStackLines = new ArrayList<>(2);
                        childStackLines.add(childStackLine);
                        childStackLines.add(rootStackLine);
                        ReportCodeLine childReportCodeLine = generateReportCodeLine(childCodeLine, parentMethodArgTestDocs, childStackLines, runFailure, executed, rootTtId + "_" + j, rootTtId);
                        result.add(childReportCodeLine);
                    }
                }
            }
        }
        // update hasError and alreadyRun status of TestStepLabel CodeLine
        // according to its child code block
        ReportCodeLine currentStepLabelLine = null;
        for (int i = 0; i < result.size(); i++) {
            ReportCodeLine reportCodeLine = result.get(i);
            Code code = reportCodeLine.getCodeLine().getCode();
            if (code instanceof TestStepLabel) {
                currentStepLabelLine = result.get(i);
                continue;
            }
            if (currentStepLabelLine == null) {
                continue; // no TestStepLabel to be updated
            }
            if (reportCodeLine.hasError()) {
                // If child code block contains error,
                // TestStepLabel also has error
                currentStepLabelLine.setHasError(true);
                currentStepLabelLine.setAlreadyRun(true);
                // use error image for child code block
                currentStepLabelLine.setImageId(reportCodeLine.getImageId());
            } else if (!reportCodeLine.isAlreadyRun() && !currentStepLabelLine.hasError()) {
                // If child code block contains not executed line,
                // TestStepLabel is also not executed
                currentStepLabelLine.setHasError(false);
                currentStepLabelLine.setAlreadyRun(false);
            }
        }
        return result;
    }
}
public class HalReader { /** * Read and return HalResource * @ param reader Reader * @ return Hal resource */ public HalResource read ( final Reader reader ) { } }
final ContentRepresentation readableRepresentation = representationFactory . readRepresentation ( RepresentationFactory . HAL_JSON , reader ) ; return new HalResource ( objectMapper , readableRepresentation ) ;
public class HConnectionManager { /** * Remove the { @ link HClientPool } referenced by the { @ link CassandraHost } from * the active host pools . This does not shut down the pool , only removes it as a candidate from * future operations . * @ param cassandraHost * @ return true if the operation was successful . */ public boolean suspendCassandraHost ( CassandraHost cassandraHost ) { } }
HClientPool pool = hostPools . remove ( cassandraHost ) ; boolean removed = pool != null ; if ( removed ) { suspendedHostPools . put ( cassandraHost , pool ) ; } listenerHandler . fireOnSuspendHost ( cassandraHost , removed ) ; log . info ( "Suspend operation status was {} for CassandraHost {}" , removed , cassandraHost ) ; return removed ;
public class RootHeartbeat { /** * Returns the cluster with the given name , creating it if necessary . */ ClusterHeartbeat createCluster ( String clusterName ) { } }
ClusterHeartbeat cluster = _clusterMap . get ( clusterName ) ; if ( cluster == null ) { cluster = new ClusterHeartbeat ( clusterName , this ) ; _clusterMap . putIfAbsent ( clusterName , cluster ) ; cluster = _clusterMap . get ( clusterName ) ; } return cluster ;
public class DoubleArrayTrie { /** * 沿着路径转移状态 * @ param path 路径 * @ param from 起点 ( 根起点为base [ 0 ] = 1) * @ return 转移后的状态 ( 双数组下标 ) */ public int transition ( String path , int from ) { } }
int b = from ; int p ; for ( int i = 0 ; i < path . length ( ) ; ++ i ) { p = b + ( int ) ( path . charAt ( i ) ) + 1 ; if ( b == check [ p ] ) b = base [ p ] ; else return - 1 ; } p = b ; return p ;
public class DoubleColumn { /** * Maps the function across all rows , appending the results to a new NumberColumn * @ param fun function to map * @ return the NumberColumn with the results */ public DoubleColumn map ( ToDoubleFunction < Double > fun ) { } }
DoubleColumn result = DoubleColumn . create ( name ( ) ) ; for ( double t : this ) { try { result . append ( fun . applyAsDouble ( t ) ) ; } catch ( Exception e ) { result . appendMissing ( ) ; } } return result ;
public class WikipediaTemplateInfo { /** * Does the same as revisionContainsTemplateName ( ) without using a template index * @ param revId * @ param templateName * @ return * @ throws WikiApiException */ public boolean revisionContainsTemplateNameWithoutIndex ( int revId , String templateName ) throws WikiApiException { } }
if ( revApi == null ) { revApi = new RevisionApi ( wiki . getDatabaseConfiguration ( ) ) ; } if ( parser == null ) { // TODO switch to SWEBLE MediaWikiParserFactory pf = new MediaWikiParserFactory ( wiki . getDatabaseConfiguration ( ) . getLanguage ( ) ) ; pf . setTemplateParserClass ( ShowTemplateNamesAndParameters . class ) ; parser = pf . createParser ( ) ; } List < Template > tplList = parser . parse ( revApi . getRevision ( revId ) . getRevisionText ( ) ) . getTemplates ( ) ; for ( Template tpl : tplList ) { if ( tpl . getName ( ) . equalsIgnoreCase ( templateName ) ) { return true ; } } return false ;
public class SipStandardService { /** * Find a sip Connector by it ' s ip address , port and transport * @ param ipAddress ip address of the connector to find * @ param port port of the connector to find * @ param transport transport of the connector to find * @ return the found sip connector or null if noting found */ private SipProtocolHandler findSipConnector ( String ipAddress , int port , String transport ) { } }
SipConnector connectorToRemove = null ; for ( SipProtocolHandler protocolHandler : connectors ) { if ( protocolHandler . getIpAddress ( ) . equals ( ipAddress ) && protocolHandler . getPort ( ) == port && protocolHandler . getSignalingTransport ( ) . equalsIgnoreCase ( transport ) ) { connectorToRemove = protocolHandler . getSipConnector ( ) ; return protocolHandler ; } } return null ;
public class MercatorUtils {

    /**
     * Resolution (meters/pixel) for a given zoom level, measured at the Equator.
     *
     * @param zoom zoom level
     * @param tileSize tile size in pixels
     * @return resolution in meters per pixel
     */
    public static double getResolution(int zoom, int tileSize) {
        // Ground resolution at zoom 0: Earth circumference (2 * PI * 6378137 m)
        // divided over one tile; each further zoom level halves it.
        final double zoomZeroResolution = 2 * Math.PI * 6378137 / tileSize;
        return zoomZeroResolution / Math.pow(2, zoom);
    }
}
public class EnableOnPhysicalHandler { /** * Called when a valid record is read from the table / query . * Enables or disables the target field ( s ) . * @ param bDisplayOption If true , display any changes . */ public void doValidRecord ( boolean bDisplayOption ) // Init this field override for other value { } }
Record record = this . getOwner ( ) ; BaseDatabase database = record . getTable ( ) . getDatabase ( ) ; m_bEnableOnValid = true ; int counter = ( int ) record . getCounterField ( ) . getValue ( ) ; String startingID = database . getProperty ( BaseDatabase . STARTING_ID ) ; String endingID = database . getProperty ( BaseDatabase . ENDING_ID ) ; if ( startingID != null ) if ( counter < Integer . parseInt ( Converter . stripNonNumber ( startingID ) ) ) m_bEnableOnValid = false ; if ( endingID != null ) if ( counter > Integer . parseInt ( Converter . stripNonNumber ( endingID ) ) ) m_bEnableOnValid = false ; // System . out . println ( " start , end : " + startingID + " " + endingID ) ; super . doValidRecord ( bDisplayOption ) ;
public class ItemAttribute { /** * Returns the value of attribute * @ return the value */ @ JsonValue @ SuppressWarnings ( "unchecked" ) public < T > T get ( ) { } }
return ( T ) ( this . isString ? ( String ) this . value : ( Number ) this . value ) ;
public class OpMap {

    /**
     * Given a location step position, return the end position, i.e. the
     * beginning of the next step.
     *
     * @param opPos the position of a location step.
     * @return the position of the next location step, or OpCodes.ENDOP when the
     *         opcode following a node-set operation is not an axes step.
     * @throws RuntimeException if the opcode at opPos is not a recognized step type
     */
    public int getNextStepPos(int opPos) {
        int stepType = getOp(opPos);
        if ((stepType >= OpCodes.AXES_START_TYPES) && (stepType <= OpCodes.AXES_END_TYPES)) {
            // Plain axes step: the next step starts right after this op.
            return getNextOpPos(opPos);
        } else if ((stepType >= OpCodes.FIRST_NODESET_OP) && (stepType <= OpCodes.LAST_NODESET_OP)) {
            int newOpPos = getNextOpPos(opPos);
            // Skip over any predicates attached to this node-set operation.
            while (OpCodes.OP_PREDICATE == getOp(newOpPos)) {
                newOpPos = getNextOpPos(newOpPos);
            }
            stepType = getOp(newOpPos);
            // Whatever follows must itself be an axes step, otherwise we are done.
            if (!((stepType >= OpCodes.AXES_START_TYPES) && (stepType <= OpCodes.AXES_END_TYPES))) {
                return OpCodes.ENDOP;
            }
            return newOpPos;
        } else {
            throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_UNKNOWN_STEP, new Object[] { String.valueOf(stepType) }));
            // "Programmer's assertion in getNextStepPos: unknown stepType: " + stepType);
        }
    }
}
public class StopWord { /** * 判断一个词是否是停用词 * @ param word * @ return */ public static boolean is ( String word ) { } }
if ( word == null ) { return false ; } word = word . trim ( ) ; return isStopChar ( word ) || stopwords . contains ( word ) ;
public class DefaultGroovyMethods {

    /**
     * A convenience method for creating an immutable view of a map.
     *
     * @param self a Map
     * @return an immutable Map backed by {@code self}
     * @see java.util.Collections#unmodifiableMap(java.util.Map)
     * @since 1.0
     */
    public static <K, V> Map<K, V> asImmutable(Map<? extends K, ? extends V> self) {
        // Wrap rather than copy: reads pass through, mutators throw.
        return Collections.<K, V>unmodifiableMap(self);
    }
}
public class Section {

    /**
     * Indexed setter for textObjects - sets an indexed value - the text objects
     * (figure, table, boxed text etc.) that are associated with a particular section.
     *
     * @generated
     * @param i index in the array to set
     * @param v value to set into the array
     */
    public void setTextObjects(int i, TextObject v) {
        // JCas generated boilerplate: verify the feature exists on this type system.
        if (Section_Type.featOkTst && ((Section_Type) jcasType).casFeat_textObjects == null)
            jcasType.jcas.throwFeatMissing("textObjects", "de.julielab.jules.types.Section");
        // Bounds-check against the underlying FSArray before writing.
        jcasType.jcas.checkArrayBounds(jcasType.ll_cas.ll_getRefValue(addr, ((Section_Type) jcasType).casFeatCode_textObjects), i);
        // Store the FS reference of v at index i of the feature's array.
        jcasType.ll_cas.ll_setRefArrayValue(jcasType.ll_cas.ll_getRefValue(addr, ((Section_Type) jcasType).casFeatCode_textObjects), i, jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class InputElementStack { /** * Implementation of NamespaceContext : */ @ Override public final String getNamespaceURI ( String prefix ) { } }
if ( prefix == null ) { throw new IllegalArgumentException ( ErrorConsts . ERR_NULL_ARG ) ; } if ( prefix . length ( ) == 0 ) { if ( mDepth == 0 ) { // unexpected . . . but let ' s not err at this point /* 07 - Sep - 2007 , TSa : Default / " no namespace " does map to * " URI " of empty String . */ return XmlConsts . DEFAULT_NAMESPACE_URI ; } return mCurrElement . mDefaultNsURI ; } if ( prefix . equals ( XMLConstants . XML_NS_PREFIX ) ) { return XMLConstants . XML_NS_URI ; } if ( prefix . equals ( XMLConstants . XMLNS_ATTRIBUTE ) ) { return XMLConstants . XMLNS_ATTRIBUTE_NS_URI ; } /* Ok , need to find the match , if any ; starting from end of the * list of active namespaces . Note that we can not count on prefix * being interned / canonicalized . */ return mNamespaces . findLastNonInterned ( prefix ) ;
public class Index { /** * Gets the facets config . * @ return the facets config */ public static FacetsConfig getFacetsConfig ( ) { } }
final FacetsConfig ret = new FacetsConfig ( ) ; ret . setHierarchical ( Indexer . Dimension . DIMCREATED . name ( ) , true ) ; return ret ;
public class AWSStorageGatewayClient { /** * Updates a Server Message Block ( SMB ) file share . * < note > * To leave a file share field unchanged , set the corresponding input field to null . This operation is only * supported for file gateways . * < / note > < important > * File gateways require AWS Security Token Service ( AWS STS ) to be activated to enable you to create a file share . * Make sure that AWS STS is activated in the AWS Region you are creating your file gateway in . If AWS STS is not * activated in this AWS Region , activate it . For information about how to activate AWS STS , see < a * href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / id _ credentials _ temp _ enable - regions . html " > Activating and * Deactivating AWS STS in an AWS Region < / a > in the < i > AWS Identity and Access Management User Guide . < / i > * File gateways don ' t support creating hard or symbolic links on a file share . * < / important > * @ param updateSMBFileShareRequest * UpdateSMBFileShareInput * @ return Result of the UpdateSMBFileShare operation returned by the service . * @ throws InvalidGatewayRequestException * An exception occurred because an invalid gateway request was issued to the service . For more information , * see the error and message fields . * @ throws InternalServerErrorException * An internal server error has occurred during the request . For more information , see the error and message * fields . * @ sample AWSStorageGateway . UpdateSMBFileShare * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / storagegateway - 2013-06-30 / UpdateSMBFileShare " * target = " _ top " > AWS API Documentation < / a > */ @ Override public UpdateSMBFileShareResult updateSMBFileShare ( UpdateSMBFileShareRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdateSMBFileShare ( request ) ;
public class SocketAdapter { /** * This method must be implemented for PoolableAdapter */ @ Override public Object openConnection ( ) throws ConnectionException , AdapterException { } }
try { int k = hostport . indexOf ( ':' ) ; if ( k < 0 ) throw new AdapterException ( "Invalid host:port specification - " + hostport ) ; connection = new SoccomClient ( hostport . substring ( 0 , k ) , Integer . parseInt ( hostport . substring ( k + 1 ) ) ) ; return connection ; } catch ( SoccomException e ) { if ( e . getErrorCode ( ) == SoccomException . CONNECT ) throw new ConnectionException ( ConnectionException . CONNECTION_DOWN , e . getMessage ( ) , e ) ; else throw new AdapterException ( e . getErrorCode ( ) , e . getMessage ( ) , e ) ; }
public class Utils {

    /**
     * Null-safe map lookup.
     *
     * @param data map to read from; may be null
     * @param key lookup key
     * @return the mapped value, or null when the map itself is null or holds no mapping
     */
    public static <T> T get(Map<?, T> data, Object key) {
        if (data == null) {
            return null;
        }
        return data.get(key);
    }
}
public class CustomShape { /** * 获取各个顶点的角度列表 */ private double [ ] getAngles ( double startAngle ) { } }
double [ ] angles = new double [ _corners ] ; angles [ 0 ] = startAngle ; // 角度的增量 double incremental = getAngleIncremental ( ) ; for ( int i = 1 ; i < _corners ; i ++ ) { angles [ i ] = angles [ i - 1 ] + incremental ; } return angles ;
public class TreeQuery { /** * Finds a child TreeNode based on its path . * Searches the child nodes for the first element , then that * node ' s children for the second element , etc . * @ param treeDefdefines a tree * @ param nodestarting point for the search * @ param treeMappermaps elements in the tree to some value for comparison with the path elements * @ param paththe path of nodes which we ' re looking * @ param pathMappermaps elements in the path to some value for comparison with the tree elements */ public static < T , P > Optional < T > findByPath ( TreeDef < T > treeDef , T node , Function < ? super T , ? > treeMapper , List < P > path , Function < ? super P , ? > pathMapper ) { } }
return findByPath ( treeDef , node , path , ( treeSide , pathSide ) -> { return Objects . equals ( treeMapper . apply ( treeSide ) , pathMapper . apply ( pathSide ) ) ; } ) ;
public class SocketBar { /** * Returns the server inet address that accepted the request . */ public String getLocalHost ( ) { } }
InetAddress localAddress = addressLocal ( ) ; if ( localAddress != null ) return localAddress . getHostAddress ( ) ; else return null ;
public class Vector3f { /** * / * ( non - Javadoc ) * @ see org . joml . Vector3fc # cross ( float , float , float , org . joml . Vector3f ) */ public Vector3f cross ( float x , float y , float z , Vector3f dest ) { } }
float rx = this . y * z - this . z * y ; float ry = this . z * x - this . x * z ; float rz = this . x * y - this . y * x ; dest . x = rx ; dest . y = ry ; dest . z = rz ; return dest ;
public class ClientWindow { /** * < p class = " changed _ added _ 2_2 " > Methods that append the ClientWindow to generated * URLs must call this method to see if they are permitted to do so . If * { @ link # CLIENT _ WINDOW _ MODE _ PARAM _ NAME } is " url " without the quotes , all generated * URLs that cause a GET request must append the ClientWindow by default . * This is specified as a static method because callsites need to access it * without having access to an actual { @ code ClientWindow } instance . < / p > * @ param context the { @ link FacesContext } for this request . * @ since 2.2 */ public boolean isClientWindowRenderModeEnabled ( FacesContext context ) { } }
boolean result = false ; Map < Object , Object > attrMap = context . getAttributes ( ) ; result = ! attrMap . containsKey ( PER_USE_CLIENT_WINDOW_URL_QUERY_PARAMETER_DISABLED_KEY ) ; return result ;
public class StreamSegmentNameUtils { /** * Gets the name of the meta - Segment mapped to the given Segment Name that is responsible with storing its Rollover * information . * Existence of this file should also indicate that a Segment with this file has a rollover policy in place . * @ param segmentName The name of the Segment to get the Header segment name for . * @ return The result . */ public static String getHeaderSegmentName ( String segmentName ) { } }
Preconditions . checkArgument ( ! segmentName . endsWith ( HEADER_SUFFIX ) , "segmentName is already a segment header name" ) ; return segmentName + HEADER_SUFFIX ;
public class Configuration {

    /**
     * Returns the configured alarm manager, tracing entry/exit when enabled.
     *
     * @return the alarm manager instance
     */
    public static final AlarmManager getAlarmManager() {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "getAlarmManager");
        if (tc.isEntryEnabled())
            Tr.exit(tc, "getAlarmManager", _alarmManager);
        return _alarmManager;
    }
}
public class MountPointInfo {

    /**
     * <code>optional string ufsType = 2;</code>
     *
     * @return the UFS type, decoding the lazily-held ByteString form if necessary
     */
    public java.lang.String getUfsType() {
        // Protobuf lazy-decoding pattern: the field holds either the decoded
        // String or the raw ByteString from the wire.
        java.lang.Object ref = ufsType_;
        if (ref instanceof java.lang.String) {
            return (java.lang.String) ref;
        } else {
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            // Cache the decoded String only when the bytes are valid UTF-8.
            if (bs.isValidUtf8()) {
                ufsType_ = s;
            }
            return s;
        }
    }
}
public class Blade { /** * Add a delete route to routes * @ param path your route path * @ param handler route implement * @ return return blade instance */ public Blade delete ( @ NonNull String path , @ NonNull RouteHandler handler ) { } }
this . routeMatcher . addRoute ( path , handler , HttpMethod . DELETE ) ; return this ;
public class Ifc2x3tc1PackageImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     * @return the EClass for IfcRoot, lazily resolved from the registered package
     */
    public EClass getIfcRoot() {
        if (ifcRootEClass == null) {
            // Lazily resolve the classifier from the globally registered package.
            // NOTE(review): index 498 is generator-assigned — verify against the
            // model if the package is ever regenerated.
            ifcRootEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(498);
        }
        return ifcRootEClass;
    }
}
public class ORBManager {

    /**
     * Initialise the ORB: configures JacORB system properties, creates the ORB
     * and root POA (or a persistent USER_ID child POA when the Tango database
     * is not used), activates the POA manager, and — when the database is used
     * — checks whether the admin device is already running elsewhere.
     *
     * @param useDb is using tango db
     * @param adminDeviceName admin device name
     * @throws DevFailed if ORB/POA creation or activation fails
     */
    public static synchronized void init(final boolean useDb, final String adminDeviceName) throws DevFailed {
        // Modified properties fo ORB usage.
        final Properties props = System.getProperties();
        props.put("org.omg.CORBA.ORBClass", "org.jacorb.orb.ORB");
        props.put("org.omg.CORBA.ORBSingletonClass", "org.jacorb.orb.ORBSingleton");
        // register interceptors
        props.put("org.omg.PortableInterceptor.ORBInitializerClass.ForwardInit",
                InterceptorInitializer.class.getCanonicalName());
        // Set retry properties
        props.put("jacorb.retries", "0");
        props.put("jacorb.retry_interval", "100");
        props.put("jacorb.codeset", true);
        // props.put("jacorb.config.dir", "fr/esrf/TangoApi");
        // Initial timeout for establishing a connection.
        props.put("jacorb.connection.client.connect_timeout", "5000");
        // Set the Largest transfert.
        final String str = checkORBgiopMaxMsgSize();
        props.put("jacorb.maxManagedBufSize", str);
        // Set jacorb verbosity at minimum value
        props.put("jacorb.config.log.verbosity", "0");
        // only used for no db device
        props.setProperty("jacorb.implname", SERVER_IMPL_NAME);
        // System.setProperties(props);
        // props.setProperty("jacorb.net.tcp_listener", ConnectionListener.class.getName());
        // Initialize ORB
        orb = ORB.init(new String[] {}, props);
        try {
            poa = POAHelper.narrow(orb.resolve_initial_references("RootPOA"));
            // boot_manager = BootManagerHelper.narrow(orb.resolve_initial_references("BootManager"));
        } catch (final InvalidName e) {
            throw DevFailedUtils.newDevFailed(e);
        } catch (final INITIALIZE e) {
            // ignore, occurs when starting several times a server that failed
            if (!useDb) {
                throw DevFailedUtils.newDevFailed(e);
            }
        }
        try {
            if (!useDb) {
                // If the database is not used, create a POA with the
                // USER_ID policy
                final org.omg.CORBA.Policy[] policies = new org.omg.CORBA.Policy[2];
                policies[0] = poa.create_id_assignment_policy(IdAssignmentPolicyValue.USER_ID);
                policies[1] = poa.create_lifespan_policy(LifespanPolicyValue.PERSISTENT);
                final org.omg.PortableServer.POAManager manager = poa.the_POAManager();
                poa = poa.create_POA(NODB_POA, manager, policies);
            }
        } catch (final org.omg.PortableServer.POAPackage.AdapterAlreadyExists e) {
            throw DevFailedUtils.newDevFailed(e);
        } catch (final org.omg.PortableServer.POAPackage.InvalidPolicy e) {
            throw DevFailedUtils.newDevFailed(e);
        }
        final POAManager manager = poa.the_POAManager();
        try {
            manager.activate();
        } catch (final org.omg.PortableServer.POAManagerPackage.AdapterInactive ex) {
            throw DevFailedUtils.newDevFailed("API_CantActivatePOAManager",
                    "The POA activate method throws an exception");
        }
        if (useDb) {
            // Build device name and try to import it from database
            final DeviceImportInfo importInfo = DatabaseFactory.getDatabase().importDevice(adminDeviceName);
            if (importInfo.isExported()) {
                LOGGER.debug("{} is set as exported in tango db - checking if it is already running", adminDeviceName);
                // if is exported, try to connect to it
                ORBManager.checkServerRunning(importInfo, adminDeviceName);
            }
        }
    }
}
public class AstyanaxTableDAO { /** * Write the delta to the system table and invalidate caches in the specified scope . */ private void updateTableMetadata ( String table , Delta delta , Audit audit , @ Nullable InvalidationScope scope ) { } }
_backingStore . update ( _systemTable , table , TimeUUIDs . newUUID ( ) , delta , audit , scope == InvalidationScope . GLOBAL ? WriteConsistency . GLOBAL : WriteConsistency . STRONG ) ; // Synchronously notify other emodb servers of the table change . if ( scope != null ) { _tableCacheHandle . invalidate ( scope , table ) ; }
public class PutItemRequest { /** * One or more substitution tokens for attribute names in an expression . The following are some use cases for using * < code > ExpressionAttributeNames < / code > : * < ul > * < li > * To access an attribute whose name conflicts with a DynamoDB reserved word . * < / li > * < li > * To create a placeholder for repeating occurrences of an attribute name in an expression . * < / li > * < li > * To prevent special characters in an attribute name from being misinterpreted in an expression . * < / li > * < / ul > * Use the < b > # < / b > character in an expression to dereference an attribute name . For example , consider the following * attribute name : * < ul > * < li > * < code > Percentile < / code > * < / li > * < / ul > * The name of this attribute conflicts with a reserved word , so it cannot be used directly in an expression . ( For * the complete list of reserved words , see < a * href = " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / ReservedWords . html " > Reserved Words < / a > in * the < i > Amazon DynamoDB Developer Guide < / i > ) . To work around this , you could specify the following for * < code > ExpressionAttributeNames < / code > : * < ul > * < li > * < code > { " # P " : " Percentile " } < / code > * < / li > * < / ul > * You could then use this substitution in an expression , as in this example : * < ul > * < li > * < code > # P = : val < / code > * < / li > * < / ul > * < note > * Tokens that begin with the < b > : < / b > character are < i > expression attribute values < / i > , which are placeholders for * the actual value at runtime . * < / note > * For more information on expression attribute names , see < a href = * " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / Expressions . AccessingItemAttributes . html " * > Accessing Item Attributes < / a > in the < i > Amazon DynamoDB Developer Guide < / i > . 
* @ param expressionAttributeNames * One or more substitution tokens for attribute names in an expression . The following are some use cases for * using < code > ExpressionAttributeNames < / code > : < / p > * < ul > * < li > * To access an attribute whose name conflicts with a DynamoDB reserved word . * < / li > * < li > * To create a placeholder for repeating occurrences of an attribute name in an expression . * < / li > * < li > * To prevent special characters in an attribute name from being misinterpreted in an expression . * < / li > * < / ul > * Use the < b > # < / b > character in an expression to dereference an attribute name . For example , consider the * following attribute name : * < ul > * < li > * < code > Percentile < / code > * < / li > * < / ul > * The name of this attribute conflicts with a reserved word , so it cannot be used directly in an expression . * ( For the complete list of reserved words , see < a * href = " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / ReservedWords . html " > Reserved * Words < / a > in the < i > Amazon DynamoDB Developer Guide < / i > ) . To work around this , you could specify the * following for < code > ExpressionAttributeNames < / code > : * < ul > * < li > * < code > { " # P " : " Percentile " } < / code > * < / li > * < / ul > * You could then use this substitution in an expression , as in this example : * < ul > * < li > * < code > # P = : val < / code > * < / li > * < / ul > * < note > * Tokens that begin with the < b > : < / b > character are < i > expression attribute values < / i > , which are * placeholders for the actual value at runtime . * < / note > * For more information on expression attribute names , see < a href = * " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / Expressions . AccessingItemAttributes . html " * > Accessing Item Attributes < / a > in the < i > Amazon DynamoDB Developer Guide < / i > . 
* @ return Returns a reference to this object so that method calls can be chained together . */ public PutItemRequest withExpressionAttributeNames ( java . util . Map < String , String > expressionAttributeNames ) { } }
setExpressionAttributeNames ( expressionAttributeNames ) ; return this ;
public class responderpolicy_stats { /** * Use this API to fetch statistics of responderpolicy _ stats resource of given name . */ public static responderpolicy_stats get ( nitro_service service , String name ) throws Exception { } }
responderpolicy_stats obj = new responderpolicy_stats ( ) ; obj . set_name ( name ) ; responderpolicy_stats response = ( responderpolicy_stats ) obj . stat_resource ( service ) ; return response ;
public class LuceneUtil { /** * Check if the file is a segments _ N file * @ param name * @ return true if the file is a segments _ N file */ public static boolean isSegmentsFile ( String name ) { } }
return name . startsWith ( IndexFileNames . SEGMENTS ) && ! name . equals ( IndexFileNames . SEGMENTS_GEN ) ;
public class ModelUtils { /** * The binary name of the type as a String . * @ param typeElement The type element * @ return The class name */ String simpleBinaryNameFor ( TypeElement typeElement ) { } }
Name elementBinaryName = elementUtils . getBinaryName ( typeElement ) ; PackageElement packageElement = elementUtils . getPackageOf ( typeElement ) ; String packageName = packageElement . getQualifiedName ( ) . toString ( ) ; return elementBinaryName . toString ( ) . replaceFirst ( packageName + "\\." , "" ) ;
public class ConnectionContextFactory { /** * Creates a message producer using the context ' s session and destination . * @ param context the context where the new producer is stored * @ throws JMSException any error * @ throws IllegalStateException if the context is null or the context ' s session is null * or the context ' s destination is null */ protected void createProducer ( ProducerConnectionContext context ) throws JMSException { } }
if ( context == null ) { throw new IllegalStateException ( "The context is null" ) ; } Session session = context . getSession ( ) ; if ( session == null ) { throw new IllegalStateException ( "The context had a null session" ) ; } Destination dest = context . getDestination ( ) ; if ( dest == null ) { throw new IllegalStateException ( "The context had a null destination" ) ; } MessageProducer producer = session . createProducer ( dest ) ; context . setMessageProducer ( producer ) ;
public class FlowletProgramRunner { /** * Create a initializer to be executed during the flowlet driver initialization . */ private Service createServiceHook ( String flowletName , Iterable < ConsumerSupplier < ? > > consumerSuppliers , AtomicReference < FlowletProgramController > controller ) { } }
final List < String > streams = Lists . newArrayList ( ) ; for ( ConsumerSupplier < ? > consumerSupplier : consumerSuppliers ) { QueueName queueName = consumerSupplier . getQueueName ( ) ; if ( queueName . isStream ( ) ) { streams . add ( queueName . getSimpleName ( ) ) ; } } // If no stream , returns a no - op Service if ( streams . isEmpty ( ) ) { return new AbstractService ( ) { @ Override protected void doStart ( ) { notifyStarted ( ) ; } @ Override protected void doStop ( ) { notifyStopped ( ) ; } } ; } return new FlowletServiceHook ( flowletName , streams , controller ) ;
public class WorkspacePropertiesMarshaller { /** * Marshall the given parameter object . */ public void marshall ( WorkspaceProperties workspaceProperties , ProtocolMarshaller protocolMarshaller ) { } }
if ( workspaceProperties == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( workspaceProperties . getRunningMode ( ) , RUNNINGMODE_BINDING ) ; protocolMarshaller . marshall ( workspaceProperties . getRunningModeAutoStopTimeoutInMinutes ( ) , RUNNINGMODEAUTOSTOPTIMEOUTINMINUTES_BINDING ) ; protocolMarshaller . marshall ( workspaceProperties . getRootVolumeSizeGib ( ) , ROOTVOLUMESIZEGIB_BINDING ) ; protocolMarshaller . marshall ( workspaceProperties . getUserVolumeSizeGib ( ) , USERVOLUMESIZEGIB_BINDING ) ; protocolMarshaller . marshall ( workspaceProperties . getComputeTypeName ( ) , COMPUTETYPENAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class StAXDecoder { /** * Returns true if the cursor points to a character data event that consists * of all whitespace * ( non - Javadoc ) * @ see javax . xml . stream . XMLStreamReader # isWhiteSpace ( ) */ public boolean isWhiteSpace ( ) { } }
switch ( getEventType ( ) ) { case XMLStreamConstants . CHARACTERS : return this . characters . toString ( ) . trim ( ) . length ( ) == 0 ; case XMLStreamConstants . CDATA : return false ; case XMLStreamConstants . COMMENT : return false ; case XMLStreamConstants . SPACE : return true ; default : return false ; }
public class Calendar { /** * Returns a field mask indicating which calendar field values * to be used to calculate the time value . The calendar fields are * returned as a bit mask , each bit of which corresponds to a field , i . e . , * the mask value of < code > field < / code > is < code > ( 1 & lt ; & lt ; * field ) < / code > . For example , 0x26 represents the < code > YEAR < / code > , * < code > MONTH < / code > , and < code > DAY _ OF _ MONTH < / code > fields ( i . e . , 0x26 is * equal to * < code > ( 1 & lt ; & lt ; YEAR ) | ( 1 & lt ; & lt ; MONTH ) | ( 1 & lt ; & lt ; DAY _ OF _ MONTH ) ) < / code > . * < p > This method supports the calendar fields resolution as described in * the class description . If the bit mask for a given field is on and its * field has not been set ( i . e . , < code > isSet ( field ) < / code > is * < code > false < / code > ) , then the default value of the field has to be * used , which case means that the field has been selected because the * selected combination involves the field . * @ return a bit mask of selected fields * @ see # isExternallySet ( int ) */ final int selectFields ( ) { } }
// This implementation has been taken from the GregorianCalendar class . // The YEAR field must always be used regardless of its SET // state because YEAR is a mandatory field to determine the date // and the default value ( EPOCH _ YEAR ) may change through the // normalization process . int fieldMask = YEAR_MASK ; if ( stamp [ ERA ] != UNSET ) { fieldMask |= ERA_MASK ; } // Find the most recent group of fields specifying the day within // the year . These may be any of the following combinations : // MONTH + DAY _ OF _ MONTH // MONTH + WEEK _ OF _ MONTH + DAY _ OF _ WEEK // MONTH + DAY _ OF _ WEEK _ IN _ MONTH + DAY _ OF _ WEEK // DAY _ OF _ YEAR // WEEK _ OF _ YEAR + DAY _ OF _ WEEK // We look for the most recent of the fields in each group to determine // the age of the group . For groups involving a week - related field such // as WEEK _ OF _ MONTH , DAY _ OF _ WEEK _ IN _ MONTH , or WEEK _ OF _ YEAR , both the // week - related field and the DAY _ OF _ WEEK must be set for the group as a // whole to be considered . ( See bug 4153860 - liu 7/24/98 . ) int dowStamp = stamp [ DAY_OF_WEEK ] ; int monthStamp = stamp [ MONTH ] ; int domStamp = stamp [ DAY_OF_MONTH ] ; int womStamp = aggregateStamp ( stamp [ WEEK_OF_MONTH ] , dowStamp ) ; int dowimStamp = aggregateStamp ( stamp [ DAY_OF_WEEK_IN_MONTH ] , dowStamp ) ; int doyStamp = stamp [ DAY_OF_YEAR ] ; int woyStamp = aggregateStamp ( stamp [ WEEK_OF_YEAR ] , dowStamp ) ; int bestStamp = domStamp ; if ( womStamp > bestStamp ) { bestStamp = womStamp ; } if ( dowimStamp > bestStamp ) { bestStamp = dowimStamp ; } if ( doyStamp > bestStamp ) { bestStamp = doyStamp ; } if ( woyStamp > bestStamp ) { bestStamp = woyStamp ; } /* No complete combination exists . Look for WEEK _ OF _ MONTH , * DAY _ OF _ WEEK _ IN _ MONTH , or WEEK _ OF _ YEAR alone . Treat DAY _ OF _ WEEK alone * as DAY _ OF _ WEEK _ IN _ MONTH . */ if ( bestStamp == UNSET ) { womStamp = stamp [ WEEK_OF_MONTH ] ; dowimStamp = Math . 
max ( stamp [ DAY_OF_WEEK_IN_MONTH ] , dowStamp ) ; woyStamp = stamp [ WEEK_OF_YEAR ] ; bestStamp = Math . max ( Math . max ( womStamp , dowimStamp ) , woyStamp ) ; /* Treat MONTH alone or no fields at all as DAY _ OF _ MONTH . This may * result in bestStamp = domStamp = UNSET if no fields are set , * which indicates DAY _ OF _ MONTH . */ if ( bestStamp == UNSET ) { bestStamp = domStamp = monthStamp ; } } if ( bestStamp == domStamp || ( bestStamp == womStamp && stamp [ WEEK_OF_MONTH ] >= stamp [ WEEK_OF_YEAR ] ) || ( bestStamp == dowimStamp && stamp [ DAY_OF_WEEK_IN_MONTH ] >= stamp [ WEEK_OF_YEAR ] ) ) { fieldMask |= MONTH_MASK ; if ( bestStamp == domStamp ) { fieldMask |= DAY_OF_MONTH_MASK ; } else { assert ( bestStamp == womStamp || bestStamp == dowimStamp ) ; if ( dowStamp != UNSET ) { fieldMask |= DAY_OF_WEEK_MASK ; } if ( womStamp == dowimStamp ) { // When they are equal , give the priority to // WEEK _ OF _ MONTH for compatibility . if ( stamp [ WEEK_OF_MONTH ] >= stamp [ DAY_OF_WEEK_IN_MONTH ] ) { fieldMask |= WEEK_OF_MONTH_MASK ; } else { fieldMask |= DAY_OF_WEEK_IN_MONTH_MASK ; } } else { if ( bestStamp == womStamp ) { fieldMask |= WEEK_OF_MONTH_MASK ; } else { assert ( bestStamp == dowimStamp ) ; if ( stamp [ DAY_OF_WEEK_IN_MONTH ] != UNSET ) { fieldMask |= DAY_OF_WEEK_IN_MONTH_MASK ; } } } } } else { assert ( bestStamp == doyStamp || bestStamp == woyStamp || bestStamp == UNSET ) ; if ( bestStamp == doyStamp ) { fieldMask |= DAY_OF_YEAR_MASK ; } else { assert ( bestStamp == woyStamp ) ; if ( dowStamp != UNSET ) { fieldMask |= DAY_OF_WEEK_MASK ; } fieldMask |= WEEK_OF_YEAR_MASK ; } } // Find the best set of fields specifying the time of day . There // are only two possibilities here ; the HOUR _ OF _ DAY or the // AM _ PM and the HOUR . int hourOfDayStamp = stamp [ HOUR_OF_DAY ] ; int hourStamp = aggregateStamp ( stamp [ HOUR ] , stamp [ AM_PM ] ) ; bestStamp = ( hourStamp > hourOfDayStamp ) ? 
hourStamp : hourOfDayStamp ; // if bestStamp is still UNSET , then take HOUR or AM _ PM . ( See 4846659) if ( bestStamp == UNSET ) { bestStamp = Math . max ( stamp [ HOUR ] , stamp [ AM_PM ] ) ; } // Hours if ( bestStamp != UNSET ) { if ( bestStamp == hourOfDayStamp ) { fieldMask |= HOUR_OF_DAY_MASK ; } else { fieldMask |= HOUR_MASK ; if ( stamp [ AM_PM ] != UNSET ) { fieldMask |= AM_PM_MASK ; } } } if ( stamp [ MINUTE ] != UNSET ) { fieldMask |= MINUTE_MASK ; } if ( stamp [ SECOND ] != UNSET ) { fieldMask |= SECOND_MASK ; } if ( stamp [ MILLISECOND ] != UNSET ) { fieldMask |= MILLISECOND_MASK ; } if ( stamp [ ZONE_OFFSET ] >= MINIMUM_USER_STAMP ) { fieldMask |= ZONE_OFFSET_MASK ; } if ( stamp [ DST_OFFSET ] >= MINIMUM_USER_STAMP ) { fieldMask |= DST_OFFSET_MASK ; } return fieldMask ;
public class DeleteGroupRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param deleteGroupRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the bound field
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(DeleteGroupRequest deleteGroupRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteGroupRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the group ID is sent for a delete request.
            protocolMarshaller.marshall(deleteGroupRequest.getGroupId(), GROUPID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CommonsOJBLockManager { /** * Tries to acquire a lock on a resource . < br > * < br > * This method does not block , but immediatly returns . If a lock is not * available < code > false < / code > will be returned . * @ param ownerId a unique id identifying the entity that wants to acquire this * lock * @ param resourceId the resource to get the level for * @ param targetLockLevel the lock level to acquire * @ param reentrant < code > true < / code > if this request shall not be influenced by * other locks held by the same owner * @ param isolationId the isolation level identity key . See { @ link CommonsOJBLockManager } . * @ return < code > true < / code > if the lock has been acquired , < code > false < / code > otherwise */ public boolean tryLock ( Object ownerId , Object resourceId , int targetLockLevel , boolean reentrant , Object isolationId ) { } }
timeoutCheck ( ownerId ) ; OJBLock lock = atomicGetOrCreateLock ( resourceId , isolationId ) ; boolean acquired = lock . tryLock ( ownerId , targetLockLevel , reentrant ? GenericLock . COMPATIBILITY_REENTRANT : GenericLock . COMPATIBILITY_NONE , false ) ; if ( acquired ) { addOwner ( ownerId , lock ) ; } return acquired ;
public class CreateTransitGatewayVpcAttachmentRequest { /** * The tags to apply to the VPC attachment . * @ param tagSpecifications * The tags to apply to the VPC attachment . */ public void setTagSpecifications ( java . util . Collection < TagSpecification > tagSpecifications ) { } }
if ( tagSpecifications == null ) { this . tagSpecifications = null ; return ; } this . tagSpecifications = new com . amazonaws . internal . SdkInternalList < TagSpecification > ( tagSpecifications ) ;
public class JarConfigurationProvider { /** * Registers the given set of property keys for the view with name * < code > propertyView < / code > and the given prefix of entities with the * given type . * @ param type the type of the entities for which the view will be * registered * @ param propertyView the name of the property view for which the * property set will be registered * @ param propertySet the set of property keys to register for the given * view */ @ Override public void registerPropertySet ( Class type , String propertyView , PropertyKey ... propertySet ) { } }
Map < String , Set < PropertyKey > > propertyViewMap = getPropertyViewMapForType ( type ) ; Set < PropertyKey > properties = propertyViewMap . get ( propertyView ) ; if ( properties == null ) { properties = new LinkedHashSet < > ( ) ; propertyViewMap . put ( propertyView , properties ) ; } // allow properties to override existing ones as they // are most likely from a more concrete class . for ( final PropertyKey key : propertySet ) { // property keys are referenced by their names , // that ' s why we seemingly remove the existing // key , but the set does not differentiate // between different keys if ( properties . contains ( key ) ) { properties . remove ( key ) ; } properties . add ( key ) ; }
public class SuspensionRecord {

    /**
     * Find all suspension records for a given user.
     *
     * @param em The EntityManager to use.
     * @param user The user for which to retrieve records.
     * @return A list of suspension records for the given user. An empty list if no such records exist.
     */
    public static List<SuspensionRecord> findByUser(EntityManager em, PrincipalUser user) {
        TypedQuery<SuspensionRecord> query = em.createNamedQuery("SuspensionRecord.findByUser", SuspensionRecord.class);
        try {
            query.setParameter("user", user);
            return query.getResultList();
        } catch (NoResultException ex) {
            // NOTE(review): per JPA, getResultList() returns an empty list and does
            // not throw NoResultException (only getSingleResult() does), so this
            // catch looks like dead code — confirm before removing.
            return new ArrayList<>(0);
        }
    }
}
public class ConnectionSources { /** * A ConnectionSource that will wrap externally managed connection * with proxy that will omit { @ link Connection # close ( ) } or { @ link Connection # commit ( ) } calls . * This is useful to make { @ link org . sql2o . Connection } work with externally managed transactions * @ param connection connection to wrap * @ return a connection wrapper that represent a nested connection */ public static ConnectionSource join ( final Connection connection ) { } }
return new ConnectionSource ( ) { @ Override public Connection getConnection ( ) throws SQLException { return new NestedConnection ( connection ) ; } } ;
public class CoverageUtilities { /** * Utility method to get col and row of a coordinate from a { @ link GridGeometry2D } . * @ param coordinate the coordinate to transform . * @ param gridGeometry the gridgeometry to use . * @ param point if not < code > null < / code > , the row col values are put inside the supplied point ' s x and y . * @ return the array with [ col , row ] or < code > null < / code > if something went wrong . */ public static int [ ] colRowFromCoordinate ( Coordinate coordinate , GridGeometry2D gridGeometry , Point point ) { } }
try { DirectPosition pos = new DirectPosition2D ( coordinate . x , coordinate . y ) ; GridCoordinates2D worldToGrid = gridGeometry . worldToGrid ( pos ) ; if ( point != null ) { point . x = worldToGrid . x ; point . y = worldToGrid . y ; } return new int [ ] { worldToGrid . x , worldToGrid . y } ; } catch ( InvalidGridGeometryException e ) { e . printStackTrace ( ) ; } catch ( TransformException e ) { e . printStackTrace ( ) ; } point . x = Integer . MAX_VALUE ; point . y = Integer . MAX_VALUE ; return null ;
public class ThreadIdentityManager { /** * Add a ThreadIdentityService reference . This method is called by * ThreadIdentityManagerConfigurator when a ThreadIdentityService shows * up in the OSGI framework . * @ param tis */ public static void addThreadIdentityService ( ThreadIdentityService tis ) { } }
if ( tis != null ) { threadIdentityServices . add ( tis ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "A ThreadIdentityService implementation was added." , tis . getClass ( ) . getName ( ) ) ; } }
public class ProjectApi { /** * Get a Pager of project events for specific project . Sorted from newest to latest . * < pre > < code > GET / projects / : id / events < / code > < / pre > * @ param projectIdOrPath projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance , required * @ param itemsPerPage the number of Project instances that will be fetched per page * @ return a Pager of project events for the specified project * @ throws GitLabApiException if any exception occurs */ public Pager < Event > getProjectEvents ( Object projectIdOrPath , int itemsPerPage ) throws GitLabApiException { } }
return ( new Pager < Event > ( this , Event . class , itemsPerPage , null , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "events" ) ) ;
public class CommerceWishListPersistenceImpl { /** * Returns the last commerce wish list in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; . * @ param uuid the uuid * @ param companyId the company ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce wish list * @ throws NoSuchWishListException if a matching commerce wish list could not be found */ @ Override public CommerceWishList findByUuid_C_Last ( String uuid , long companyId , OrderByComparator < CommerceWishList > orderByComparator ) throws NoSuchWishListException { } }
CommerceWishList commerceWishList = fetchByUuid_C_Last ( uuid , companyId , orderByComparator ) ; if ( commerceWishList != null ) { return commerceWishList ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", companyId=" ) ; msg . append ( companyId ) ; msg . append ( "}" ) ; throw new NoSuchWishListException ( msg . toString ( ) ) ;
public class AptControlInterface { /** * Returns the total number of operations for this control interface */ public int getEventSetCount ( ) { } }
int count = _eventSets . size ( ) ; if ( _superClass != null ) count += _superClass . getEventSetCount ( ) ; return count ;
public class PoolsImpl { /** * Lists all of the pools in the specified account . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; CloudPool & gt ; object */ public Observable < ServiceResponseWithHeaders < Page < CloudPool > , PoolListHeaders > > listWithServiceResponseAsync ( ) { } }
return listSinglePageAsync ( ) . concatMap ( new Func1 < ServiceResponseWithHeaders < Page < CloudPool > , PoolListHeaders > , Observable < ServiceResponseWithHeaders < Page < CloudPool > , PoolListHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < Page < CloudPool > , PoolListHeaders > > call ( ServiceResponseWithHeaders < Page < CloudPool > , PoolListHeaders > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( listNextWithServiceResponseAsync ( nextPageLink , null ) ) ; } } ) ;
public class ArraysUtil { /** * 2 - D Integer array to double array . * @ param array Integer array . * @ return Double array . */ public static double [ ] [ ] toDouble ( int [ ] [ ] array ) { } }
double [ ] [ ] n = new double [ array . length ] [ array [ 0 ] . length ] ; for ( int i = 0 ; i < array . length ; i ++ ) { for ( int j = 0 ; j < array [ 0 ] . length ; j ++ ) { n [ i ] [ j ] = ( double ) array [ i ] [ j ] ; } } return n ;
public class WorkflowClient { /** * Retrieve all running workflow instances for a given name and version * @ param workflowName the name of the workflow * @ param version the version of the wokflow definition . Defaults to 1. * @ return the list of running workflow instances */ public List < String > getRunningWorkflow ( String workflowName , @ Nullable Integer version ) { } }
Preconditions . checkArgument ( StringUtils . isNotBlank ( workflowName ) , "Workflow name cannot be blank" ) ; WorkflowServicePb . GetRunningWorkflowsResponse workflows = stub . getRunningWorkflows ( WorkflowServicePb . GetRunningWorkflowsRequest . newBuilder ( ) . setName ( workflowName ) . setVersion ( version == null ? 1 : version ) . build ( ) ) ; return workflows . getWorkflowIdsList ( ) ;
public class XYPlotVisualization { /** * Setup the CSS classes for the plot . * @ param svgp Plot * @ param plot Plot to render */ private void setupCSS ( VisualizerContext context , SVGPlot svgp , XYPlot plot ) { } }
StyleLibrary style = context . getStyleLibrary ( ) ; for ( XYPlot . Curve curve : plot ) { CSSClass csscls = new CSSClass ( this , SERIESID + curve . getColor ( ) ) ; // csscls . setStatement ( SVGConstants . SVG _ STROKE _ WIDTH _ ATTRIBUTE , " 0.2 % " ) ; csscls . setStatement ( SVGConstants . SVG_FILL_ATTRIBUTE , SVGConstants . SVG_NONE_VALUE ) ; style . lines ( ) . formatCSSClass ( csscls , curve . getColor ( ) , style . getLineWidth ( StyleLibrary . XYCURVE ) ) ; svgp . addCSSClassOrLogError ( csscls ) ; } // Axis label CSSClass label = new CSSClass ( this , CSS_AXIS_LABEL ) ; label . setStatement ( SVGConstants . CSS_FILL_PROPERTY , style . getTextColor ( StyleLibrary . XYCURVE ) ) ; label . setStatement ( SVGConstants . CSS_FONT_FAMILY_PROPERTY , style . getFontFamily ( StyleLibrary . XYCURVE ) ) ; label . setStatement ( SVGConstants . CSS_FONT_SIZE_PROPERTY , style . getTextSize ( StyleLibrary . XYCURVE ) ) ; label . setStatement ( SVGConstants . CSS_TEXT_ANCHOR_PROPERTY , SVGConstants . CSS_MIDDLE_VALUE ) ; svgp . addCSSClassOrLogError ( label ) ; svgp . updateStyleElement ( ) ;
public class EqualityInference { /** * Determines whether an Expression may be successfully applied to the equality inference */ public static Predicate < Expression > isInferenceCandidate ( ) { } }
return expression -> { expression = normalizeInPredicateToEquality ( expression ) ; if ( expression instanceof ComparisonExpression && isDeterministic ( expression ) && ! mayReturnNullOnNonNullInput ( expression ) ) { ComparisonExpression comparison = ( ComparisonExpression ) expression ; if ( comparison . getOperator ( ) == ComparisonExpression . Operator . EQUAL ) { // We should only consider equalities that have distinct left and right components return ! comparison . getLeft ( ) . equals ( comparison . getRight ( ) ) ; } } return false ; } ;
public class SortableHeaderRenderer {

    /**
     * Get the ascending/descending icon, lazily loading both icons from the
     * classpath on first use.
     *
     * @param bCurrentOrder true for the ascending icon, false for descending
     * @return the requested icon, or null if the icons could not be loaded
     */
    protected Icon getHeaderRendererIcon(boolean bCurrentOrder) {
        // Get current classloader
        ClassLoader cl = this.getClass().getClassLoader();
        // Create icons (one-time lazy initialization of both static fields)
        try {
            if (ASCENDING_ICON == null) {
                ASCENDING_ICON = new ImageIcon(cl.getResource("images/buttons/" + ASCENDING_ICON_NAME + ".gif"));
                DESCENDING_ICON = new ImageIcon(cl.getResource("images/buttons/" + DESCENDING_ICON_NAME + ".gif"));
            }
            return bCurrentOrder ? ASCENDING_ICON : DESCENDING_ICON;
        } catch (Exception ex) {
            // NOTE(review): failures (e.g. a missing resource causing an NPE) are
            // silently swallowed and null is returned; consider at least logging
            // the exception before relying on the null fallback.
        }
        return null;
    }
}
public class DebugSubstance { /** * { @ inheritDoc } */ @ Override public Double getMultiplier ( IAtomContainer container ) { } }
logger . debug ( "Getting multiplier for atom container: " , container ) ; return super . getMultiplier ( container ) ;
public class L1CacheRepositoryDecorator { /** * Evict all entries for entity types referred to by this entity type through a bidirectional * relation . */ private void evictBiDiReferencedEntityTypes ( ) { } }
getEntityType ( ) . getMappedByAttributes ( ) . map ( Attribute :: getRefEntity ) . forEach ( l1Cache :: evictAll ) ; getEntityType ( ) . getInversedByAttributes ( ) . map ( Attribute :: getRefEntity ) . forEach ( l1Cache :: evictAll ) ;
public class AbstractGuacamoleTunnelService {

    /**
     * Creates a tunnel for the given user which connects to the given
     * connection, which MUST already be acquired via acquire(). The given
     * client information will be passed to guacd when the connection is
     * established.
     *
     * The connection will be automatically released when it closes, or if it
     * fails to establish entirely.
     *
     * @param activeConnection
     *     The active connection record of the connection in use.
     * @param info
     *     Information describing the Guacamole client connecting to the given
     *     connection.
     * @param tokens
     *     A Map containing the token names and corresponding values to be
     *     applied as parameter tokens when establishing the connection.
     * @param interceptErrors
     *     Whether errors from the upstream remote desktop should be
     *     intercepted and rethrown as GuacamoleUpstreamExceptions.
     * @return
     *     A new GuacamoleTunnel which is configured and connected to the given
     *     connection.
     * @throws GuacamoleException
     *     If an error occurs while the connection is being established, or
     *     while connection configuration information is being retrieved.
     */
    private GuacamoleTunnel assignGuacamoleTunnel(ActiveConnectionRecord activeConnection,
            GuacamoleClientInformation info, Map<String, String> tokens,
            boolean interceptErrors) throws GuacamoleException {

        // Record new active connection
        Runnable cleanupTask = new ConnectionCleanupTask(activeConnection);
        activeTunnels.put(activeConnection.getUUID().toString(), activeConnection);

        try {

            GuacamoleConfiguration config;

            // Retrieve connection information associated with given connection record
            ModeledConnection connection = activeConnection.getConnection();

            // Pull configuration directly from the connection if we are not
            // joining an active connection
            if (activeConnection.isPrimaryConnection()) {
                activeConnections.put(connection.getIdentifier(), activeConnection);
                activeConnectionGroups.put(connection.getParentIdentifier(), activeConnection);
                config = getGuacamoleConfiguration(activeConnection.getUser(), connection);
            }

            // If we ARE joining an active connection, generate a configuration
            // which does so
            else {

                // Verify that the connection ID is known
                String connectionID = activeConnection.getConnectionID();
                if (connectionID == null)
                    throw new GuacamoleResourceNotFoundException("No existing connection to be joined.");

                // Build configuration from the sharing profile and the ID of
                // the connection being joined
                config = getGuacamoleConfiguration(activeConnection.getUser(),
                        activeConnection.getSharingProfile(), connectionID);

            }

            // Build token filter containing credential tokens
            TokenFilter tokenFilter = new TokenFilter();
            tokenFilter.setTokens(tokens);

            // Filter the configuration
            tokenFilter.filterValues(config.getParameters());

            // Obtain socket which will automatically run the cleanup task
            // when the underlying connection terminates
            ConfiguredGuacamoleSocket socket = new ConfiguredGuacamoleSocket(
                    getUnconfiguredGuacamoleSocket(connection.getGuacamoleProxyConfiguration(), cleanupTask),
                    config, info);

            // Assign and return new tunnel, optionally wrapping the socket so
            // upstream errors are intercepted
            if (interceptErrors)
                return activeConnection.assignGuacamoleTunnel(new FailoverGuacamoleSocket(socket), socket.getConnectionID());
            else
                return activeConnection.assignGuacamoleTunnel(socket, socket.getConnectionID());

        }

        // Execute cleanup if socket could not be created
        catch (GuacamoleException e) {
            cleanupTask.run();
            throw e;
        }

    }
}
public class AmazonElasticLoadBalancingClient { /** * Deregisters the specified targets from the specified target group . After the targets are deregistered , they no * longer receive traffic from the load balancer . * @ param deregisterTargetsRequest * @ return Result of the DeregisterTargets operation returned by the service . * @ throws TargetGroupNotFoundException * The specified target group does not exist . * @ throws InvalidTargetException * The specified target does not exist , is not in the same VPC as the target group , or has an unsupported * instance type . * @ sample AmazonElasticLoadBalancing . DeregisterTargets * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticloadbalancingv2-2015-12-01 / DeregisterTargets " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeregisterTargetsResult deregisterTargets ( DeregisterTargetsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeregisterTargets ( request ) ;
public class JDBCDriverService { /** * Declarative Services method for unsetting the SharedLibrary service * @ param lib the service */ protected void unsetSharedLib ( Library lib ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "unsetSharedLib" , lib ) ; modified ( null , false ) ;
public class AbstractTaggerTrainer {

    /**
     * Trains a POS tagger model from the configured training samples,
     * evaluates it against the test samples, prints the word accuracy,
     * and returns the trained model.
     *
     * (non-Javadoc)
     * @see es.ehu.si.ixa.pipe.pos.train.Trainer#train(opennlp.tools.util.TrainingParameters)
     */
    public final POSModel train(final TrainingParameters params) {
        // features: subclasses must have provided a factory before training
        if (getPosTaggerFactory() == null) {
            throw new IllegalStateException("Classes derived from AbstractTrainer must " + " create a POSTaggerFactory features!");
        }
        // training model
        POSModel trainedModel = null;
        POSEvaluator posEvaluator = null;
        try {
            trainedModel = POSTaggerME.train(this.lang, this.trainSamples, params, getPosTaggerFactory());
            final POSTaggerME posTagger = new POSTaggerME(trainedModel);
            posEvaluator = new POSEvaluator(posTagger);
            posEvaluator.evaluate(this.testSamples);
        } catch (final IOException e) {
            // NOTE(review): terminating the whole JVM from a library method is
            // drastic; consider propagating the IOException instead. Kept as-is
            // to preserve existing behavior.
            System.err.println("IO error while loading training and test sets!");
            e.printStackTrace();
            System.exit(1);
        }
        System.out.println("Final result: " + posEvaluator.getWordAccuracy());
        return trainedModel;
    }
}
public class CncOrd {

    /**
     * <p>It cancels all given buyer's orders, e.g. when the buyer has not
     * paid online after accepting (booking) orders. It changes item
     * availability and order status to the given NEW or CANCELED.</p>
     *
     * @param pRqVs additional request scoped parameters
     * @param pBuyr buyer
     * @param pPurId purchase ID
     * @param pStFr usually BOOKED
     * @param pStTo usually NEW
     * @throws Exception - an exception
     */
    @Override
    public final void cancel(final Map<String, Object> pRqVs, final OnlineBuyer pBuyr,
            final Long pPurId, final EOrdStat pStFr, final EOrdStat pStTo) throws Exception {
        List<CustOrder> ords = null;
        List<CuOrSe> sords = null;
        String tbn = CustOrder.class.getSimpleName();
        // Restrict to this buyer's orders in the given purchase with the "from" status.
        String wheStBr = "where STAT=" + pStFr.ordinal() + " and BUYER=" + pBuyr.getItsId() + " and PUR=" + pPurId;
        // ORM retrieval hints: limit fetched fields and relation depth.
        // They are put into the request-scoped map before each query and
        // removed afterwards; the ordering of these put/remove calls matters.
        Set<String> ndFlNm = new HashSet<String>();
        ndFlNm.add("itsId");
        ndFlNm.add("itsName");
        pRqVs.put("PickUpPlaceneededFields", ndFlNm);
        pRqVs.put(tbn + "buyerdeepLevel", 1);
        ords = this.srvOrm.retrieveListWithConditions(pRqVs, CustOrder.class, wheStBr);
        pRqVs.remove(tbn + "buyerdeepLevel");
        // Cancel each of the buyer's own orders.
        for (CustOrder co : ords) {
            cancel(pRqVs, co, pStTo);
        }
        // Same procedure for seller ("SE") orders of this purchase.
        tbn = CuOrSe.class.getSimpleName();
        Set<String> ndFlDc = new HashSet<String>();
        ndFlDc.add("seller");
        pRqVs.put("SeSellerneededFields", ndFlDc);
        pRqVs.put("DebtorCreditorneededFields", ndFlNm);
        pRqVs.put(tbn + "seldeepLevel", 3);
        pRqVs.put(tbn + "buyerdeepLevel", 1);
        sords = this.srvOrm.retrieveListWithConditions(pRqVs, CuOrSe.class, wheStBr);
        pRqVs.remove("DebtorCreditorneededFields");
        pRqVs.remove("SeSellerneededFields");
        pRqVs.remove(tbn + "seldeepLevel");
        pRqVs.remove(tbn + "buyerdeepLevel");
        pRqVs.remove("PickUpPlaceneededFields");
        for (CuOrSe co : sords) {
            cancel(pRqVs, co, pStTo);
        }
    }
}
public class OFFDumpRetriever { /** * Untar an input file into an output file . * The output file is created in the output folder , having the same name * as the input file , minus the ' . tar ' extension . * @ param inputFile the input . tar file * @ param outputDir the output directory file . * @ throws IOException * @ throws FileNotFoundException * @ return The { @ link List } of { @ link File } s with the untared content . * @ throws ArchiveException */ private List < File > unTar ( final File inputFile , final File outputDir ) throws FileNotFoundException , IOException , ArchiveException { } }
_log . info ( String . format ( "Untaring %s to dir %s." , inputFile . getAbsolutePath ( ) , outputDir . getAbsolutePath ( ) ) ) ; final List < File > untaredFiles = new LinkedList < File > ( ) ; final InputStream is = new FileInputStream ( inputFile ) ; final TarArchiveInputStream debInputStream = ( TarArchiveInputStream ) new ArchiveStreamFactory ( ) . createArchiveInputStream ( "tar" , is ) ; TarArchiveEntry entry = null ; while ( ( entry = ( TarArchiveEntry ) debInputStream . getNextEntry ( ) ) != null ) { final File outputFile = new File ( outputDir , entry . getName ( ) ) ; if ( entry . isDirectory ( ) ) { _log . info ( String . format ( "Attempting to write output directory %s." , outputFile . getAbsolutePath ( ) ) ) ; if ( ! outputFile . exists ( ) ) { _log . info ( String . format ( "Attempting to create output directory %s." , outputFile . getAbsolutePath ( ) ) ) ; if ( ! outputFile . mkdirs ( ) ) { throw new IllegalStateException ( String . format ( "Couldn't create directory %s." , outputFile . getAbsolutePath ( ) ) ) ; } } } else { _log . info ( String . format ( "Creating output file %s." , outputFile . getAbsolutePath ( ) ) ) ; final OutputStream outputFileStream = new FileOutputStream ( outputFile ) ; IOUtils . copy ( debInputStream , outputFileStream ) ; outputFileStream . close ( ) ; } untaredFiles . add ( outputFile ) ; } debInputStream . close ( ) ; return untaredFiles ;
public class MinimalMetaBean { /** * Obtains an instance of the meta - bean for immutable beans . * The properties will be determined using reflection to find the * { @ link PropertyDefinition } annotation . * @ param < B > the type of the bean * @ param beanType the bean type , not null * @ param builderSupplier the supplier of bean builders , not null * @ param getters the getter functions , not null * @ return the meta - bean , not null * @ deprecated Use version that takes the field names */ @ Deprecated @ SafeVarargs public static < B extends Bean > MinimalMetaBean < B > of ( Class < B > beanType , Supplier < BeanBuilder < B > > builderSupplier , Function < B , Object > ... getters ) { } }
if ( getters == null ) { throw new NullPointerException ( "Getter functions must not be null" ) ; } return new MinimalMetaBean < > ( beanType , fieldNames ( beanType ) , builderSupplier , Arrays . asList ( getters ) , null ) ;