signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ExportApi { /** * Export users . * Export the specified users with the properties you list in the * * fields * * parameter . * @ param fields fields . * @ param fileName the file name to be exported . * @ param personDBIDs DBIDs of users to be exported . * @ param filterParameters Filter parameters . * @ return Id of the export . * @ throws ProvisioningApiException if the call is unsuccessful . */ public String exportFile ( List < String > fields , String fileName , List < String > personDBIDs , ExportFilterParams filterParameters ) throws ProvisioningApiException { } }
try { ExportFileResponse resp = exportApi . exportFile ( new ExportFileData ( ) . fields ( fields ) . fileName ( fileName ) . personDBIDs ( personDBIDs ) . filterParameters ( Converters . convertExportFilterParamsToExportFileDataFilterParameters ( filterParameters ) ) ) ; if ( ! resp . getStatus ( ) . getCode ( ) . equals ( 0 ) ) { throw new ProvisioningApiException ( "Error exporting file. Code: " + resp . getStatus ( ) . getCode ( ) ) ; } return resp . getData ( ) . getId ( ) ; } catch ( ApiException e ) { throw new ProvisioningApiException ( "Error exporting file" , e ) ; }
public class RectifyImageOps {
    /**
     * Creates a transform that applies rectification to unrectified distorted
     * pixels and outputs normalized pixel coordinates.
     *
     * @param param    intrinsic parameters (Brown distortion model).
     * @param rectify  transform for rectifying the image.
     * @param rectifyK camera calibration matrix after rectification.
     * @return transform from unrectified pixels to rectified normalized pixels.
     */
    public static Point2Transform2_F64 transformPixelToRectNorm(CameraPinholeBrown param,
            DMatrixRMaj rectify, DMatrixRMaj rectifyK) {
        // Delegates to the double-precision (F64) implementation.
        return ImplRectifyImageOps_F64.transformPixelToRectNorm(param, rectify, rectifyK);
    }
}
public class IdentityMap { /** * Returns the key for the specified value , or null if it is not in the map . Note this traverses the entire map and compares * every value , which may be an expensive operation . * @ param identity If true , uses = = to compare the specified value with values in the map . If false , uses * { @ link # equals ( Object ) } . */ public K findKey ( Object value , boolean identity ) { } }
V [ ] valueTable = this . valueTable ; if ( value == null ) { K [ ] keyTable = this . keyTable ; for ( int i = capacity + stashSize ; i -- > 0 ; ) if ( keyTable [ i ] != null && valueTable [ i ] == null ) return keyTable [ i ] ; } else if ( identity ) { for ( int i = capacity + stashSize ; i -- > 0 ; ) if ( valueTable [ i ] == value ) return keyTable [ i ] ; } else { for ( int i = capacity + stashSize ; i -- > 0 ; ) if ( value . equals ( valueTable [ i ] ) ) return keyTable [ i ] ; } return null ;
public class CompletableFutures {
    /**
     * Gets the result now, when we know for sure that the future is complete.
     *
     * @param stage a stage that must already have completed successfully.
     * @return the completed value.
     * @throws IllegalArgumentException if the stage is not done or completed
     *         exceptionally (raised by the precondition check).
     */
    public static <T> T getCompleted(CompletionStage<T> stage) {
        CompletableFuture<T> future = stage.toCompletableFuture();
        Preconditions.checkArgument(future.isDone() && !future.isCompletedExceptionally());
        try {
            return future.get();
        } catch (InterruptedException | ExecutionException e) {
            // Neither can happen given the precondition: a successfully
            // completed future returns from get() without blocking.
            throw new AssertionError("Unexpected error", e);
        }
    }
}
public class ContextAwareReporter {
    /**
     * Called whenever a new {@link MetricContext} is added to the tree.
     * Registers the context for reporting only if it passes the configured
     * context filter.
     *
     * @param context new {@link MetricContext} added.
     */
    protected void newMetricContext(MetricContext context) {
        if (this.contextFilter.matches(context)) {
            this.contextsToReport.add(context.getInnerMetricContext());
        }
    }
}
public class Mediawiki { /** * get the result for the given action and query * @ param action * @ param params * @ return the API result for the action * @ throws Exception */ public Api getActionResult ( String action , String params ) throws Exception { } }
Api result = this . getActionResult ( action , params , null , null ) ; return result ;
public class RoundRobinPacking { /** * Sort the components in decreasing order based on their RAM requirements * @ return The sorted list of components and their RAM requirements */ private ArrayList < ResourceRequirement > getSortedRAMComponents ( Set < String > componentNames ) { } }
ArrayList < ResourceRequirement > resourceRequirements = new ArrayList < > ( ) ; Map < String , ByteAmount > ramMap = TopologyUtils . getComponentRamMapConfig ( topology ) ; for ( String componentName : componentNames ) { resourceRequirements . add ( new ResourceRequirement ( componentName , ramMap . getOrDefault ( componentName , ByteAmount . ZERO ) ) ) ; } Collections . sort ( resourceRequirements , SortingStrategy . RAM_FIRST . reversed ( ) ) ; return resourceRequirements ;
public class FlashOverlayCreative {
    /**
     * Sets the apiFramework value for this FlashOverlayCreative.
     *
     * @param apiFramework the API framework of the asset. This attribute is
     *        optional.
     */
    public void setApiFramework(com.google.api.ads.admanager.axis.v201811.ApiFramework apiFramework) {
        this.apiFramework = apiFramework;
    }
}
public class CollectorServiceImpl {
    /**
     * We want to initialize the Quasi-product collector when the API starts up
     * so that any existing Team dashboards will be added as CollectorItems.
     *
     * TODO - Is this the best home for this method??
     */
    @PostConstruct
    public void initProductCollectorOnStartup() {
        Collector productCollector = collectorRepository.findByName("Product");
        // Only bootstrap once: a non-null result means a previous startup
        // already created the "Product" collector and its items.
        if (productCollector == null) {
            productCollector = new Collector();
            productCollector.setName("Product");
            productCollector.setCollectorType(CollectorType.Product);
            productCollector.setEnabled(true);
            productCollector.setOnline(true);
            collectorRepository.save(productCollector);
            // Create collector items for existing team dashboards
            for (Dashboard dashboard : dashboardRepository.findTeamDashboards()) {
                CollectorItem item = new CollectorItem();
                item.setCollectorId(productCollector.getId());
                item.getOptions().put("dashboardId", dashboard.getId().toString());
                item.setDescription(dashboard.getTitle());
                collectorItemRepository.save(item);
            }
        }
    }
}
public class DateTimeFormat {
    /**
     * Creates a formatter for the specified style.
     *
     * @param dateStyle the date style.
     * @param timeStyle the time style.
     * @return the formatter.
     */
    private static DateTimeFormatter createDateTimeFormatter(int dateStyle, int timeStyle) {
        // A NONE style on either side narrows the formatter from a combined
        // date-time formatter to a time-only or date-only one.
        int type = DATETIME;
        if (dateStyle == NONE) {
            type = TIME;
        } else if (timeStyle == NONE) {
            type = DATE;
        }
        StyleFormatter llf = new StyleFormatter(dateStyle, timeStyle, type);
        // The same StyleFormatter instance serves as both printer and parser.
        return new DateTimeFormatter(llf, llf);
    }
}
public class StatBuckets { /** * Default is the latest result * @ param showStr * @ return */ public static String getShowTimeStr ( Integer time ) { } }
if ( time == null ) { return MINUTE_WINDOW_STR ; } else if ( time . equals ( MINUTE_WINDOW ) ) { return MINUTE_WINDOW_STR ; } else if ( time . equals ( HOUR_WINDOW ) ) { return HOUR_WINDOW_STR ; } else if ( time . equals ( DAY_WINDOW ) ) { return DAY_WINDOW_STR ; } else if ( time . equals ( ALL_TIME_WINDOW ) ) { return ALL_WINDOW_STR ; } else { return MINUTE_WINDOW_STR ; }
public class WindowUtil { /** * Returns true if the target widget is vertically scrolled into view . * @ param minPixels the minimum number of pixels that must be visible to count as " in view " . */ public static boolean isScrolledIntoView ( Widget target , int minPixels ) { } }
int wtop = Window . getScrollTop ( ) , wheight = Window . getClientHeight ( ) ; int ttop = target . getAbsoluteTop ( ) ; if ( ttop > wtop ) { return ( wtop + wheight - ttop > minPixels ) ; } else { return ( ttop + target . getOffsetHeight ( ) - wtop > minPixels ) ; }
public class AutotuningPCA {
    /**
     * Sorts an array of doubles in descending order, in place.
     *
     * @param a values to sort.
     * @return the same array, now in descending order.
     */
    private static double[] reversed(double[] a) {
        // Java offers no primitive descending sort, so sort ascending first
        // and then reverse the array with a two-pointer swap.
        Arrays.sort(a);
        int lo = 0;
        int hi = a.length - 1;
        while (lo < hi) {
            double tmp = a[lo];
            a[lo] = a[hi];
            a[hi] = tmp;
            lo++;
            hi--;
        }
        return a;
    }
}
public class PylonStream { /** * Create a stream instance containing only a hash * @ param str the hash obtained from DataSift for a stream * @ return an instance which can be used by the client */ public static PylonStream fromString ( String str ) { } }
if ( str == null || str . isEmpty ( ) ) { throw new IllegalArgumentException ( "Cannot create a stream from an empty or null string" ) ; } PylonStream stream = new PylonStream ( ) ; stream . hash = str ; return stream ;
public class TeamDataClient {
    /**
     * Updates the MongoDB with a JSONArray received from the source system
     * back-end with story-based data.
     *
     * @param tmpMongoDetailArray a JSON response in JSONArray format from the
     *        source system.
     */
    @Override
    protected void updateMongoInfo(JSONArray tmpMongoDetailArray) {
        for (Object obj : tmpMongoDetailArray) {
            JSONObject dataMainObj = (JSONObject) obj;
            Team team = new Team("", "");
            /*
             * Checks to see if the available asset state is not active from the
             * V1 Response and removes it if it exists and not active:
             */
            if (!getJSONString(dataMainObj, "AssetState").equalsIgnoreCase("Active")) {
                this.removeInactiveScopeOwnerByTeamId(getJSONString(dataMainObj, "_oid"));
            } else {
                // If this team already exists, keep its stored id and enabled
                // state so the save below acts as an update, not an insert.
                if (removeExistingEntity(getJSONString(dataMainObj, "_oid"))) {
                    team.setId(this.getOldTeamId());
                    team.setEnabled(this.isOldTeamEnabledState());
                }
                // collectorId
                team.setCollectorId(featureCollectorRepository
                        .findByName(FeatureCollectorConstants.VERSIONONE).getId());
                // teamId
                team.setTeamId(getJSONString(dataMainObj, "_oid"));
                // name
                team.setName(getJSONString(dataMainObj, "Name"));
                // changeDate
                team.setChangeDate(getJSONString(dataMainObj, "ChangeDate"));
                // assetState
                team.setAssetState(getJSONString(dataMainObj, "AssetState"));
                // isDeleted
                team.setIsDeleted(getJSONString(dataMainObj, "IsDeleted"));
                teamRepo.save(team);
            }
        }
    }
}
public class AbstractInputHandler {
    /**
     * Handles all downstream control messages, i.e. target control messages,
     * by dispatching each recognised {@link ControlMessageType} to the target
     * stream manager.
     *
     * @see com.ibm.ws.sib.processor.impl.interfaces.ControlHandler#handleControlMessage(com.ibm.ws.sib.trm.topology.Cellule, com.ibm.ws.sib.mfp.control.ControlMessage)
     */
    public void handleControlMessage(SIBUuid8 sourceMEUuid, ControlMessage cMsg)
            throws SIIncorrectCallException, SIErrorException, SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "handleControlMessage", new Object[] { sourceMEUuid, cMsg });
        // Next work out type of ControlMessage and process it
        ControlMessageType type = cMsg.getControlMessageType();
        // First check to see whether this is an "are you flushed" reply.
        // Such messages will not be mappable to a stream ID since we
        // don't yet have a stream data structure (that's why we sent the
        // query in the first place). Or... these could be stale messages
        // for streams we don't care about. Either way, handle them
        // elsewhere.
        if (type == ControlMessageType.FLUSHED) {
            _targetStreamManager.handleFlushedMessage((ControlFlushed) cMsg);
        } else if (type == ControlMessageType.NOTFLUSHED) {
            _targetStreamManager.handleNotFlushedMessage((ControlNotFlushed) cMsg);
        } else if (type == ControlMessageType.SILENCE) {
            _targetStreamManager.handleSilenceMessage((ControlSilence) cMsg);
        } else if (type == ControlMessageType.ACKEXPECTED) {
            _targetStreamManager.handleAckExpectedMessage((ControlAckExpected) cMsg);
        } else {
            // Not a recognised type
            // throw exception
            // NOTE(review): despite the comment above, no exception is thrown
            // here — unrecognised control messages are silently dropped.
            // Confirm whether this is intentional or an unfinished branch.
        }
    }
}
public class CQLService { /** * Get the { @ link PreparedStatement } for the given { @ link CQLStatementCache . Query } to * the given table name . If needed , the query statement is compiled and cached . * @ param query Query statement type . * @ param storeName Store ( ColumnFamily ) name . * @ return PreparedStatement for requested table / query . */ public PreparedStatement getPreparedQuery ( Query query , String storeName ) { } }
String tableName = storeToCQLName ( storeName ) ; return m_statementCache . getPreparedQuery ( tableName , query ) ;
public class ManagedPoolDataSource { /** * - - - - - internal implementation - - - - - */ private void doWait ( long loginTimeoutExpiration ) throws SQLException { } }
try { if ( loginTimeoutExpiration > 0 ) { long timeToWait = loginTimeoutExpiration - System . currentTimeMillis ( ) ; if ( timeToWait > 0 ) { this . wait ( timeToWait ) ; } else { throw new SQLException ( "No connections available within the given login timeout: " + getLoginTimeout ( ) ) ; } } else { this . wait ( ) ; } } catch ( InterruptedException e ) { throw new SQLException ( "Thread was interrupted while waiting for available connection" ) ; }
public class DefaultMethodConfigBuilder { /** * / * PARAMS SETTINGS METHODS */ public MethodConfigBuilder setParamsSerializer ( Class < ? extends Serializer > serializerClass ) { } }
for ( ParamConfigBuilder b : methodParamConfigBuilders ) { b . setSerializer ( serializerClass ) ; } return this ;
public class GreenPepperXmlRpcClient {
    /**
     * {@inheritDoc}
     *
     * Creates the specification on the remote server over XML-RPC and returns
     * the server's view of it.
     */
    @SuppressWarnings("unchecked")
    public Specification createSpecification(Specification specification, String identifier)
            throws GreenPepperServerException {
        // Marshal the specification into the Vector form the XML-RPC API expects.
        Vector params = CollectionUtil.toVector(specification.marshallize());
        log.debug("Creating Specification: " + specification.getName());
        Vector<Object> specificationParams =
                (Vector<Object>) execute(XmlRpcMethodName.createSpecification, params, identifier);
        // Unmarshal the server response back into a Specification object.
        return XmlRpcDataMarshaller.toSpecification(specificationParams);
    }
}
public class OffsetCharSequence { /** * { @ inheritDoc } */ public char charAt ( int index ) { } }
if ( transform == TRANSFORM_NONE ) { return base . charAt ( index + offset ) ; } else if ( transform == TRANSFORM_LOWER_CASE ) { return Character . toLowerCase ( base . charAt ( index + offset ) ) ; } else if ( transform == TRANSFORM_UPPER_CASE ) { return Character . toUpperCase ( base . charAt ( index + offset ) ) ; } // shouldn ' t get here . return plain character return base . charAt ( index + offset ) ;
public class PowerMock {
    /**
     * A utility method that may be used to mock several <b>static</b> methods
     * in an easy way (by just passing in the method names of the method you
     * wish to mock). Note that you cannot uniquely specify a method to mock
     * using this method if there are several methods with the same name in
     * {@code type}. This method will mock ALL methods that match the
     * supplied name regardless of parameter types and signature. If this is the
     * case you should fall-back on using the
     * {@link #mockStatic(Class, Method...)} method instead.
     *
     * @param clazz The class that contains the static methods that should be
     *        mocked.
     * @param methodNames The names of the methods that should be mocked. If
     *        {@code null}, then this method will have the same effect
     *        as just calling {@link #mockStatic(Class, Method...)} with the
     *        second parameter as {@code new Method[0]} (i.e. all
     *        methods in that class will be mocked).
     */
    public static synchronized void mockStaticPartial(Class<?> clazz, String... methodNames) {
        // Resolve every overload matching the given names, then delegate to
        // the Method-based mockStatic.
        mockStatic(clazz, Whitebox.getMethods(clazz, methodNames));
    }
}
public class Client {
    /**
     * Returns a cursor of datapoints specified by a series filter.
     *
     * <p>This endpoint allows one to request multiple series and apply an
     * aggregation function. The system default timezone is used for the
     * returned DateTimes.
     *
     * @param filter The series filter.
     * @param interval An interval of time for the query (start/end datetimes).
     * @param aggregation The aggregation for the read query. This is required.
     * @return A Cursor of DataPoints. The cursor.iterator().next() may throw a
     *         {@link TempoDBException} if an error occurs while making a request.
     * @see Aggregation
     * @see Cursor
     * @see Filter
     * @since 1.0.0
     */
    public Cursor<DataPoint> readDataPoints(Filter filter, Interval interval, Aggregation aggregation) {
        // Delegates to the full overload with the system default timezone; the
        // two trailing arguments are left unset (null) — presumably the
        // rollup/interpolation options of the full overload; verify there.
        return readDataPoints(filter, interval, DateTimeZone.getDefault(), aggregation, null, null);
    }
}
public class ConcurrentUtils {
    /**
     * Tests whether the specified {@code Throwable} is a checked exception.
     * If not, an exception is thrown.
     *
     * @param ex the {@code Throwable} to check.
     * @return the passed in {@code Throwable}, guaranteed to be a checked
     *         exception. (The previous javadoc incorrectly described the
     *         return value as a flag.)
     * @throws IllegalArgumentException if the {@code Throwable} is null or is
     *         not a checked exception.
     */
    static Throwable checkedException(final Throwable ex) {
        // A checked exception is non-null and neither a RuntimeException nor
        // an Error. The inline check avoids building the failure message
        // eagerly on every call, which Validate.isTrue did even on success.
        if (ex == null || ex instanceof RuntimeException || ex instanceof Error) {
            throw new IllegalArgumentException("Not a checked exception: " + ex);
        }
        return ex;
    }
}
public class CmsGwtActionElement { /** * Serializes the result of the given method for RPC - prefetching . < p > * @ param method the method * @ param data the result to serialize * @ return the serialized data * @ throws SerializationException if something goes wrong */ public static String serialize ( Method method , Object data ) throws SerializationException { } }
String result = RPC . encodeResponseForSuccess ( method , data , CmsPrefetchSerializationPolicy . instance ( ) ) ; result = CmsEncoder . escapeXml ( result , true ) ; return result ;
public class LoadBalancerRequestFilter {
    /**
     * {@inheritDoc}
     *
     * Rewrites the request's base and request URIs according to the
     * {@code x-forwarded-proto} / {@code x-forwarded-port} headers set by a
     * load balancer, so generated URIs reflect the external address.
     */
    @Override
    public void filter(ContainerRequestContext ctx) throws IOException {
        String scheme = getValue(ctx.getHeaders(), "x-forwarded-proto");
        String port = getValue(ctx.getHeaders(), "x-forwarded-port");
        // Neither header present: the request was not proxied, leave it alone.
        if (scheme == null && port == null) return;
        UriBuilder baseBuilder = ctx.getUriInfo().getBaseUriBuilder();
        UriBuilder requestBuilder = ctx.getUriInfo().getRequestUriBuilder();
        if (scheme != null) {
            baseBuilder.scheme(scheme);
            requestBuilder.scheme(scheme);
            // NOTE(review): port 443 is forced whenever a forwarded scheme is
            // present, even if that scheme is "http"; an x-forwarded-port
            // header below overrides it. Confirm the 443 default is intended.
            baseBuilder.port(443);
            requestBuilder.port(443);
        }
        if (port != null) {
            // NOTE(review): parseInt throws NumberFormatException on a
            // malformed header — presumably the LB-supplied value is trusted.
            int nPort = Integer.parseInt(port);
            baseBuilder.port(nPort);
            requestBuilder.port(nPort);
        }
        ctx.setRequestUri(baseBuilder.build(), requestBuilder.build());
    }
}
public class CPDefinitionOptionRelLocalServiceUtil {
    /**
     * Returns a range of all the cp definition option rels.
     *
     * <p>Useful when paginating results. Returns a maximum of
     * <code>end - start</code> instances. <code>start</code> and
     * <code>end</code> are not primary keys, they are indexes in the result
     * set. Thus, <code>0</code> refers to the first result in the set. Setting
     * both <code>start</code> and <code>end</code> to
     * {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS} will return
     * the full result set. If <code>orderByComparator</code> is specified,
     * then the query will include the given ORDER BY logic. If
     * <code>orderByComparator</code> is absent and pagination is required
     * (<code>start</code> and <code>end</code> are not
     * {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS}), then the
     * query will include the default ORDER BY logic from
     * {@link com.liferay.commerce.product.model.impl.CPDefinitionOptionRelModelImpl}.
     * If both <code>orderByComparator</code> and pagination are absent, for
     * performance reasons, the query will not have an ORDER BY clause and the
     * returned result set will be sorted on by the primary key in an ascending
     * order.
     *
     * @param start the lower bound of the range of cp definition option rels
     * @param end the upper bound of the range of cp definition option rels (not inclusive)
     * @return the range of cp definition option rels
     */
    public static java.util.List<com.liferay.commerce.product.model.CPDefinitionOptionRel>
            getCPDefinitionOptionRels(int start, int end) {
        // Static facade: delegates to the registered local service instance.
        return getService().getCPDefinitionOptionRels(start, end);
    }
}
public class AopAllianceSimulatorImpl { /** * Returns some simulation response if found . * @ param mi the MethodInvocation * @ return some simulation response * @ throws Exception if something goes wrong */ public Object invoke ( MethodInvocation mi ) throws Exception { } }
Method method = mi . getMethod ( ) ; return simulatorAdapter . service ( method . getDeclaringClass ( ) . getCanonicalName ( ) , method . getName ( ) , mi . getArguments ( ) , rootPath , useRootRelativePath ) ;
public class TrxMessageHeader {
    /**
     * Merges all header, info, and transport entries from the given message
     * header into this one, lazily creating each backing map as needed.
     * Entries from {@code trxHeaderToMerge} overwrite entries with the same
     * key in this header. (The previous javadoc described an unrelated
     * key-lookup method and has been corrected.)
     *
     * @param trxHeaderToMerge the header whose entries are merged into this one.
     */
    public void putAll(TrxMessageHeader trxHeaderToMerge) {
        if (m_mapMessageHeader == null)
            m_mapMessageHeader = new HashMap<String, Object>();
        if (trxHeaderToMerge.getMessageHeaderMap() != null)
            this.getMessageHeaderMap().putAll(trxHeaderToMerge.getMessageHeaderMap());
        if (m_mapMessageInfo == null)
            m_mapMessageInfo = new HashMap<String, Object>();
        if (trxHeaderToMerge.getMessageInfoMap() != null)
            this.getMessageInfoMap().putAll(trxHeaderToMerge.getMessageInfoMap());
        if (m_mapMessageTransport == null)
            m_mapMessageTransport = new HashMap<String, Object>();
        if (trxHeaderToMerge.getMessageTransportMap() != null)
            this.getMessageTransportMap().putAll(trxHeaderToMerge.getMessageTransportMap());
    }
}
public class CommerceCountryLocalServiceBaseImpl {
    /**
     * Deletes the commerce country from the database. Also notifies the
     * appropriate model listeners.
     *
     * @param commerceCountry the commerce country.
     * @return the commerce country that was removed.
     * @throws PortalException if the removal fails.
     */
    @Indexable(type = IndexableType.DELETE)
    @Override
    public CommerceCountry deleteCommerceCountry(CommerceCountry commerceCountry)
            throws PortalException {
        // The persistence layer removes the row and fires model listeners;
        // @Indexable removes the entity from the search index afterwards.
        return commerceCountryPersistence.remove(commerceCountry);
    }
}
public class CommandLineUiCallback {
    /**
     * @see edu.umd.cs.findbugs.IGuiCallback#showQuestionDialog(java.lang.String,
     *      java.lang.String, java.lang.String)
     */
    @Override
    public String showQuestionDialog(String message, String title, String defaultValue) {
        // The command-line UI has no way to present an interactive question
        // dialog, so this callback is unsupported here.
        throw new UnsupportedOperationException();
    }
}
public class AbstractParser { /** * convert an xml element in Long value * @ param reader the StAX reader * @ param key The key * @ param expressions The expressions * @ return the long representing element * @ throws XMLStreamException StAX exception * @ throws ParserException in case it isn ' t a number */ protected Long elementAsLong ( XMLStreamReader reader , String key , Map < String , String > expressions ) throws XMLStreamException , ParserException { } }
Long longValue = null ; String elementtext = rawElementText ( reader ) ; if ( key != null && expressions != null && elementtext != null && elementtext . indexOf ( "${" ) != - 1 ) expressions . put ( key , elementtext ) ; try { longValue = Long . valueOf ( getSubstitutionValue ( elementtext ) ) ; } catch ( NumberFormatException nfe ) { throw new ParserException ( bundle . notValidNumber ( elementtext , reader . getLocalName ( ) ) ) ; } return longValue ;
public class ST_AsGeoJSON {
    /**
     * Appends the GeoJSON representation of a MultiPoint to the builder.
     * Coordinates of a MultiPoint are an array of positions.
     *
     * Syntax:
     * {"type":"MultiPoint","coordinates":[[100.0, 0.0],[101.0, 1.0]]}
     *
     * @param multiPoint the geometry to serialize.
     * @param sb the builder receiving the GeoJSON text.
     */
    public static void toGeojsonMultiPoint(MultiPoint multiPoint, StringBuilder sb) {
        sb.append("{\"type\":\"MultiPoint\",\"coordinates\":");
        // The coordinate array itself is emitted by the shared helper.
        toGeojsonCoordinates(multiPoint.getCoordinates(), sb);
        sb.append("}");
    }
}
public class CmsAttributeHandler {
    /**
     * Adds a new choice attribute value.<p>
     *
     * @param reference the reference value view.
     * @param choicePath the path of the selected (possibly nested) choice
     *        attribute, consisting of attribute names.
     */
    public void addNewChoiceAttributeValue(CmsAttributeValueView reference, List<String> choicePath) {
        CmsValueFocusHandler.getInstance().clearFocus();
        m_widgetService.addChangedOrderPath(getSimplePath(-1));
        // Simple choice handlers add an option; complex ones add a nested value.
        if (isChoiceHandler()) {
            addChoiceOption(reference, choicePath);
        } else {
            addComplexChoiceValue(reference, choicePath);
        }
        updateButtonVisisbility();
        // Record the change for undo/redo, but only once the undo/redo handler
        // has been initialized.
        CmsUndoRedoHandler handler = CmsUndoRedoHandler.getInstance();
        if (handler.isIntitalized()) {
            handler.addChange(m_entity.getId(), m_attributeName,
                    reference.getValueIndex() + 1, ChangeType.choice);
        }
    }
}
public class Resource { /** * doesnt exist . Otherwise , the URL will fail on the first access . */ private static boolean resourceExist ( ExternalContext externalContext , String path ) { } }
if ( "/" . equals ( path ) ) { // The root context exists always return true ; } Object ctx = externalContext . getContext ( ) ; if ( ctx instanceof ServletContext ) { ServletContext servletContext = ( ServletContext ) ctx ; InputStream stream = servletContext . getResourceAsStream ( path ) ; if ( stream != null ) { try { stream . close ( ) ; } catch ( IOException e ) { // Ignore here , since we donnot wanted to read from this // resource anyway } return true ; } } return false ;
public class EJBModuleMetaDataImpl {
    /**
     * Adds a list of timer method metadata for a bean belonging to this
     * application. This method will only be called for beans that contain
     * automatic timers.
     *
     * @param timerBean the list of timer method metadata.
     */
    public void addAutomaticTimerBean(AutomaticTimerBean timerBean) // d604213
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, "addAutomaticTimerBean: " + timerBean.getBeanMetaData().j2eeName);
        // Lazily create the bean list on first registration.
        if (ivAutomaticTimerBeans == null) {
            ivAutomaticTimerBeans = new ArrayList<AutomaticTimerBean>();
        }
        // Track whether any bean in the module declares each kind of timer.
        ivHasNonPersistentAutomaticTimers |= timerBean.getNumNonPersistentTimers() > 0;
        ivHasPersistentAutomaticTimers |= timerBean.getNumPersistentTimers() > 0;
        ivAutomaticTimerBeans.add(timerBean);
    }
}
public class CloudRedisClient {
    /**
     * Updates the metadata and configuration of a specific Redis instance.
     *
     * <p>Completed longrunning.Operation will contain the new instance object
     * in the response field. The returned operation is automatically deleted
     * after a few hours, so there is no need to call DeleteOperation.
     *
     * <p>Sample code:
     *
     * <pre><code>
     * try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
     *   String pathsElement = "display_name";
     *   String pathsElement2 = "memory_size_gb";
     *   List&lt;String&gt; paths = Arrays.asList(pathsElement, pathsElement2);
     *   FieldMask updateMask = FieldMask.newBuilder()
     *       .addAllPaths(paths)
     *       .build();
     *   String displayName = "UpdatedDisplayName";
     *   int memorySizeGb = 4;
     *   Instance instance = Instance.newBuilder()
     *       .setDisplayName(displayName)
     *       .setMemorySizeGb(memorySizeGb)
     *       .build();
     *   UpdateInstanceRequest request = UpdateInstanceRequest.newBuilder()
     *       .setUpdateMask(updateMask)
     *       .setInstance(instance)
     *       .build();
     *   Instance response = cloudRedisClient.updateInstanceAsync(request).get();
     * }
     * </code></pre>
     *
     * @param request The request object containing all of the parameters for
     *        the API call.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails.
     */
    @BetaApi("The surface for long-running operations is not stable yet and may change in the future.")
    public final OperationFuture<Instance, Any> updateInstanceAsync(UpdateInstanceRequest request) {
        // Delegates to the generated long-running-operation callable.
        return updateInstanceOperationCallable().futureCall(request);
    }
}
public class GCPARCImpl {
    /**
     * <!-- begin-user-doc -->
     * Generated EMF reflective setter: routes a feature id to the matching
     * typed setter, delegating unknown features to the superclass. Do not
     * hand-edit — regenerated from the model.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.GCPARC__XCENT:
                setXCENT((Integer) newValue);
                return;
            case AfplibPackage.GCPARC__YCENT:
                setYCENT((Integer) newValue);
                return;
            case AfplibPackage.GCPARC__MH:
                setMH((Integer) newValue);
                return;
            case AfplibPackage.GCPARC__MFR:
                setMFR((Integer) newValue);
                return;
            case AfplibPackage.GCPARC__START:
                setSTART((Integer) newValue);
                return;
            case AfplibPackage.GCPARC__SWEEP:
                setSWEEP((Integer) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class SqlManagerImpl {
    /**
     * Sets the value types on both the SQL executor and the call executor.
     *
     * @param valueTypes the value types to set.
     * @throws IllegalArgumentException if the {@code valueTypes} is
     *         {@code null} or an element in the {@code valueTypes} is
     *         {@code null}
     */
    public void setValueTypes(List<ValueType<?>> valueTypes) {
        // NOTE(review): commons-lang's Validate.noNullElements throws
        // NullPointerException (not IllegalArgumentException) for a null
        // list — confirm which Validate implementation is imported before
        // relying on the documented exception type.
        Validate.noNullElements(valueTypes);
        this.sqlExecutor.setValueTypes(valueTypes);
        this.callExecutor.setValueTypes(valueTypes);
    }
}
public class SimonDataGenerator {
    /**
     * Stacked stopwatches to test call tree: Y.A encloses Y.B and Y.C, then
     * Y.D, producing nested splits for call-tree reporting.
     */
    public void addStackedSimons() {
        Split splitA = SimonManager.getStopwatch("Y.A").start();
        Split splitB = SimonManager.getStopwatch("Y.B").start();
        // Y.C accumulates 6 plain splits while nested inside Y.A and Y.B.
        addStopwatchSplits(SimonManager.getStopwatch("Y.C"), 6);
        splitB.stop();
        Split splitD = SimonManager.getStopwatch("Y.D").start();
        // Two random pauses give Y.D a non-trivial measured duration.
        randomWait(100, 250);
        randomWait(100, 250);
        splitD.stop();
        splitA.stop();
    }
}
public class SearchHandler {
    /**
     * Compares three categories with decreasing priority from the first to the
     * last category: setting wins ties, then group, then category. A match
     * count of 1 short-circuits in priority order; otherwise the category with
     * the least (non-zero) amount of matches wins.
     * {@see developer reference} for further information.
     *
     * @param setting category to return, if settingsMatch is chosen.
     * @param group category to return, if groupMatch is chosen.
     * @param category category to return, if categoryMatch is chosen.
     * @param settingMatch amount of settings which match.
     * @param groupMatch amount of groups which match.
     * @param categoryMatch amount of categories which match.
     * @return the category with the least amount of matches, taking into
     *         account the priority; null if nothing matches at all.
     */
    public Category compareMatches(Category setting, Category group, Category category,
            int settingMatch, int groupMatch, int categoryMatch) {
        LOGGER.trace(String.format("compareMatches: settingMatch: %s, groupMatch: %s, "
                + "categoryMatch: %s", settingMatch, groupMatch, categoryMatch));
        if (settingMatch == 0 && groupMatch == 0 && categoryMatch == 0) {
            // if all values are 0
            return null;
            // if all values are equal to each other
        } else if (settingMatch == groupMatch && settingMatch == categoryMatch) {
            return setting;
        } else if (settingMatch == 1) {
            return setting;
        } else if (groupMatch == 1) {
            return group;
        } else if (categoryMatch == 1) {
            return category;
        } else if (settingMatch != 0 && groupMatch == 0 && categoryMatch == 0) {
            return setting;
        } else if (settingMatch == 0 && groupMatch != 0 && categoryMatch == 0) {
            return group;
        } else if (settingMatch == 0 && groupMatch == 0 && categoryMatch != 0) {
            return category;
        } else if (settingMatch == 0) {
            // can only be categoryMatch, if it's smaller than groupMatch
            if (categoryMatch < groupMatch) {
                return category;
            } else {
                // from here it can only be groupMatch if settingMatch is 0
                return group;
            }
        } else if (groupMatch == 0) {
            // can only be settingMatch, if it's smaller or equal to categoryMatch
            if (settingMatch <= categoryMatch) {
                return setting;
            } else {
                // from here it can only be categoryMatch
                return category;
            }
        } else if (categoryMatch == 0) {
            if (groupMatch < settingMatch) {
                // can only be groupMatch, if it's smaller than settingMatch
                return group;
            } else {
                // from here it can only be settingMatch
                return setting;
            }
            // from here, no more 0 or 1 values are present => comparisons can be made safely!
        } else if (settingMatch <= groupMatch && settingMatch <= categoryMatch) {
            return setting;
        } else if (groupMatch <= categoryMatch) {
            return group;
        }
        return category;
    }
}
public class FringeConfiguration { /** * If the first base tileset fringes upon the second , return the * fringe priority of the first base tileset , otherwise return - 1. */ public int fringesOn ( int first , int second ) { } }
FringeRecord f1 = _frecs . get ( first ) ; // we better have a fringe record for the first if ( null != f1 ) { // it had better have some tilesets defined if ( f1 . tilesets . size ( ) > 0 ) { FringeRecord f2 = _frecs . get ( second ) ; // and we only fringe if second doesn ' t exist or has a lower // priority if ( ( null == f2 ) || ( f1 . priority > f2 . priority ) ) { return f1 . priority ; } } } return - 1 ;
public class CompareAssert {
    /**
     * Registers the prefix-to-URI namespace context used while comparing the
     * documents, then returns this assert for fluent chaining.
     *
     * {@inheritDoc}
     *
     * @see DiffBuilder#withNamespaceContext(Map)
     */
    @Override
    public CompareAssert withNamespaceContext(Map<String, String> prefix2Uri) {
        // Delegate to the underlying DiffBuilder; the builder is mutated in place.
        diffBuilder.withNamespaceContext(prefix2Uri);
        return this;
    }
}
public class LineItemSummary {
    /**
     * Gets the deliveryRateType value for this LineItemSummary.
     *
     * @return deliveryRateType — the strategy for delivering ads over the course
     *         of the line item's duration. This attribute is optional and
     *         defaults to {@link DeliveryRateType#EVENLY} or
     *         {@link DeliveryRateType#FRONTLOADED} depending on the network's
     *         configuration.
     */
    public com.google.api.ads.admanager.axis.v201811.DeliveryRateType getDeliveryRateType() {
        // Plain accessor over the generated field; may be null if never set.
        return deliveryRateType;
    }
}
public class FunctionToBlockMutator {
    /**
     * Inlines the arguments within the node tree using the given argument map,
     * replaces "unsafe" names with local aliases.
     *
     * The aliases for unsafe names require new VAR declarations, so this function
     * can not be used for direct CALL node replacement as VAR nodes can not be
     * created there.
     *
     * @param fnTemplateRoot root of the function body template to inline into
     * @param argMap map from parameter name (and THIS_MARKER) to argument node
     * @param namesToAlias parameter names that are unsafe to substitute directly
     * @return The node or its replacement.
     */
    private Node aliasAndInlineArguments(Node fnTemplateRoot,
            ImmutableMap<String, Node> argMap, Set<String> namesToAlias) {
        if (namesToAlias == null || namesToAlias.isEmpty()) {
            // There are no names to alias, just inline the arguments directly.
            Node result = FunctionArgumentInjector.inject(compiler, fnTemplateRoot, null, argMap);
            checkState(result == fnTemplateRoot);
            return result;
        } else {
            // Create local aliases of names that can not be safely used directly.
            // An arg map that will be updated to contain the safe aliases.
            Map<String, Node> newArgMap = new HashMap<>(argMap);
            // Declare the aliases in the same order as they are declared.
            List<Node> newVars = new ArrayList<>();
            // NOTE: argMap is a linked map so we get the parameters in the
            // order that they were declared.
            for (Entry<String, Node> entry : argMap.entrySet()) {
                String name = entry.getKey();
                if (namesToAlias.contains(name)) {
                    if (name.equals(THIS_MARKER)) {
                        boolean referencesThis = NodeUtil.referencesThis(fnTemplateRoot);
                        // Update "this"; this is only necessary if "this" is referenced
                        // and the value of "this" is not Token.THIS, or the value of
                        // "this" has side effects.
                        Node value = entry.getValue();
                        if (!value.isThis()
                            && (referencesThis || NodeUtil.mayHaveSideEffects(value, compiler))) {
                            String newName = getUniqueThisName();
                            Node newValue = entry.getValue().cloneTree();
                            Node newNode = NodeUtil.newVarNode(newName, newValue)
                                .useSourceInfoIfMissingFromForTree(newValue);
                            // Prepend so declarations end up in original parameter order.
                            newVars.add(0, newNode);
                            // Remove the parameter from the list to replace.
                            newArgMap.put(THIS_MARKER, IR.name(newName).srcrefTree(newValue));
                        }
                    } else {
                        Node newValue = entry.getValue().cloneTree();
                        Node newNode = NodeUtil.newVarNode(name, newValue)
                            .useSourceInfoIfMissingFromForTree(newValue);
                        newVars.add(0, newNode);
                        // Remove the parameter from the list to replace.
                        newArgMap.remove(name);
                    }
                }
            }
            // Inline the arguments.
            Node result = FunctionArgumentInjector.inject(compiler, fnTemplateRoot, null, newArgMap);
            checkState(result == fnTemplateRoot);
            // Now that the names have been replaced, add the new aliases for
            // the old names.
            for (Node n : newVars) {
                fnTemplateRoot.addChildToFront(n);
            }
            return result;
        }
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link X3DMaterialType}{@code >}.
     *
     * @param value Java instance representing xml element's value.
     * @return the new instance of {@link JAXBElement}{@code <}{@link X3DMaterialType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/citygml/appearance/1.0", name = "X3DMaterial",
        substitutionHeadNamespace = "http://www.opengis.net/citygml/appearance/1.0",
        substitutionHeadName = "_SurfaceData")
    public JAXBElement<X3DMaterialType> createX3DMaterial(X3DMaterialType value) {
        // Generated JAXB factory method; scope is null (global element).
        return new JAXBElement<X3DMaterialType>(_X3DMaterial_QNAME, X3DMaterialType.class, null, value);
    }
}
public class Datatype_Builder {
    /**
     * Adds each element of {@code elements} to the list to be returned from
     * {@link Datatype#getValueTypeAnnotations()}.
     *
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code elements} is null or contains a null element
     */
    public Datatype.Builder addAllValueTypeAnnotations(Spliterator<? extends Excerpt> elements) {
        // If the spliterator knows its size up front, presize the backing list
        // to avoid repeated array growth while adding.
        if ((elements.characteristics() & Spliterator.SIZED) != 0) {
            long elementsSize = elements.estimateSize();
            if (elementsSize > 0 && elementsSize <= Integer.MAX_VALUE) {
                // Switch from the immutable representation to a mutable
                // ArrayList before attempting to grow it.
                if (valueTypeAnnotations instanceof ImmutableList) {
                    valueTypeAnnotations = new ArrayList<>(valueTypeAnnotations);
                }
                ((ArrayList<?>) valueTypeAnnotations)
                    .ensureCapacity(valueTypeAnnotations.size() + (int) elementsSize);
            }
        }
        // Per-element handling (including null checks) is delegated to the
        // single-element adder.
        elements.forEachRemaining(this::addValueTypeAnnotations);
        return (Datatype.Builder) this;
    }
}
public class OMVRBTreeSet { /** * Save the state of the { @ code OTreeSet } instance to a stream ( that is , serialize it ) . * @ serialData Emits the comparator used to order this set , or { @ code null } if it obeys its elements ' natural ordering ( Object ) , * followed by the size of the set ( the number of elements it contains ) ( int ) , followed by all of its elements ( each * an Object ) in order ( as determined by the set ' s Comparator , or by the elements ' natural ordering if the set has no * Comparator ) . */ private void writeObject ( java . io . ObjectOutputStream s ) throws java . io . IOException { } }
// Write out any hidden stuff s . defaultWriteObject ( ) ; // Write out Comparator s . writeObject ( m . comparator ( ) ) ; // Write out size s . writeInt ( m . size ( ) ) ; // Write out all elements in the proper order . for ( Iterator < E > i = m . keySet ( ) . iterator ( ) ; i . hasNext ( ) ; ) s . writeObject ( i . next ( ) ) ;
public class BoxLegalHoldPolicy {
    /**
     * Creates a new ongoing Legal Hold Policy.
     *
     * @param api the API connection to be used by the resource.
     * @param name the name of Legal Hold Policy.
     * @param description the description of Legal Hold Policy; may be null to omit it.
     * @return information about the Legal Hold Policy created.
     */
    public static BoxLegalHoldPolicy.Info createOngoing(BoxAPIConnection api, String name, String description) {
        URL url = ALL_LEGAL_HOLD_URL_TEMPLATE.build(api.getBaseURL());
        BoxJSONRequest request = new BoxJSONRequest(api, url, "POST");
        // "is_ongoing" distinguishes this from a date-bounded policy.
        JsonObject requestJSON = new JsonObject().add("policy_name", name).add("is_ongoing", true);
        if (description != null) {
            requestJSON.add("description", description);
        }
        request.setBody(requestJSON.toString());
        BoxJSONResponse response = (BoxJSONResponse) request.send();
        JsonObject responseJSON = JsonObject.readFrom(response.getJSON());
        // Build the resource from the returned id, then wrap the full response
        // payload in its Info view.
        BoxLegalHoldPolicy createdPolicy = new BoxLegalHoldPolicy(api, responseJSON.get("id").asString());
        return createdPolicy.new Info(responseJSON);
    }
}
public class IdentityStoreHandlerServiceImpl {
    /**
     * Returns the partial subject for hashtable login.
     *
     * @param username the user name to authenticate
     * @param password the password to authenticate (sensitive; not logged)
     * @return the partial subject which can be used for hashtable login if
     *         username and password are valid.
     * @throws com.ibm.ws.security.authentication.AuthenticationException
     */
    @Override
    public Subject createHashtableInSubject(String username, @Sensitive String password)
            throws AuthenticationException {
        // Wrap the raw credentials and delegate to the credential-based overload.
        UsernamePasswordCredential credential = new UsernamePasswordCredential(username, password);
        return createHashtableInSubject(credential);
    }
}
public class EndpointGroupMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param endpointGroup the model object to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller receiving each field
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(EndpointGroup endpointGroup, ProtocolMarshaller protocolMarshaller) {
        if (endpointGroup == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each field against its generated binding descriptor.
            protocolMarshaller.marshall(endpointGroup.getEndpointGroupArn(), ENDPOINTGROUPARN_BINDING);
            protocolMarshaller.marshall(endpointGroup.getEndpointGroupRegion(), ENDPOINTGROUPREGION_BINDING);
            protocolMarshaller.marshall(endpointGroup.getEndpointDescriptions(), ENDPOINTDESCRIPTIONS_BINDING);
            protocolMarshaller.marshall(endpointGroup.getTrafficDialPercentage(), TRAFFICDIALPERCENTAGE_BINDING);
            protocolMarshaller.marshall(endpointGroup.getHealthCheckPort(), HEALTHCHECKPORT_BINDING);
            protocolMarshaller.marshall(endpointGroup.getHealthCheckProtocol(), HEALTHCHECKPROTOCOL_BINDING);
            protocolMarshaller.marshall(endpointGroup.getHealthCheckPath(), HEALTHCHECKPATH_BINDING);
            protocolMarshaller.marshall(endpointGroup.getHealthCheckIntervalSeconds(), HEALTHCHECKINTERVALSECONDS_BINDING);
            protocolMarshaller.marshall(endpointGroup.getThresholdCount(), THRESHOLDCOUNT_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause for diagnostics.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ManagementSecurityAuthorizationTable {
    /**
     * Clear the authorization table: empties every cached mapping
     * (access-id/user/group to roles and access ids).
     */
    private void clearAuthorizationTable() {
        accessIdToRoles.clear();
        userToAccessId.clear();
        groupToAccessId.clear();
        userToRoles.clear();
        groupToRoles.clear();
    }
}
public class ReadStreamOld { /** * Reads a line , returning a string . */ public String readLineNoChop ( ) throws IOException { } }
CharBuffer cb = new CharBuffer ( ) ; if ( readLine ( cb , false ) ) return cb . toString ( ) ; else if ( cb . length ( ) == 0 ) return null ; else return cb . toString ( ) ;
public class ZSocket {
    /**
     * {@inheritDoc}
     */
    @Override
    public void close() {
        // compareAndSet guarantees the underlying socket is destroyed at most
        // once, even when close() is invoked concurrently or repeatedly.
        if (isClosed.compareAndSet(false, true)) {
            ManagedContext.getInstance().destroy(socketBase);
        }
    }
}
public class JournalSegmentFile { /** * Returns a boolean value indicating whether the given file appears to be a parsable segment file . * @ param journalName the name of the journal * @ param fileName the name of the file to check * @ throws NullPointerException if { @ code file } is null */ public static boolean isSegmentFile ( String journalName , String fileName ) { } }
checkNotNull ( journalName , "journalName cannot be null" ) ; checkNotNull ( fileName , "fileName cannot be null" ) ; int partSeparator = fileName . lastIndexOf ( PART_SEPARATOR ) ; int extensionSeparator = fileName . lastIndexOf ( EXTENSION_SEPARATOR ) ; if ( extensionSeparator == - 1 || partSeparator == - 1 || extensionSeparator < partSeparator || ! fileName . endsWith ( EXTENSION ) ) { return false ; } for ( int i = partSeparator + 1 ; i < extensionSeparator ; i ++ ) { if ( ! Character . isDigit ( fileName . charAt ( i ) ) ) { return false ; } } return fileName . startsWith ( journalName ) ;
public class Plane4d { /** * Replies the pivot point around which the rotation must be done . * @ return a reference on the buffered pivot point . */ @ Override public Point3d getPivot ( ) { } }
Point3d pivot = this . cachedPivot == null ? null : this . cachedPivot . get ( ) ; if ( pivot == null ) { pivot = getProjection ( 0. , 0. , 0. ) ; this . cachedPivot = new WeakReference < > ( pivot ) ; } return pivot ;
public class SourceFileSpecifierMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param sourceFileSpecifier the model object to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller receiving each field
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(SourceFileSpecifier sourceFileSpecifier, ProtocolMarshaller protocolMarshaller) {
        if (sourceFileSpecifier == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each field against its generated binding descriptor.
            protocolMarshaller.marshall(sourceFileSpecifier.getFilePath(), FILEPATH_BINDING);
            protocolMarshaller.marshall(sourceFileSpecifier.getIsMove(), ISMOVE_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause for diagnostics.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class JAXRSClientFactoryBean {
    /**
     * Create a Client instance. Proxies and WebClients are Clients.
     *
     * @param varValues optional list of values which will be used to substitute
     *        template variables specified in the class-level JAX-RS Path annotations
     * @return the client
     */
    public Client createWithValues(Object... varValues) {
        serviceFactory.setBus(getBus());
        checkResources(false);
        ClassResourceInfo cri = null;
        try {
            Endpoint ep = createEndpoint();
            if (getServiceClass() != null) {
                // Find the resource info matching the configured service class
                // (either direction of assignability is accepted).
                for (ClassResourceInfo info : serviceFactory.getClassResourceInfo()) {
                    if (info.getServiceClass().isAssignableFrom(getServiceClass())
                        || getServiceClass().isAssignableFrom(info.getServiceClass())) {
                        cri = info;
                        break;
                    }
                }
                if (cri == null) {
                    // can not happen in the reality
                    throw new RuntimeException("Service class " + getServiceClass().getName()
                        + " is not recognized");
                }
            } else {
                cri = serviceFactory.getClassResourceInfo().get(0);
            }

            // A root resource carries a URI template of its own.
            boolean isRoot = cri.getURITemplate() != null;
            ClientProxyImpl proxyImpl = null;
            ClientState actualState = getActualState();
            proxyImpl = createClientProxy(cri, isRoot, actualState, varValues);
            initClient(proxyImpl, ep, actualState == null);

            // Choose the classloader for the dynamic proxy inside a privileged
            // block (may be overridden by an explicit proxyLoader).
            final Class<?> serviceClass = cri.getServiceClass();
            ClassLoader theLoader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
                @Override
                public ClassLoader run() {
                    return proxyLoader == null ? serviceClass.getClassLoader() : proxyLoader;
                }
            });
            Class<?>[] ifaces = new Class[]{Client.class, InvocationHandlerAware.class, cri.getServiceClass()};
            Client actualClient = (Client) ProxyHelper.getProxy(theLoader, ifaces, proxyImpl);
            proxyImpl.setProxyClient(actualClient);
            notifyLifecycleManager(actualClient);
            this.getServiceFactory().sendEvent(FactoryBeanListener.Event.CLIENT_CREATED, actualClient, ep);
            return actualClient;
        } catch (IllegalArgumentException ex) {
            // Typically thrown by proxy creation; enrich the well-known
            // "not an interface" message with a CGLIB hint.
            String message = ex.getLocalizedMessage();
            if (cri != null) {
                String expected = cri.getServiceClass().getSimpleName();
                if ((expected + " is not an interface").equals(message)) {
                    message += "; make sure CGLIB is on the classpath";
                }
            }
            LOG.severe(ex.getClass().getName() + " : " + message);
            throw ex;
        } catch (Exception ex) {
            LOG.severe(ex.getClass().getName() + " : " + ex.getLocalizedMessage());
            throw new RuntimeException(ex);
        }
    }
}
public class ByteArrayUtil {
    /**
     * Decode long from byte array at offset.
     *
     * Reads 8 bytes in big-endian order starting at {@code offset}.
     *
     * @param ba byte array
     * @param offset Offset
     * @return long value
     */
    public static long decodeLong(byte[] ba, int offset) {
        long value = 0L;
        // Fold the 8 bytes in, most significant first; masking with 0xFFL
        // strips the sign extension of each byte.
        for (int i = 0; i < 8; i++) {
            value = (value << 8) | (ba[offset + i] & 0xFFL);
        }
        return value;
    }
}
public class JedisRedisClient { /** * { @ inheritDoc } */ @ Override public boolean setIsMember ( String setName , String value ) { } }
Boolean result = redisClient . sismember ( setName , value ) ; return result != null ? result . booleanValue ( ) : false ;
public class MultiFormatter { /** * Add and copy the MultiFormatter . * @ param formatters */ public MultiFormatter add ( Formatter < ? > ... formatters ) { } }
if ( formatter != null ) { MultiFormatter copy = new MultiFormatter ( ) ; copy . formatters . putAll ( this . formatters ) ; copy . setFormatters ( formatters ) ; return copy ; } return this ;
public class GridScreen {
    /**
     * Code this position and Anchor to add it to the LayoutManager.
     *
     * Remaps grid-specific position constants to their concrete equivalents
     * before delegating to the superclass.
     *
     * @param position The location constant (see ScreenConstants).
     * @param setNewAnchor The anchor constant.
     * @return The new screen location object.
     */
    public ScreenLocation getNextLocation(short position, short setNewAnchor) {
        // In a grid, the "next logical" slot is the cell to the right of the last one.
        if (position == ScreenConstants.NEXT_LOGICAL)
            position = ScreenConstants.RIGHT_OF_LAST;
        if (position == ScreenConstants.FIRST_LOCATION)
            position = ScreenConstants.FIRST_DISPLAY_LOCATION;
        if (position == ScreenConstants.ADD_SCREEN_VIEW_BUFFER)
            position = ScreenConstants.ADD_GRID_SCREEN_BUFFER; // No buffer around frame!
        return super.getNextLocation(position, setNewAnchor);
    }
}
public class ToolInstallerDescriptor { /** * Filters { @ link # all ( ) } by eliminating things that are not applicable to the given type . */ public static List < ToolInstallerDescriptor < ? > > for_ ( Class < ? extends ToolInstallation > type ) { } }
List < ToolInstallerDescriptor < ? > > r = new ArrayList < > ( ) ; for ( ToolInstallerDescriptor < ? > d : all ( ) ) if ( d . isApplicable ( type ) ) r . add ( d ) ; return r ;
public class DiskStorageCache { /** * If file cache size is not calculated or if it was calculated * a long time ago ( FILECACHE _ SIZE _ UPDATE _ PERIOD _ MS ) recalculated from file listing . * @ return true if it was recalculated , false otherwise . */ @ GuardedBy ( "mLock" ) private boolean maybeUpdateFileCacheSize ( ) { } }
long now = mClock . now ( ) ; if ( ( ! mCacheStats . isInitialized ( ) ) || mCacheSizeLastUpdateTime == UNINITIALIZED || ( now - mCacheSizeLastUpdateTime ) > FILECACHE_SIZE_UPDATE_PERIOD_MS ) { return maybeUpdateFileCacheSizeAndIndex ( ) ; } return false ;
public class FormatUtils {
    /**
     * Parses a list of {@code Objects} into a {@code String}.
     *
     * @param objs a list of Objects to convert to a String; may be null or empty
     * @return a parenthesized, comma-separated concatenation of each object's
     *         {@code toString()} (null elements render as "null"); "()" for a
     *         null or empty argument list
     */
    public static String parametersToString(Object... objs) {
        StringJoiner joiner = new StringJoiner(", ", "(", ")");
        if (objs != null) {
            // String.valueOf renders nulls as "null", matching toString otherwise.
            for (Object obj : objs) {
                joiner.add(String.valueOf(obj));
            }
        }
        return joiner.toString();
    }
}
public class FnDouble {
    /**
     * It divides the target element by the given divisor and returns the
     * remainder (target % divisor). The remainder precision and {@link RoundingMode}
     * is specified by the given {@link MathContext}.
     *
     * @param divisor the divisor
     * @param mathContext the {@link MathContext} to define {@link RoundingMode} and precision
     * @return the remainder of target / divisor
     */
    public final static Function<Double, Double> remainder(Number divisor, MathContext mathContext) {
        // Wrap the divisor and context into a reusable Remainder function object.
        return new Remainder(fromNumber(divisor), mathContext);
    }
}
public class SerializedRelay {
    /**
     * Loops until all notifications in the queue have been processed.
     *
     * Classic emitter-loop: the queue is swapped out under the lock, then
     * drained outside it, so producers never block on downstream delivery.
     */
    private void emitLoop() {
        for (;;) {
            AppendOnlyLinkedArrayList<T> q;
            synchronized (this) {
                q = queue;
                // Nothing pending: release the emitter role and stop.
                if (q == null) {
                    emitting = false;
                    return;
                }
                // Take ownership of the batch; new arrivals start a fresh queue.
                queue = null;
            }
            // Deliver outside the lock to avoid holding it during callbacks.
            q.accept(actual);
        }
    }
}
public class TypefaceHelper {
    /**
     * Set the typeface to all the text views belonging to the activity.
     * Note that we use the decor view of the activity so that the typeface will
     * also be applied to the action bar.
     *
     * @param activity the activity.
     * @param strResId string resource containing the typeface name.
     * @param style the typeface style.
     */
    public void setTypeface(Activity activity, @StringRes int strResId, int style) {
        // Resolve the resource to its string value and delegate to the
        // name-based overload.
        setTypeface(activity, mApplication.getString(strResId), style);
    }
}
public class VdmBreakpointPropertyPage {
    /**
     * Creates a fully configured label with the given text.
     *
     * @param parent the parent composite
     * @param text the text of the returned label
     * @return a fully configured label
     */
    protected Label createLabel(Composite parent, String text) {
        // Single-column label created through the shared SWT factory.
        return SWTFactory.createLabel(parent, text, 1);
    }
}
public class BookmarkDatastore { /** * TODO geopgrahpic bounding box ? */ public List < Marker > getBookmarksAsMarkers ( MapView view ) { } }
List < Marker > markers = new ArrayList < > ( ) ; try { // TODO order by title final Cursor cur = mDatabase . rawQuery ( "SELECT * FROM " + TABLE , null ) ; while ( cur . moveToNext ( ) ) { Marker m = new Marker ( view ) ; m . setId ( cur . getString ( cur . getColumnIndex ( COLUMN_ID ) ) ) ; m . setTitle ( cur . getString ( cur . getColumnIndex ( COLUMN_TITLE ) ) ) ; m . setSubDescription ( cur . getString ( cur . getColumnIndex ( COLUMN_DESC ) ) ) ; m . setPosition ( new GeoPoint ( cur . getDouble ( cur . getColumnIndex ( COLUMN_LAT ) ) , cur . getDouble ( cur . getColumnIndex ( COLUMN_LON ) ) ) ) ; m . setSnippet ( m . getPosition ( ) . toDoubleString ( ) ) ; markers . add ( m ) ; } cur . close ( ) ; } catch ( final Exception e ) { Log . w ( IMapView . LOGTAG , "Error getting tile sources: " , e ) ; } return markers ;
public class GetAccountAuthorizationDetailsResult {
    /**
     * A list containing information about IAM users.
     *
     * @return A list containing information about IAM users. Never null: an
     *         empty internal list is lazily created on first access.
     */
    public java.util.List<UserDetail> getUserDetailList() {
        // Lazily initialize so callers always receive a mutable, non-null list.
        if (userDetailList == null) {
            userDetailList = new com.amazonaws.internal.SdkInternalList<UserDetail>();
        }
        return userDetailList;
    }
}
public class SARLValidator {
    /**
     * Check the types of the parameters of the "fires" statement.
     *
     * Every fired type must be a SARL event; an error marker is emitted for
     * each type that is not.
     *
     * @param action the signature that contains the "fires" statement.
     */
    @Check(CheckType.FAST)
    public void checkActionFires(SarlAction action) {
        for (final JvmTypeReference event : action.getFiredEvents()) {
            final LightweightTypeReference ref = toLightweightTypeReference(event);
            // Only flag resolvable types that are definitely not events.
            if (ref != null && !this.inheritanceHelper.isSarlEvent(ref)) {
                error(MessageFormat.format(Messages.SARLValidator_57,
                        event.getQualifiedName(), Messages.SARLValidator_62,
                        this.grammarAccess.getFiresKeyword()),
                    event, null, ValidationMessageAcceptor.INSIGNIFICANT_INDEX,
                    INVALID_FIRING_EVENT_TYPE, event.getSimpleName());
            }
        }
    }
}
public class XMLChecker { /** * Determines if the specified string matches the < em > PubidLiteral < / em > production . * See : < a href = " http : / / www . w3 . org / TR / REC - xml # NT - PubidLiteral " > Definition of PubidLiteral < / a > . * @ param s the character string to check , cannot be < code > null < / code > . * @ return < code > true < / code > if the { @ link String } matches the production , or < code > false < / code > * otherwise . * @ throws NullPointerException if < code > s = = null < / code > . */ public static final boolean isPubidLiteral ( String s ) throws NullPointerException { } }
try { checkPubidLiteral ( s ) ; return true ; } catch ( InvalidXMLException exception ) { return false ; }
public class CassandraSchemaManager {
    /**
     * Validate and append column name.
     *
     * Resolves the CQL type for the given Java class, appends the typed column
     * definition to the query, followed by a trailing comma separator.
     *
     * @param translator the translator
     * @param queryBuilder the query builder
     * @param b the column name to append
     * @param clazz the Java type of the column
     */
    private void validateAndAppendColumnName(CQLTranslator translator, StringBuilder queryBuilder,
            String b, Class<?> clazz) {
        String dataType = CassandraValidationClassMapper.getValidationClass(clazz, true);
        translator.appendColumnName(queryBuilder, b, translator.getCQLType(dataType));
        // Caller is expected to trim the final trailing comma.
        queryBuilder.append(Constants.SPACE_COMMA);
    }
}
public class EnglishGrammaticalStructure {
    /**
     * Collapse multiword preposition of the following format:
     * prep|advmod|dep|amod(gov, mwp0) dep(mwp0, mwp1) pobj|pcomp(mwp1, compl) or
     * pobj|pcomp(mwp0, compl) -> prep_mwp0_mwp1(gov, compl)
     *
     * @param list List of typedDependencies to work on
     * @param newTypedDeps List of typedDependencies that we construct
     * @param str_mwp0 First part of the multiword preposition to construct the
     *        collapsed preposition
     * @param str_mwp1 Second part of the multiword preposition to construct the
     *        collapsed preposition
     * @param w_mwp0 First part of the multiword preposition that we look for
     * @param w_mwp1 Second part of the multiword preposition that we look for
     */
    private static void collapseMultiWordPrep(Collection<TypedDependency> list,
            Collection<TypedDependency> newTypedDeps, String str_mwp0, String str_mwp1,
            String w_mwp0, String w_mwp1) {
        // first find the multiword_preposition: dep(mwp[0], mwp[1])
        // the two words should be next to each other in the sentence
        // (difference of indexes = 1)
        TreeGraphNode mwp0 = null;
        TreeGraphNode mwp1 = null;
        TypedDependency dep = null;
        for (TypedDependency td : list) {
            if (td.gov().value().equalsIgnoreCase(w_mwp0)
                && td.dep().value().equalsIgnoreCase(w_mwp1)
                && Math.abs(td.gov().index() - td.dep().index()) == 1) {
                // no break: the last matching pair in the list wins
                mwp0 = td.gov();
                mwp1 = td.dep();
                dep = td;
            }
        }
        // now search for prep|advmod|dep|amod(gov, mwp0)
        TreeGraphNode governor = null;
        TypedDependency prep = null;
        for (TypedDependency td1 : list) {
            if (td1.dep() == mwp0
                && (td1.reln() == PREPOSITIONAL_MODIFIER || td1.reln() == ADVERBIAL_MODIFIER
                    || td1.reln() == ADJECTIVAL_MODIFIER || td1.reln() == DEPENDENT
                    || td1.reln() == MULTI_WORD_EXPRESSION)) {
                // we found prep|advmod|dep|amod(gov, mwp0)
                prep = td1;
                governor = prep.gov();
            }
        }
        // search for the complement: pobj|pcomp(mwp1, X) or pobj|pcomp(mwp0, X)
        // There may be more than one in weird constructions; if there are
        // several, take the one with the LOWEST index!
        TypedDependency pobj = null;
        TypedDependency newtd = null;
        for (TypedDependency td2 : list) {
            if ((td2.gov() == mwp1 || td2.gov() == mwp0)
                && (td2.reln() == PREPOSITIONAL_OBJECT || td2.reln() == PREPOSITIONAL_COMPLEMENT)) {
                if (pobj == null || pobj.dep().index() > td2.dep().index()) {
                    pobj = td2;
                    // create the new gr relation
                    GrammaticalRelation gr;
                    if (td2.reln() == PREPOSITIONAL_COMPLEMENT) {
                        gr = EnglishGrammaticalRelations.getPrepC(str_mwp0 + '_' + str_mwp1);
                    } else {
                        gr = EnglishGrammaticalRelations.getPrep(str_mwp0 + '_' + str_mwp1);
                    }
                    if (governor != null) {
                        newtd = new TypedDependency(gr, governor, pobj.dep());
                    }
                }
            }
        }
        // only if we found all three parts: mark the old relations as KILL,
        // remove them, and add the new collapsed one
        if (prep != null && dep != null && pobj != null && newtd != null) {
            if (DEBUG) {
                System.err.println("Removing " + prep + ", " + dep + ", and " + pobj);
                System.err.println(" and adding " + newtd);
            }
            prep.setReln(KILL);
            dep.setReln(KILL);
            pobj.setReln(KILL);
            newTypedDeps.add(newtd);
            // now remove typed dependencies with reln "kill"
            // and promote possible orphans
            for (TypedDependency td1 : list) {
                if (td1.reln() != KILL) {
                    if (td1.gov() == mwp0 || td1.gov() == mwp1) {
                        // CDM: Thought of adding this in Jan 2010, but it causes
                        // conflicting relations tmod vs. pobj. Needs more thought;
                        // maybe restrict pobj to first NP in PP, and allow tmod
                        // for a later one?
                        if (td1.reln() == TEMPORAL_MODIFIER) {
                            // special case when an extra NP-TMP is buried in a PP
                            // for "during the same period last year"
                            td1.setGov(pobj.dep());
                        } else {
                            td1.setGov(governor);
                        }
                    }
                    if (!newTypedDeps.contains(td1)) {
                        newTypedDeps.add(td1);
                    }
                }
            }
            list.clear();
            list.addAll(newTypedDeps);
        }
    }
}
public class DB {
    /**
     * Selects a page of posts with associated terms and a set of types.
     *
     * @param types The post types. May be {@code null} or empty for any type.
     * @param status The required post status.
     * @param terms A collection of terms attached to the posts; may be null/empty.
     * @param sort The page sort.
     * @param paging The page range and interval.
     * @return The list of post ids.
     * @throws SQLException on database error.
     */
    public List<Long> selectPostIds(final EnumSet<Post.Type> types, final Post.Status status,
            final Collection<TaxonomyTerm> terms, final Post.Sort sort, final Paging paging)
            throws SQLException {
        // Degenerate page: nothing to fetch.
        if (paging.limit < 1 || paging.start < 0) {
            return ImmutableList.of();
        }
        List<Long> ids = Lists.newArrayListWithExpectedSize(paging.limit < 1024 ? paging.limit : 1024);
        StringBuilder sql = new StringBuilder("SELECT ID FROM ");
        sql.append(postsTableName);
        if (terms != null && terms.size() > 0) {
            // Join against the relationships table; term ids are numeric and
            // inlined directly rather than bound as parameters.
            sql.append(",").append(termRelationshipsTableName);
            sql.append(" WHERE post_status=? AND object_id=ID AND ");
            if (terms.size() == 1) {
                sql.append("term_taxonomy_id=").append(terms.iterator().next().id);
            } else {
                sql.append("term_taxonomy_id IN (");
                Iterator<TaxonomyTerm> iter = terms.iterator();
                sql.append(iter.next().id);
                while (iter.hasNext()) {
                    sql.append(",").append(iter.next().id);
                }
                sql.append(")");
            }
        } else {
            sql.append(" WHERE post_status=?");
        }
        appendPostTypes(types, sql);
        appendPagingSortSQL(sql, sort, paging);
        Connection conn = null;
        PreparedStatement stmt = null;
        ResultSet rs = null;
        Timer.Context ctx = metrics.selectPostIdsTimer.time();
        try {
            conn = connectionSupplier.getConnection();
            stmt = conn.prepareStatement(sql.toString());
            stmt.setString(1, status.toString().toLowerCase());
            // Parameter positions shift depending on whether an interval was
            // appended by appendPagingSortSQL.
            if (paging.interval != null) {
                stmt.setTimestamp(2, new Timestamp(paging.interval.getStartMillis()));
                stmt.setTimestamp(3, new Timestamp(paging.interval.getEndMillis()));
                stmt.setInt(4, paging.start);
                stmt.setInt(5, paging.limit);
            } else {
                stmt.setInt(2, paging.start);
                stmt.setInt(3, paging.limit);
            }
            rs = stmt.executeQuery();
            while (rs.next()) {
                ids.add(rs.getLong(1));
            }
        } finally {
            // Always stop the timer and release JDBC resources.
            ctx.stop();
            SQLUtil.closeQuietly(conn, stmt, rs);
        }
        return ids;
    }
}
public class LocalDateAndTimeStampImpl {
    /**
     * <!-- begin-user-doc -->
     * EMF reflective feature accessor: dispatches the feature id to the
     * matching typed getter, falling back to the superclass for unknown ids.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__STAMP_TYPE:
                return getStampType();
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__THUN_YEAR:
                return getTHunYear();
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__TEN_YEAR:
                return getTenYear();
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__DAY:
                return getDay();
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__HOUR:
                return getHour();
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__MINUTE:
                return getMinute();
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__SECOND:
                return getSecond();
            case AfplibPackage.LOCAL_DATE_AND_TIME_STAMP__HUND_SEC:
                return getHundSec();
        }
        return super.eGet(featureID, resolve, coreType);
    }
}
public class InjectionConfiguration { /** * TODO extract */ public static Class < ? extends MockProvider > lookupMockProviderClass ( final String mockProviderClassName ) { } }
try { if ( mockProviderClassName != null ) { return ReflectionUtil . lookupClass ( MockProvider . class , mockProviderClassName ) ; } } catch ( final Exception e ) { throw new RuntimeException ( "could not load mock provider class: '" + mockProviderClassName + "'" , e ) ; } throw new RuntimeException ( "no mock provider configured" ) ;
public class WebSocketNode { /** * 广播消息 , 给所有人发消息 * @ param message 消息内容 * @ return 为0表示成功 , 其他值表示部分发送异常 */ @ Local public final CompletableFuture < Integer > broadcastMessage ( final Object message ) { } }
return broadcastMessage ( ( Convert ) null , message , true ) ;
public class WarCdiFactory { /** * Initializes the ES storage ( if required ) . * @ param config * @ param esStorage */ private static EsStorage initEsStorage ( WarApiManagerConfig config , EsStorage esStorage ) { } }
if ( sESStorage == null ) { sESStorage = esStorage ; sESStorage . setIndexName ( config . getStorageESIndexName ( ) ) ; if ( config . isInitializeStorageES ( ) ) { sESStorage . initialize ( ) ; } } return sESStorage ;
public class StatefulKnowledgeSessionImpl { /** * Retrieve the < code > JoinMemory < / code > for a particular * < code > JoinNode < / code > . * @ param node * The < code > JoinNode < / code > key . * @ return The node ' s memory . */ public < T extends Memory > T getNodeMemory ( MemoryFactory < T > node ) { } }
return nodeMemories . getNodeMemory ( node , this ) ;
public class ExceptionUtils { /** * Formatea una excepcion y las excepciones padre . * @ param exception * excepcion * @ return el fomato requerido */ public static String formatException ( Throwable exception ) { } }
StringBuilder buffer = new StringBuilder ( ) ; formatException ( exception , buffer ) ; return buffer . toString ( ) ;
public class XmlValidatorManager {
    /**
     * Loads project validators from the XML configuration file referenced by
     * {@link ValidatorConfigs#XML_FILE_PARAM}. Validators are instantiated here (not in the
     * constructor) so each project gets its own validator instances rather than shared
     * global state.
     *
     * Missing configuration (no property, or file absent) is tolerated: a warning/error is
     * logged and no validators are loaded. Malformed XML, however, is fatal.
     *
     * @param props project properties; must name the validator XML file to be useful
     * @param log   logger handed to each parsed validator tag
     *              (NOTE(review): the class-level {@code logger} — not this parameter — is
     *              used for the warnings below; verify that is intentional)
     * @throws ValidatorManagerException if the XML parser cannot be created or the file
     *                                   cannot be read/parsed
     */
    @Override
    public void loadValidators(final Props props, final Logger log) {
        // Start from a fresh, insertion-ordered validator map on every load.
        this.validators = new LinkedHashMap<>();
        if (!props.containsKey(ValidatorConfigs.XML_FILE_PARAM)) {
            logger.warn("Azkaban properties file does not contain the key "
                    + ValidatorConfigs.XML_FILE_PARAM);
            return;
        }
        final String xmlPath = props.get(ValidatorConfigs.XML_FILE_PARAM);
        final File file = new File(xmlPath);
        if (!file.exists()) {
            logger.error("Azkaban validator configuration file " + xmlPath + " does not exist.");
            return;
        }
        // Creating the document builder to parse xml.
        // NOTE(review): the factory is not hardened against external entities (XXE);
        // acceptable only because the file is a local, operator-controlled config — confirm.
        final DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = null;
        try {
            builder = docBuilderFactory.newDocumentBuilder();
        } catch (final ParserConfigurationException e) {
            throw new ValidatorManagerException(
                    "Exception while parsing validator xml. Document builder not created.", e);
        }
        Document doc = null;
        try {
            doc = builder.parse(file);
        } catch (final SAXException e) {
            throw new ValidatorManagerException("Exception while parsing " + xmlPath
                    + ". Invalid XML.", e);
        } catch (final IOException e) {
            throw new ValidatorManagerException("Exception while parsing " + xmlPath
                    + ". Error reading file.", e);
        }
        // The document root is expected to be the <azkaban-validators> element; each
        // child <validator> element is parsed into a validator instance.
        final NodeList tagList = doc.getChildNodes();
        final Node azkabanValidators = tagList.item(0);
        final NodeList azkabanValidatorsList = azkabanValidators.getChildNodes();
        for (int i = 0; i < azkabanValidatorsList.getLength(); ++i) {
            final Node node = azkabanValidatorsList.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                if (node.getNodeName().equals(VALIDATOR_TAG)) {
                    parseValidatorTag(node, props, log);
                }
            }
        }
    }
}
public class BeanId {
    /**
     * Returns a BeanId from its serialized (byteArray) version. A more
     * performance friendly replacement for the serialization code used in
     * deserialize. <p>
     *
     * Note: the passed ByteArray will become part of the state of the
     * returned BeanId, so should not be modified after calling this
     * method. <p>
     *
     * Wire layout (as read below): fixed header bytes, one type byte (which may
     * carry the MODULE_VERSION_CAPABLE flag), the serialized J2EEName, and — for
     * home/stateful/entity beans — a primary key.
     *
     * @param byteArray contains byte array of serialized BeanId to be
     *            deserialized.
     * @param container EJB Container in which the home for this BeanId lives.
     * @return deserialized BeanId.
     * @exception IOException if a problem occurs deserializing the BeanID
     * @exception ClassNotFoundException if a class in the serialized
     *                BeanId cannot be located by the class loader.
     */
    public static BeanId getBeanId(ByteArray byteArray, // d140003.12
                                   EJSContainer container)
                    throws IOException, ClassNotFoundException {
        BeanId id = null;
        int pkeyIndex;
        Serializable pkey = null;
        byte[] j2eeNameBytes;
        boolean isHome = false;
        J2EEName j2eeName = null;

        byte[] bytes = byteArray.getBytes(); // d140003.12

        // Match up the header with the new format. If it does not match
        // then we have an incoming type 1 BeanId.
        for (int i = 0; i < HEADER_LEN; i++) {
            if (bytes[i] != header[i]) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    Tr.debug(tc, "Header mismatch, bytes do not represent a BeanId.");
                // Should be rare that we get an old BeanId.
                throw new IOException("Invalid BeanId header '" + new String(bytes) +
                                      "', bytes not a BeanId");
            }
        }

        // Make a determination on the type of the bean from the type byte
        byte typeId = bytes[HEADER_LEN];

        // Determine if the bean module is version capable                 F54184
        boolean isVersionCapable = ((typeId & MODULE_VERSION_CAPABLE) != 0);

        // Dispatch on the type with the version-capable flag masked off.
        switch (typeId & ~MODULE_VERSION_CAPABLE) {

            // If it is a home bean, set the isHome flag. We then have
            // to read the J2EEName and the primary key
            case HOME_BEAN:
                isHome = true;
                // Fall through

                // Bean types that contain state must read the J2EEName and primary key
            case STATEFUL_BEAN:
            case STATEFUL_BEAN + USES_BEAN_MANAGED_TX:
            case ENTITY_BEAN:
                j2eeNameBytes = readJ2EENameBytes(bytes);
                j2eeName = container.getJ2EENameFactory().create(j2eeNameBytes);
                // Only non-home beans get their J2EEName mapped to the versioned form here;
                // for homes the versioned mapping is applied to the pkey below.
                if (!isHome && isVersionCapable) {
                    j2eeName = EJSContainer.homeOfHomes.getVersionedJ2EEName(j2eeName);
                }
                pkeyIndex = HEADER_LEN + BEAN_TYPE_LEN + J2EE_NAME_LEN + j2eeNameBytes.length;
                pkey = readPKey(bytes, pkeyIndex, j2eeName);
                break;

            // Simplest case : A stateless bean. Read the Java EE name
            // and we are done
            // MessageDriven are just like Stateless....                 d176974
            case SINGLETON_BEAN:
            case SINGLETON_BEAN + USES_BEAN_MANAGED_TX:
            case STATELESS_BEAN:
            case STATELESS_BEAN + USES_BEAN_MANAGED_TX:
            case MESSAGEDRIVEN_BEAN:
            case MESSAGEDRIVEN_BEAN + USES_BEAN_MANAGED_TX:
                j2eeNameBytes = readJ2EENameBytes(bytes);
                j2eeName = container.getJ2EENameFactory().create(j2eeNameBytes);
                if (isVersionCapable) {
                    j2eeName = EJSContainer.homeOfHomes.getVersionedJ2EEName(j2eeName);
                }
                // pkey = null;
                break;

            default:
                // Nothing can be done.... this is not a type 1 BeanId, so either
                // the stream is corrupt, or a new bean type has been added,
                // but not added to the list above.                      LI2281-3
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
                    Tr.event(tc, "Unable to parse bean id: unsupported EJB type: " + bytes[HEADER_LEN]);
                throw new IOException("Unsupported EJB Type: " + bytes[HEADER_LEN]);
        }

        // Now, lookup the home based on the J2EEName read above.
        HomeInternal home = container.getHomeOfHomes().getHome(j2eeName);

        // If the home was not found, then the application has either not
        // been installed or started, or possibly failed to start.
        // Log a meaningful warning, and throw a meaningful exception. LI2281-3
        if (home == null) {
            Tr.warning(tc, "HOME_NOT_FOUND_CNTR0092W", j2eeName.toString());
            throw new EJBNotFoundException(j2eeName.toString());
        }

        // If we are dealing with a stateful bean, then the pkey has to
        // be converted back into it's CSI implementation before it can
        // be used.
        if (home.isStatefulSessionHome()) {
            pkey = EJSContainer.sessionKeyFactory.create((byte[]) pkey);
        }

        // For homes, convert the byte array to a J2EEName object
        if (isHome) {
            // Only the home-of-homes (or the EJB factory home) may own a home BeanId.
            if (home != EJSContainer.homeOfHomes && // d621921
                home != EJSContainer.homeOfHomes.ivEJBFactoryHome) // d639148
            {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    Tr.debug(tc, "invalid home bean type id for " + j2eeName, byteArray);
                throw new IOException("Invalid home bean type id");
            }
            pkey = container.getJ2EENameFactory().create((byte[]) pkey);
            if (isVersionCapable) {
                pkey = EJSContainer.homeOfHomes.getVersionedJ2EEName((J2EEName) pkey);
            }
        } else {
            // Sanity check: the deserialized type byte must match the bean's metadata.
            if (getBeanType(home.getBeanMetaData(j2eeName), false) != typeId) // d621921
            {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    Tr.debug(tc, "invalid bean type id for " + j2eeName, byteArray);
                throw new IOException("Invalid bean type id");
            }
        }

        // Create the BeanId from the deserialized pieces.... note that
        // this includes calculating the hash value.
        id = new BeanId(home, pkey, isHome);

        // Set the byteArray used to create this BeanId in the BeanId
        // itself, so it won't have to be created later. Note that the
        // caller must not modify the passed byteArray after this. d140003.12
        id.byteArray = byteArray;

        return id;
    }
}
public class DbPro { /** * Execute sql query and return the first result . I recommend add " limit 1 " in your sql . * @ param sql an SQL statement that may contain one or more ' ? ' IN parameter placeholders * @ param paras the parameters of sql * @ return Object [ ] if your sql has select more than one column , * and it return Object if your sql has select only one column . */ public < T > T queryFirst ( String sql , Object ... paras ) { } }
List < T > result = query ( sql , paras ) ; return ( result . size ( ) > 0 ? result . get ( 0 ) : null ) ;
public class AfplibPackageImpl {
    /**
     * Returns the {@link EClass} metaobject for MCF1, resolving it lazily from the
     * registered AFP package on first access and caching it afterwards.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getMCF1() {
        if (mcf1EClass == null) {
            // Classifier index 292 is fixed by the EMF generator for this package.
            mcf1EClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(292);
        }
        return mcf1EClass;
    }
}
public class SchedulerClientFactory { /** * Implementation of getSchedulerClient - Used to create objects * Currently it creates either HttpServiceSchedulerClient or LibrarySchedulerClient * @ return getSchedulerClient created . return null if failed to create ISchedulerClient instance */ public ISchedulerClient getSchedulerClient ( ) throws SchedulerException { } }
LOG . fine ( "Creating scheduler client" ) ; ISchedulerClient schedulerClient ; if ( Context . schedulerService ( config ) ) { // get the instance of the state manager SchedulerStateManagerAdaptor statemgr = Runtime . schedulerStateManagerAdaptor ( runtime ) ; Scheduler . SchedulerLocation schedulerLocation = statemgr . getSchedulerLocation ( Runtime . topologyName ( runtime ) ) ; if ( schedulerLocation == null ) { throw new SchedulerException ( "Failed to get scheduler location from state manager" ) ; } LOG . log ( Level . FINE , "Scheduler is listening on location: {0} " , schedulerLocation . toString ( ) ) ; schedulerClient = new HttpServiceSchedulerClient ( config , runtime , schedulerLocation . getHttpEndpoint ( ) ) ; } else { // create an instance of scheduler final IScheduler scheduler = LauncherUtils . getInstance ( ) . getSchedulerInstance ( config , runtime ) ; LOG . fine ( "Invoke scheduler as a library" ) ; schedulerClient = new LibrarySchedulerClient ( config , runtime , scheduler ) ; } return schedulerClient ;
public class JShellTool { /** * Print ( fluff ) using resource bundle look - up , MessageFormat , and add * prefix and postfix * @ param key the resource key * @ param args */ @ Override public void fluffmsg ( String key , Object ... args ) { } }
if ( showFluff ( ) ) { hardmsg ( key , args ) ; }
public class BaseDestinationHandler { /** * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . impl . interfaces . DestinationHandler # removeConsumerPointMatchTarget ( com . ibm . ws . sib . processor . impl . ConsumerKey ) */ @ Override public void removeConsumerPointMatchTarget ( DispatchableKey consumerPointData ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "removeConsumerPointMatchTarget" , consumerPointData ) ; // Remove the consumer point from the matchspace // This used to use the ptoPRealization but this is no longer the case . messageProcessor . getMessageProcessorMatching ( ) . removeConsumerPointMatchTarget ( consumerPointData ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "removeConsumerPointMatchTarget" ) ;
public class ZmqEventConsumer {
    /**
     * Subscribes to the given event type via the device's administration device
     * and then verifies the event connection.
     *
     * @param device    the device to subscribe on
     * @param eventType the event type name to subscribe to
     * @throws DevFailed if the subscription command or connection check fails
     */
    private void callEventSubscriptionAndConnect(DeviceProxy device, String eventType) throws DevFailed {
        // Done for IDL>=5 and not for notifd event system (no attribute name)
        String deviceName = device.name();
        String[] subscribeInfo = new String[] {
                deviceName,
                "",
                "subscribe",
                eventType,
                Integer.toString(device.get_idl_version())
        };
        DeviceData inputArg = new DeviceData();
        inputArg.insert(subscribeInfo);

        String commandName = getEventSubscriptionCommandName();
        ApiUtil.printTrace(device.get_adm_dev().name()
                + ".command_inout(\"" + commandName + "\") for " + deviceName + eventType);
        DeviceData outputArg = device.get_adm_dev().command_inout(commandName, inputArg);
        ApiUtil.printTrace(" command_inout done.");

        // And then connect to device
        checkDeviceConnection(device, null, outputArg, eventType);
    }
}
public class LiveOutputsInner {
    /**
     * Create Live Output.
     * Creates a Live Output. This is the synchronous variant: it blocks the calling
     * thread until the async service call completes.
     *
     * @param resourceGroupName The name of the resource group within the Azure subscription.
     * @param accountName The Media Services account name.
     * @param liveEventName The name of the Live Event.
     * @param liveOutputName The name of the Live Output.
     * @param parameters Live Output properties needed for creation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ApiErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the LiveOutputInner object if successful.
     */
    public LiveOutputInner beginCreate(String resourceGroupName, String accountName,
            String liveEventName, String liveOutputName, LiveOutputInner parameters) {
        // Delegate to the async implementation and block for its single response body.
        return beginCreateWithServiceResponseAsync(resourceGroupName, accountName,
                liveEventName, liveOutputName, parameters).toBlocking().single().body();
    }
}
public class CPRulePersistenceImpl {
    /**
     * Returns a range of all the cp rules.
     *
     * Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are not primary keys, they are
     * indexes in the result set; <code>0</code> refers to the first result. Passing
     * {@link QueryUtil#ALL_POS} for both returns the full result set.
     *
     * @param start the lower bound of the range of cp rules
     * @param end the upper bound of the range of cp rules (not inclusive)
     * @return the range of cp rules
     */
    @Override
    public List<CPRule> findAll(int start, int end) {
        // Delegate to the three-argument overload with no ORDER BY comparator,
        // which yields the default primary-key ascending ordering described above.
        return findAll(start, end, null);
    }
}
public class MetaObjectCompiler { /** * Returns the bid of the processor for the file . * @ param inputFile * filename of input file * @ return bid for the file , 0 indicates no interest , 1 indicates that the * processor recognizes the file but doesn ' t process it ( header * files , for example ) , 100 indicates strong interest */ @ Override public int bid ( final String inputFile ) { } }
// get base bid final int baseBid = super . bid ( inputFile ) ; // if the base bid was non - zero ( . h or . cpp extension ) if ( baseBid > 0 ) { // scan the file for Q _ OBJECT // skip file if not present try { final Reader reader = new BufferedReader ( new FileReader ( inputFile ) ) ; final boolean hasQObject = MetaObjectParser . hasQObject ( reader ) ; reader . close ( ) ; if ( hasQObject ) { return baseBid ; } } catch ( final IOException ex ) { return 0 ; } } return 0 ;
public class RuntimeUtil {
    /**
     * This method guarantees that garbage collection is
     * done unlike <code>{@link System#gc()}</code>.
     *
     * It allocates a sentinel object, drops the only strong reference to it, and
     * invokes {@link System#gc()} until the weak reference to the sentinel is
     * cleared — proof that a collection cycle actually ran.
     */
    public static void gc() {
        Object sentinel = new Object();
        // Fix: parameterize the reference type instead of using a raw WeakReference.
        final WeakReference<Object> ref = new WeakReference<Object>(sentinel);
        sentinel = null; // drop the only strong reference so the sentinel is collectible
        while (ref.get() != null) {
            System.gc();
        }
    }
}
public class ErrorReporter { /** * Returns true iff errors have occurred since { @ code checkpoint } was obtained from { @ link * # checkpoint } . * < p > Useful for callers whose outputs are dependent on whether some code path resulted in new * errors ( for example , returning an error node if parsing encountered errors ) . */ public final boolean errorsSince ( Checkpoint checkpoint ) { } }
Checkpoint impl = checkpoint ; if ( impl . owner != this ) { throw new IllegalArgumentException ( "Can only call errorsSince on a Checkpoint instance that was returned from this same " + "reporter" ) ; } return getCurrentNumberOfErrors ( ) > impl . errorsSoFar ;
public class OjbFieldManager { /** * retrieve the value of attribute [ fieldNum ] from the object . * @ return Object the value of attribute [ fieldNum ] */ Object getValue ( int fieldNum ) { } }
String attributeName = getAttributeName ( fieldNum ) ; ClassDescriptor cld = broker . getClassDescriptor ( pc . getClass ( ) ) ; // field could be a primitive typed attribute . . . AttributeDescriptorBase fld = cld . getFieldDescriptorByName ( attributeName ) ; // field could be a reference attribute . . . if ( fld == null ) { fld = cld . getObjectReferenceDescriptorByName ( attributeName ) ; } // or it could be a collection attribute : if ( fld == null ) { fld = cld . getCollectionDescriptorByName ( attributeName ) ; } Object value = fld . getPersistentField ( ) . get ( pc ) ; return value ;
public class JBBPSafeInstantiator {
    /**
     * Find a constructor for a static class.
     *
     * A no-argument constructor is preferred when several are declared;
     * otherwise the first declared constructor is returned.
     *
     * @param klazz a class to find a constructor, must not be null
     * @return found constructor to be used to make an instance
     */
    private static Constructor<?> findConstructorForStaticClass(final Class<?> klazz) {
        final Constructor<?>[] declared = klazz.getDeclaredConstructors();
        if (declared.length != 1) {
            // Several constructors: prefer an explicit zero-argument one.
            for (final Constructor<?> candidate : declared) {
                if (candidate.getParameterTypes().length == 0) {
                    return candidate;
                }
            }
        }
        // Single constructor, or no zero-arg candidate found: take the first.
        return declared[0];
    }
}
public class OWLClassAssertionAxiomImpl_CustomFieldSerializer {
    /**
     * Serializes the content of the object into the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamWriter}.
     *
     * @param streamWriter the {@link com.google.gwt.user.client.rpc.SerializationStreamWriter}
     *            to write the object's content to
     * @param instance the object instance to serialize
     * @throws com.google.gwt.user.client.rpc.SerializationException
     *             if the serialization operation is not successful
     */
    @Override
    public void serializeInstance(SerializationStreamWriter streamWriter,
            OWLClassAssertionAxiomImpl instance) throws SerializationException {
        // Delegate to the static serialize helper (GWT custom field serializer convention).
        serialize(streamWriter, instance);
    }
}
public class UnknownExtensionTypeException { /** * Creates a new { @ link UnknownExtensionTypeException } for the specified type * @ throws IllegalArgumentException * If the type is not specified */ static < T extends Assignable > UnknownExtensionTypeException newInstance ( final Class < T > type ) throws IllegalArgumentException { } }
// Precondition checks if ( type == null ) { throw new IllegalArgumentException ( "type must be specified" ) ; } // Create new return new UnknownExtensionTypeException ( type ) ;
public class HttpResponseConduit {
    /**
     * Handles writing out the header data. It can also take a byte buffer of user
     * data, to enable both user data and headers to be written out in a single operation,
     * which has a noticeable performance impact.
     *
     * It is up to the caller to note the current position of this buffer before and after they
     * call this method, and use this to figure out how many bytes (if any) have been written.
     *
     * State machine summary (as implemented below): STATE_START writes the status line
     * and headers into the pooled buffer, spilling to processStatefulWrite when the
     * buffer fills; STATE_BUF_FLUSH drains a previously-filled buffer; any other state
     * is delegated to processStatefulWrite. {@code userData} may be a single ByteBuffer
     * or (presumably) a ByteBuffer[] — TODO confirm against callers.
     *
     * @param state    current write state (STATE_START, STATE_BUF_FLUSH, ...)
     * @param userData optional user payload written after the headers
     * @param pos      start index into userData when it is an array
     * @param length   number of userData buffers when it is an array
     * @return the next state (STATE_BODY when fully written, STATE_BUF_FLUSH when the
     *         channel would block, or a header state from processStatefulWrite)
     * @throws IOException on channel failure, or ClosedChannelException when already done
     */
    private int processWrite(int state, final Object userData, int pos, int length)
            throws IOException {
        if (done || exchange == null) {
            throw new ClosedChannelException();
        }
        try {
            assert state != STATE_BODY;
            if (state == STATE_BUF_FLUSH) {
                // Drain the previously-prepared header buffer (plus userData if present).
                final ByteBuffer byteBuffer = pooledBuffer.getBuffer();
                do {
                    long res = 0;
                    ByteBuffer[] data;
                    if (userData == null || length == 0) {
                        res = next.write(byteBuffer);
                    } else if (userData instanceof ByteBuffer) {
                        // Gathering write: header buffer followed by the single user buffer.
                        data = writevBuffer;
                        if (data == null) {
                            data = writevBuffer = new ByteBuffer[2];
                        }
                        data[0] = byteBuffer;
                        data[1] = (ByteBuffer) userData;
                        res = next.write(data, 0, 2);
                    } else {
                        // userData is an array of buffers; copy the requested slice after the header.
                        data = writevBuffer;
                        if (data == null || data.length < length + 1) {
                            data = writevBuffer = new ByteBuffer[length + 1];
                        }
                        data[0] = byteBuffer;
                        System.arraycopy(userData, pos, data, 1, length);
                        res = next.write(data, 0, length + 1);
                    }
                    if (res == 0) {
                        // Channel would block: stay in flush state and retry later.
                        return STATE_BUF_FLUSH;
                    }
                } while (byteBuffer.hasRemaining());
                bufferDone();
                return STATE_BODY;
            } else if (state != STATE_START) {
                // Mid-header states are handled by the slower stateful path.
                return processStatefulWrite(state, userData, pos, length);
            }

            // merge the cookies into the header map
            Connectors.flattenCookies(exchange);

            if (pooledBuffer == null) {
                pooledBuffer = pool.allocate();
            }
            ByteBuffer buffer = pooledBuffer.getBuffer();

            assert buffer.remaining() >= 50;
            // Status line: "HTTP/1.1 SP code SP reason CRLF"; the three digits are
            // emitted individually to avoid an Integer.toString allocation.
            Protocols.HTTP_1_1.appendTo(buffer);
            buffer.put((byte) ' ');
            int code = exchange.getStatusCode();
            assert 999 >= code && code >= 100;
            buffer.put((byte) (code / 100 + '0'));
            buffer.put((byte) (code / 10 % 10 + '0'));
            buffer.put((byte) (code % 10 + '0'));
            buffer.put((byte) ' ');
            String string = exchange.getReasonPhrase();
            if (string == null) {
                string = StatusCodes.getReason(code);
            }
            if (string.length() > buffer.remaining()) {
                // Reason phrase cannot fit even in an empty buffer: unrecoverable.
                pooledBuffer.close();
                pooledBuffer = null;
                truncateWrites();
                throw UndertowMessages.MESSAGES.reasonPhraseToLargeForBuffer(string);
            }
            writeString(buffer, string);
            buffer.put((byte) '\r').put((byte) '\n');

            // Write "Name: value CRLF" for every non-empty header, tracking remaining
            // space; when the buffer would overflow, record the partial position in
            // instance fields and fall back to the stateful writer.
            int remaining = buffer.remaining();
            HeaderMap headers = exchange.getResponseHeaders();
            long fiCookie = headers.fastIterateNonEmpty();
            while (fiCookie != -1) {
                HeaderValues headerValues = headers.fiCurrent(fiCookie);
                HttpString header = headerValues.getHeaderName();
                int headerSize = header.length();
                int valueIdx = 0;
                while (valueIdx < headerValues.size()) {
                    remaining -= (headerSize + 2);
                    if (remaining < 0) {
                        this.fiCookie = fiCookie;
                        this.string = string;
                        this.headerValues = headerValues;
                        this.valueIdx = valueIdx;
                        this.charIndex = 0;
                        this.state = STATE_HDR_NAME;
                        buffer.flip();
                        return processStatefulWrite(STATE_HDR_NAME, userData, pos, length);
                    }
                    header.appendTo(buffer);
                    buffer.put((byte) ':').put((byte) ' ');
                    string = headerValues.get(valueIdx++);
                    remaining -= (string.length() + 2);
                    if (remaining < 2) { // we use 2 here, to make sure we always have room for the final \r\n
                        this.fiCookie = fiCookie;
                        this.string = string;
                        this.headerValues = headerValues;
                        this.valueIdx = valueIdx;
                        this.charIndex = 0;
                        this.state = STATE_HDR_VAL;
                        buffer.flip();
                        return processStatefulWrite(STATE_HDR_VAL, userData, pos, length);
                    }
                    writeString(buffer, string);
                    buffer.put((byte) '\r').put((byte) '\n');
                }
                fiCookie = headers.fiNextNonEmpty(fiCookie);
            }
            // Blank line terminating the header block.
            buffer.put((byte) '\r').put((byte) '\n');
            buffer.flip();

            // Flush the assembled headers (optionally gathered with userData),
            // mirroring the STATE_BUF_FLUSH logic above.
            do {
                long res = 0;
                ByteBuffer[] data;
                if (userData == null) {
                    res = next.write(buffer);
                } else if (userData instanceof ByteBuffer) {
                    data = writevBuffer;
                    if (data == null) {
                        data = writevBuffer = new ByteBuffer[2];
                    }
                    data[0] = buffer;
                    data[1] = (ByteBuffer) userData;
                    res = next.write(data, 0, 2);
                } else {
                    data = writevBuffer;
                    if (data == null || data.length < length + 1) {
                        data = writevBuffer = new ByteBuffer[length + 1];
                    }
                    data[0] = buffer;
                    System.arraycopy(userData, pos, data, 1, length);
                    res = next.write(data, 0, length + 1);
                }
                if (res == 0) {
                    return STATE_BUF_FLUSH;
                }
            } while (buffer.hasRemaining());
            bufferDone();
            return STATE_BODY;
        } catch (IOException | RuntimeException | Error e) {
            // WFLY-4696, just to be safe
            if (pooledBuffer != null) {
                pooledBuffer.close();
                pooledBuffer = null;
            }
            throw e;
        }
    }
}