signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LazyReact { /** * Build an FutureStream that reacts Asynchronously to the Suppliers within the * specified Iterator * < pre > * { @ code * List < Supplier < Data > > list = Arrays . asList ( this : : load1 , this : : looad2 , this : : load3 ) ; * LazyReact ( ) . fromIteratorAsync ( list . iterator ( ) ) * . map ( this : : process ) * . forEach ( this : : save ) * < / pre > * @ param actions Iterator to react to * @ return FutureStream * @ see com . oath . cyclops . react . stream . BaseSimpleReact # react ( java . util . Iterator ) */ public < U > FutureStream < U > fromIteratorAsync ( final Iterator < ? extends Supplier < U > > actions ) { } }
return this . < U > constructFutures ( StreamSupport . < Supplier < U > > stream ( Spliterators . < Supplier < U > > spliteratorUnknownSize ( actions , Spliterator . ORDERED ) , false ) . map ( next -> CompletableFuture . supplyAsync ( next , getExecutor ( ) ) ) ) ;
public class HttpConnection { /** * Handling the webservice call * @ param url URL to call * @ return Input stream as a result , or an exception * @ throws IOException In case of an IO error * @ throws NsApiException In case of any other error */ InputStream getContent ( String url ) throws IOException , NsApiException { } }
Request request = new Request . Builder ( ) . url ( url ) . get ( ) . build ( ) ; try { Response response = client . newCall ( request ) . execute ( ) ; if ( response . body ( ) == null ) { log . error ( "Error while calling the webservice, entity is null" ) ; throw new NsApiException ( "Error while calling the webservice, entity is null" ) ; } return response . body ( ) . byteStream ( ) ; } catch ( RuntimeException e ) { log . error ( "Error while calling the webservice, entity is null" ) ; throw new NsApiException ( "Error while calling the webservice, entity is null" , e ) ; }
public class UpdateApplicationBase { /** * Error generating shortcut method */ static protected CompletableFuture < ClientResponse > makeQuickResponse ( byte statusCode , String msg ) { } }
ClientResponseImpl cri = new ClientResponseImpl ( statusCode , new VoltTable [ 0 ] , msg ) ; CompletableFuture < ClientResponse > f = new CompletableFuture < > ( ) ; f . complete ( cri ) ; return f ;
public class SceneStructureMetric { /** * Call this function first . Specifies number of each type of data which is available . * @ param totalCameras Number of cameras * @ param totalViews Number of views * @ param totalPoints Number of points * @ param totalRigid Number of rigid objects */ public void initialize ( int totalCameras , int totalViews , int totalPoints , int totalRigid ) { } }
cameras = new Camera [ totalCameras ] ; views = new View [ totalViews ] ; points = new Point [ totalPoints ] ; rigids = new Rigid [ totalRigid ] ; for ( int i = 0 ; i < cameras . length ; i ++ ) { cameras [ i ] = new Camera ( ) ; } for ( int i = 0 ; i < views . length ; i ++ ) { views [ i ] = new View ( ) ; } for ( int i = 0 ; i < points . length ; i ++ ) { points [ i ] = new Point ( pointSize ) ; } for ( int i = 0 ; i < rigids . length ; i ++ ) { rigids [ i ] = new Rigid ( ) ; } // forget old assignments lookupRigid = null ;
public class FlowTypeCheck { /** * Type check a < code > return < / code > statement . If a return expression is given , * then we must check that this is well - formed and is a subtype of the enclosing * function or method ' s declared return type . The environment after a return * statement is " bottom " because that represents an unreachable program point . * @ param stmt * Statement to type check * @ param environment * Determines the type of all variables immediately going into this * block * @ param scope * The stack of enclosing scopes * @ return * @ throws ResolveError * If a named type within this statement cannot be resolved within * the enclosing project . */ private Environment checkReturn ( Stmt . Return stmt , Environment environment , EnclosingScope scope ) throws IOException { } }
// Determine the set of return types for the enclosing function or // method . This then allows us to check the given operands are // appropriate subtypes . Decl . FunctionOrMethod fm = scope . getEnclosingScope ( FunctionOrMethodScope . class ) . getDeclaration ( ) ; Tuple < Type > types = fm . getType ( ) . getReturns ( ) ; // Type check the operands for the return statement ( if any ) checkMultiExpressions ( stmt . getReturns ( ) , environment , types ) ; // Return bottom as following environment to signal that control - flow // cannot continue here . Thus , any following statements will encounter // the BOTTOM environment and , hence , report an appropriate error . return FlowTypeUtils . BOTTOM ;
public class HttpChannelConfig { /** * Check the input configuration for the timeout to use when writing data * during the connection . * @ param props */ private void parseWriteTimeout ( Map < Object , Object > props ) { } }
Object value = props . get ( HttpConfigConstants . PROPNAME_WRITE_TIMEOUT ) ; if ( null != value ) { try { this . writeTimeout = TIMEOUT_MODIFIER * minLimit ( convertInteger ( value ) , HttpConfigConstants . MIN_TIMEOUT ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Config: Write timeout is " + getWriteTimeout ( ) ) ; } } catch ( NumberFormatException nfe ) { FFDCFilter . processException ( nfe , getClass ( ) . getName ( ) + ".parseWriteTimeout" , "1" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Config: Invalid write timeout; " + value ) ; } } }
public class SearchRange { /** * Converts this range into a { @ link SearchKey } . We expect the caller will * call { @ link # isSingleValue ( ) } before calling this method . * @ return a SearchKey represents the range */ public SearchKey asSearchKey ( ) { } }
Constant [ ] vals = new Constant [ ranges . length ] ; for ( int i = 0 ; i < vals . length ; i ++ ) vals [ i ] = ranges [ i ] . asConstant ( ) ; return new SearchKey ( vals ) ;
public class IPAddressString { /** * Produces the { @ link IPAddress } of the specified address version corresponding to this IPAddressString . * In most cases the string indicates the address version and calling { @ link # toAddress ( ) } is sufficient , with a few exceptions . * When this object represents only a network prefix length , * specifying the address version allows the conversion to take place to the associated mask for that prefix length . * When this object represents all addresses , specifying the address version allows the conversion to take place * to the associated representation of all IPv4 or all IPv6 addresses . * When this object represents the empty string and that string is interpreted as a loopback , then it returns * the corresponding loopback address . If empty strings are not interpreted as loopback , null is returned . * When this object represents an ipv4 or ipv6 address , it returns that address if and only if that address matches the provided version . * If the string used to construct this object is an invalid format , * or a format that does not match the provided version , then this method throws { @ link AddressStringException } . * @ param version the address version that this address should represent . * @ return * @ throws AddressStringException * @ throws IncompatibleAddressException address in proper format cannot be converted to an address : for masks inconsistent with associated address range , or ipv4 mixed segments that cannot be joined into ipv6 segments */ public IPAddress toAddress ( IPVersion version ) throws AddressStringException , IncompatibleAddressException { } }
validate ( ) ; // call validate so that we throw consistently , cover type = = INVALID , and ensure the addressProvider exists return addressProvider . getProviderAddress ( version ) ;
public class WebGL10 { /** * < p > { @ code glBindFramebuffer } binds the framebuffer object with name framebuffer to the framebuffer target * specified by target . target must be { @ link # GL _ FRAMEBUFFER } . If a framebuffer object is bound , it becomes the * target for rendering or readback operations , respectively , until it is deleted or another framebuffer is bound to * the corresponding bind point . < / p > * < p > { @ link # GL _ INVALID _ ENUM } is generated if target is not { @ link # GL _ FRAMEBUFFER } . < / p > * < p > { @ link # GL _ INVALID _ OPERATION } is generated if framebuffer is not zero or the name of a framebuffer previously * returned from a call to { @ link # glCreateFramebuffer ( ) } . * @ param target Specifies the framebuffer target of the binding operation . * @ param frameBuffer Specifies the name of the framebuffer object to bind . */ public static void glBindFramebuffer ( int target , int frameBuffer ) { } }
checkContextCompatibility ( ) ; nglBindFramebuffer ( target , WebGLObjectMap . get ( ) . toFramebuffer ( frameBuffer ) ) ;
public class TypeCheckingExtension { /** * Lookup a ClassNode by its name from the source unit * @ param type the name of the class whose ClassNode we want to lookup * @ return a ClassNode representing the class */ public ClassNode lookupClassNodeFor ( String type ) { } }
for ( ClassNode cn : typeCheckingVisitor . getSourceUnit ( ) . getAST ( ) . getClasses ( ) ) { if ( cn . getName ( ) . equals ( type ) ) return cn ; } return null ;
public class Tuple0 { /** * Concatenate a tuple to this tuple . */ public final < T1 , T2 > Tuple2 < T1 , T2 > concat ( Tuple2 < T1 , T2 > tuple ) { } }
return new Tuple2 < > ( tuple . v1 , tuple . v2 ) ;
public class ManagementModelNode { /** * Refresh children using read - resource operation . */ public void explore ( ) { } }
if ( isLeaf ) return ; if ( isGeneric ) return ; removeAllChildren ( ) ; try { String addressPath = addressPath ( ) ; ModelNode resourceDesc = executor . doCommand ( addressPath + ":read-resource-description" ) ; resourceDesc = resourceDesc . get ( "result" ) ; ModelNode response = executor . doCommand ( addressPath + ":read-resource(include-runtime=true,include-defaults=true)" ) ; ModelNode result = response . get ( "result" ) ; if ( ! result . isDefined ( ) ) return ; List < String > childrenTypes = getChildrenTypes ( addressPath ) ; for ( ModelNode node : result . asList ( ) ) { Property prop = node . asProperty ( ) ; if ( childrenTypes . contains ( prop . getName ( ) ) ) { // resource node if ( hasGenericOperations ( addressPath , prop . getName ( ) ) ) { add ( new ManagementModelNode ( cliGuiCtx , new UserObject ( node , prop . getName ( ) ) ) ) ; } if ( prop . getValue ( ) . isDefined ( ) ) { for ( ModelNode innerNode : prop . getValue ( ) . asList ( ) ) { UserObject usrObj = new UserObject ( innerNode , prop . getName ( ) , innerNode . asProperty ( ) . getName ( ) ) ; add ( new ManagementModelNode ( cliGuiCtx , usrObj ) ) ; } } } else { // attribute node UserObject usrObj = new UserObject ( node , resourceDesc , prop . getName ( ) , prop . getValue ( ) . asString ( ) ) ; add ( new ManagementModelNode ( cliGuiCtx , usrObj ) ) ; } } } catch ( Exception e ) { e . printStackTrace ( ) ; }
public class ListT { /** * Construct an ListT from an AnyM that contains a monad type that contains type other than List * The values in the underlying monad will be mapped to List < A > * @ param anyM AnyM that doesn ' t contain a monad wrapping an List * @ return ListT */ public static < W extends WitnessType < W > , A > ListT < W , A > fromAnyM ( final AnyM < W , A > anyM ) { } }
AnyM < W , ListX < A > > y = anyM . map ( i -> ListX . of ( i ) ) ; return of ( y ) ;
public class AbstractETFWriter { /** * Write the full ETF . * @ param printWriter the Writer . * @ param a the automaton to write . * @ param inputs the alphabet . */ protected final void write ( PrintWriter printWriter , A a , Alphabet < I > inputs ) { } }
writeState ( printWriter ) ; writeEdge ( printWriter ) ; writeETF ( printWriter , a , inputs ) ; printWriter . close ( ) ;
public class VTensor { /** * Normalizes the values so that they sum to 1 */ public double normalize ( ) { } }
double propSum = this . getSum ( ) ; if ( propSum == s . zero ( ) ) { this . fill ( s . divide ( s . one ( ) , s . fromReal ( this . size ( ) ) ) ) ; } else if ( propSum == s . posInf ( ) ) { int count = count ( s . posInf ( ) ) ; if ( count == 0 ) { throw new RuntimeException ( "Unable to normalize since sum is infinite but contains no infinities: " + this . toString ( ) ) ; } double constant = s . divide ( s . one ( ) , s . fromReal ( count ) ) ; for ( int d = 0 ; d < this . size ( ) ; d ++ ) { if ( this . getValue ( d ) == s . posInf ( ) ) { this . setValue ( d , constant ) ; } else { this . setValue ( d , s . zero ( ) ) ; } } } else { this . divide ( propSum ) ; assert ! this . containsNaN ( ) ; } return propSum ;
public class JsonService { /** * this class loader interface can be used by other plugins to lookup * resources from the bundles . A temporary class loader interface is set * during other configuration loading as well * @ return ClassLoaderInterface ( BundleClassLoaderInterface ) */ private ClassLoaderInterface getClassLoader ( ) { } }
Map < String , Object > application = ActionContext . getContext ( ) . getApplication ( ) ; if ( application != null ) { return ( ClassLoaderInterface ) application . get ( ClassLoaderInterface . CLASS_LOADER_INTERFACE ) ; } return null ;
public class AWSElasticBeanstalkClient { /** * Update the list of tags applied to an AWS Elastic Beanstalk resource . Two lists can be passed : * < code > TagsToAdd < / code > for tags to add or update , and < code > TagsToRemove < / code > . * Currently , Elastic Beanstalk only supports tagging of Elastic Beanstalk environments . For details about * environment tagging , see < a * href = " https : / / docs . aws . amazon . com / elasticbeanstalk / latest / dg / using - features . tagging . html " > Tagging Resources in * Your Elastic Beanstalk Environment < / a > . * If you create a custom IAM user policy to control permission to this operation , specify one of the following two * virtual actions ( or both ) instead of the API operation name : * < dl > * < dt > elasticbeanstalk : AddTags < / dt > * < dd > * Controls permission to call < code > UpdateTagsForResource < / code > and pass a list of tags to add in the * < code > TagsToAdd < / code > parameter . * < / dd > * < dt > elasticbeanstalk : RemoveTags < / dt > * < dd > * Controls permission to call < code > UpdateTagsForResource < / code > and pass a list of tag keys to remove in the * < code > TagsToRemove < / code > parameter . * < / dd > * < / dl > * For details about creating a custom user policy , see < a href = * " https : / / docs . aws . amazon . com / elasticbeanstalk / latest / dg / AWSHowTo . iam . managed - policies . html # AWSHowTo . iam . policies " * > Creating a Custom User Policy < / a > . * @ param updateTagsForResourceRequest * @ return Result of the UpdateTagsForResource operation returned by the service . * @ throws InsufficientPrivilegesException * The specified account does not have sufficient privileges for one or more AWS services . * @ throws OperationInProgressException * Unable to perform the specified operation because another operation that effects an element in this * activity is already in progress . 
* @ throws TooManyTagsException * The number of tags in the resource would exceed the number of tags that each resource can have . < / p > * To calculate this , the operation considers both the number of tags the resource already has and the tags * this operation would add if it succeeded . * @ throws ResourceNotFoundException * A resource doesn ' t exist for the specified Amazon Resource Name ( ARN ) . * @ throws ResourceTypeNotSupportedException * The type of the specified Amazon Resource Name ( ARN ) isn ' t supported for this operation . * @ sample AWSElasticBeanstalk . UpdateTagsForResource * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticbeanstalk - 2010-12-01 / UpdateTagsForResource " * target = " _ top " > AWS API Documentation < / a > */ @ Override public UpdateTagsForResourceResult updateTagsForResource ( UpdateTagsForResourceRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdateTagsForResource ( request ) ;
public class RadixSort { /** * Specialization of sort ( ) for key - prefix arrays . In this type of array , each record consists * of two longs , only the second of which is sorted on . * @ param startIndex starting index in the array to sort from . This parameter is not supported * in the plain sort ( ) implementation . */ public static int sortKeyPrefixArray ( LongArray array , long startIndex , long numRecords , int startByteIndex , int endByteIndex , boolean desc , boolean signed ) { } }
assert startByteIndex >= 0 : "startByteIndex (" + startByteIndex + ") should >= 0" ; assert endByteIndex <= 7 : "endByteIndex (" + endByteIndex + ") should <= 7" ; assert endByteIndex > startByteIndex ; assert numRecords * 4 <= array . size ( ) ; long inIndex = startIndex ; long outIndex = startIndex + numRecords * 2L ; if ( numRecords > 0 ) { long [ ] [ ] counts = getKeyPrefixArrayCounts ( array , startIndex , numRecords , startByteIndex , endByteIndex ) ; for ( int i = startByteIndex ; i <= endByteIndex ; i ++ ) { if ( counts [ i ] != null ) { sortKeyPrefixArrayAtByte ( array , numRecords , counts [ i ] , i , inIndex , outIndex , desc , signed && i == endByteIndex ) ; long tmp = inIndex ; inIndex = outIndex ; outIndex = tmp ; } } } return Ints . checkedCast ( inIndex ) ;
public class Intersectionf { /** * Test whether the ray with given origin < code > ( originX , originY ) < / code > and direction < code > ( dirX , dirY ) < / code > intersects the line * containing the given point < code > ( pointX , pointY ) < / code > and having the normal < code > ( normalX , normalY ) < / code > , and return the * value of the parameter < i > t < / i > in the ray equation < i > p ( t ) = origin + t * dir < / i > of the intersection point . * This method returns < code > - 1.0 < / code > if the ray does not intersect the line , because it is either parallel to the line or its direction points * away from the line or the ray ' s origin is on the < i > negative < / i > side of the line ( i . e . the line ' s normal points away from the ray ' s origin ) . * @ param originX * the x coordinate of the ray ' s origin * @ param originY * the y coordinate of the ray ' s origin * @ param dirX * the x coordinate of the ray ' s direction * @ param dirY * the y coordinate of the ray ' s direction * @ param pointX * the x coordinate of a point on the line * @ param pointY * the y coordinate of a point on the line * @ param normalX * the x coordinate of the line ' s normal * @ param normalY * the y coordinate of the line ' s normal * @ param epsilon * some small epsilon for when the ray is parallel to the line * @ return the value of the parameter < i > t < / i > in the ray equation < i > p ( t ) = origin + t * dir < / i > of the intersection point , if the ray * intersects the line ; < code > - 1.0 < / code > otherwise */ public static float intersectRayLine ( float originX , float originY , float dirX , float dirY , float pointX , float pointY , float normalX , float normalY , float epsilon ) { } }
float denom = normalX * dirX + normalY * dirY ; if ( denom < epsilon ) { float t = ( ( pointX - originX ) * normalX + ( pointY - originY ) * normalY ) / denom ; if ( t >= 0.0f ) return t ; } return - 1.0f ;
public class MessageBirdClient { /** * Starts a conversation by sending an initial message . * @ param request Data for this request . * @ return The created Conversation . */ public Conversation startConversation ( ConversationStartRequest request ) throws UnauthorizedException , GeneralException { } }
String url = String . format ( "%s%s/start" , CONVERSATIONS_BASE_URL , CONVERSATION_PATH ) ; return messageBirdService . sendPayLoad ( url , request , Conversation . class ) ;
public class ProxiedFileSystemCache { /** * Cached version of { @ link ProxiedFileSystemUtils # createProxiedFileSystemUsingToken ( String , Token , URI , Configuration ) } . * @ deprecated use { @ link # fromToken } . */ @ Deprecated public static FileSystem getProxiedFileSystemUsingToken ( @ NonNull final String userNameToProxyAs , final Token < ? > userNameToken , final URI fsURI , final Configuration conf ) throws ExecutionException { } }
try { return getProxiedFileSystemUsingToken ( userNameToProxyAs , userNameToken , fsURI , conf , null ) ; } catch ( IOException ioe ) { throw new ExecutionException ( ioe ) ; }
public class RolloutGroupConditionBuilder { /** * Sets condition defaults . * @ return the builder itself */ public RolloutGroupConditionBuilder withDefaults ( ) { } }
successCondition ( RolloutGroupSuccessCondition . THRESHOLD , "50" ) ; successAction ( RolloutGroupSuccessAction . NEXTGROUP , "" ) ; errorCondition ( RolloutGroupErrorCondition . THRESHOLD , "50" ) ; errorAction ( RolloutGroupErrorAction . PAUSE , "" ) ; return this ;
public class Config { /** * Loads the static class variables for values that are called often . This * should be called any time the configuration changes . */ public void loadStaticVariables ( ) { } }
auto_metric = this . getBoolean ( "tsd.core.auto_create_metrics" ) ; auto_tagk = this . getBoolean ( "tsd.core.auto_create_tagks" ) ; auto_tagv = this . getBoolean ( "tsd.core.auto_create_tagvs" ) ; enable_compactions = this . getBoolean ( "tsd.storage.enable_compaction" ) ; enable_appends = this . getBoolean ( "tsd.storage.enable_appends" ) ; repair_appends = this . getBoolean ( "tsd.storage.repair_appends" ) ; enable_chunked_requests = this . getBoolean ( "tsd.http.request.enable_chunked" ) ; enable_realtime_ts = this . getBoolean ( "tsd.core.meta.enable_realtime_ts" ) ; enable_realtime_uid = this . getBoolean ( "tsd.core.meta.enable_realtime_uid" ) ; enable_tsuid_incrementing = this . getBoolean ( "tsd.core.meta.enable_tsuid_incrementing" ) ; enable_tsuid_tracking = this . getBoolean ( "tsd.core.meta.enable_tsuid_tracking" ) ; if ( this . hasProperty ( "tsd.http.request.max_chunk" ) ) { max_chunked_requests = this . getInt ( "tsd.http.request.max_chunk" ) ; } if ( this . hasProperty ( "tsd.http.header_tag" ) ) { http_header_tag = this . getString ( "tsd.http.header_tag" ) ; } enable_tree_processing = this . getBoolean ( "tsd.core.tree.enable_processing" ) ; fix_duplicates = this . getBoolean ( "tsd.storage.fix_duplicates" ) ; scanner_max_num_rows = this . getInt ( "tsd.storage.hbase.scanner.maxNumRows" ) ; use_otsdb_timestamp = this . getBoolean ( "tsd.storage.use_otsdb_timestamp" ) ; get_date_tiered_compaction_start = this . getLong ( "tsd.storage.get_date_tiered_compaction_start" ) ; use_max_value = this . getBoolean ( "tsd.storage.use_max_value" ) ;
public class ChineseCalendar { /** * Adjust this calendar to be delta months before or after a given * start position , pinning the day of month if necessary . The start * position is given as a local days number for the start of the month * and a day - of - month . Used by add ( ) and roll ( ) . * @ param newMoon the local days of the first day of the month of the * start position ( days after January 1 , 1970 0:00 Asia / Shanghai ) * @ param dom the 1 - based day - of - month of the start position * @ param delta the number of months to move forward or backward from * the start position */ private void offsetMonth ( int newMoon , int dom , int delta ) { } }
// Move to the middle of the month before our target month . newMoon += ( int ) ( CalendarAstronomer . SYNODIC_MONTH * ( delta - 0.5 ) ) ; // Search forward to the target month ' s new moon newMoon = newMoonNear ( newMoon , true ) ; // Find the target dom int jd = newMoon + EPOCH_JULIAN_DAY - 1 + dom ; // Pin the dom . In this calendar all months are 29 or 30 days // so pinning just means handling dom 30. if ( dom > 29 ) { set ( JULIAN_DAY , jd - 1 ) ; // TODO Fix this . We really shouldn ' t ever have to // explicitly call complete ( ) . This is either a bug in // this method , in ChineseCalendar , or in // Calendar . getActualMaximum ( ) . I suspect the last . complete ( ) ; if ( getActualMaximum ( DAY_OF_MONTH ) >= dom ) { set ( JULIAN_DAY , jd ) ; } } else { set ( JULIAN_DAY , jd ) ; }
public class LogPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getUserRemovedFromProject ( ) { } }
if ( userRemovedFromProjectEClass == null ) { userRemovedFromProjectEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( LogPackage . eNS_URI ) . getEClassifiers ( ) . get ( 16 ) ; } return userRemovedFromProjectEClass ;
public class JmsMessage { /** * Gets the JMS type header . * @ return */ public String getType ( ) { } }
Object type = getHeader ( JmsMessageHeaders . TYPE ) ; if ( type != null ) { return type . toString ( ) ; } return null ;
public class BaseDenseHog { /** * Specifies input image . Gradient is computed immediately * @ param input input image */ public void setInput ( I input ) { } }
derivX . reshape ( input . width , input . height ) ; derivY . reshape ( input . width , input . height ) ; // pixel gradient gradient . process ( input , derivX , derivY ) ;
public class AbstractUserObject { /** * Assign this user object to the given JAAS system under the given JAAS * key . * @ param _ jaasSystem JAAS system to which the person is assigned * @ param _ jaasKey key under which the person is know in the JAAS * system * @ throws EFapsException if the assignment could not be made */ @ SuppressFBWarnings ( "SQL_PREPARED_STATEMENT_GENERATED_FROM_NONCONSTANT_STRING" ) public void assignToJAASSystem ( final JAASSystem _jaasSystem , final String _jaasKey ) throws EFapsException { } }
Connection con = null ; try { final Context context = Context . getThreadContext ( ) ; con = Context . getConnection ( ) ; final Type keyType = CIAdminUser . JAASKey . getType ( ) ; PreparedStatement stmt = null ; final StringBuilder cmd = new StringBuilder ( ) ; try { long keyId = 0 ; if ( Context . getDbType ( ) . supportsGetGeneratedKeys ( ) ) { cmd . append ( "insert into " ) . append ( keyType . getMainTable ( ) . getSqlTable ( ) ) . append ( "(JAASKEY,CREATOR,CREATED,MODIFIER,MODIFIED," ) . append ( "USERABSTRACT,USERJAASSYSTEM) " ) . append ( "values (" ) ; } else { keyId = Context . getDbType ( ) . getNewId ( new ConnectionResource ( con ) , keyType . getMainTable ( ) . getSqlTable ( ) , "ID" ) ; cmd . append ( "insert into " ) . append ( keyType . getMainTable ( ) . getSqlTable ( ) ) . append ( "(ID,JAASKEY,CREATOR,CREATED,MODIFIER,MODIFIED," ) . append ( "USERABSTRACT,USERJAASSYSTEM) " ) . append ( "values (" ) . append ( keyId ) . append ( "," ) ; } cmd . append ( "'" ) . append ( _jaasKey ) . append ( "'," ) . append ( context . getPersonId ( ) ) . append ( "," ) . append ( Context . getDbType ( ) . getCurrentTimeStamp ( ) ) . append ( "," ) . append ( context . getPersonId ( ) ) . append ( "," ) . append ( Context . getDbType ( ) . getCurrentTimeStamp ( ) ) . append ( "," ) . append ( getId ( ) ) . append ( "," ) . append ( _jaasSystem . getId ( ) ) . append ( ")" ) ; stmt = con . prepareStatement ( cmd . toString ( ) ) ; final int rows = stmt . executeUpdate ( ) ; if ( rows == 0 ) { AbstractUserObject . LOG . error ( "could not execute '" + cmd . toString ( ) + "' for JAAS system '" + _jaasSystem . getName ( ) + "' for user object '" + toString ( ) + "' with JAAS key '" + _jaasKey + "'" ) ; throw new EFapsException ( getClass ( ) , "assignToJAASSystem.NotInserted" , _jaasSystem . getName ( ) , _jaasKey , toString ( ) ) ; } } catch ( final SQLException e ) { AbstractUserObject . LOG . error ( "could not execute '" + cmd . 
toString ( ) + "' to assign user object '" + toString ( ) + "' with JAAS key '" + _jaasKey + "' to JAAS system '" + _jaasSystem . getName ( ) + "'" , e ) ; throw new EFapsException ( getClass ( ) , "assignToJAASSystem.SQLException" , e , cmd . toString ( ) , _jaasSystem . getName ( ) , _jaasKey , toString ( ) ) ; } finally { try { if ( stmt != null ) { stmt . close ( ) ; } con . commit ( ) ; } catch ( final SQLException e ) { AbstractUserObject . LOG . error ( "Could not close a statement." , e ) ; } } } finally { try { if ( con != null && ! con . isClosed ( ) ) { con . close ( ) ; } } catch ( final SQLException e ) { AbstractUserObject . LOG . error ( "Could not close a connection." , e ) ; } }
public class ColorHolder { /** * a small helper to get the color from the colorHolder * @ param ctx * @ return */ public int color ( Context ctx ) { } }
if ( mColorInt == 0 && mColorRes != - 1 ) { mColorInt = ContextCompat . getColor ( ctx , mColorRes ) ; } return mColorInt ;
public class DefaultEffector { /** * mergeEffects merges all matching results collected by the enforcer into a single decision . */ public boolean mergeEffects ( String expr , Effect [ ] effects , float [ ] results ) { } }
boolean result ; if ( expr . equals ( "some(where (p_eft == allow))" ) ) { result = false ; for ( Effect eft : effects ) { if ( eft == Effect . Allow ) { result = true ; break ; } } } else if ( expr . equals ( "!some(where (p_eft == deny))" ) ) { result = true ; for ( Effect eft : effects ) { if ( eft == Effect . Deny ) { result = false ; break ; } } } else if ( expr . equals ( "some(where (p_eft == allow)) && !some(where (p_eft == deny))" ) ) { result = false ; for ( Effect eft : effects ) { if ( eft == Effect . Allow ) { result = true ; } else if ( eft == Effect . Deny ) { result = false ; break ; } } } else if ( expr . equals ( "priority(p_eft) || deny" ) ) { result = false ; for ( Effect eft : effects ) { if ( eft != Effect . Indeterminate ) { if ( eft == Effect . Allow ) { result = true ; } else { result = false ; } break ; } } } else { throw new Error ( "unsupported effect" ) ; } return result ;
public class UserPreferences { /** * Put hash table . * @ param name the name * @ param hash the hash */ public static void putHashTable ( final String name , final Hashtable hash ) { } }
final Enumeration < String > keys = hash . keys ( ) ; final StringBuffer buf = new StringBuffer ( "" ) ; while ( keys . hasMoreElements ( ) ) { if ( ! buf . toString ( ) . equals ( "" ) ) { // end the previous record buf . append ( ";" ) ; } final String key = keys . nextElement ( ) ; final String value = hash . get ( key ) . toString ( ) ; buf . append ( key + ":" + value ) ; } put ( fixKey ( name ) , buf . toString ( ) ) ;
public class D6Crud { /** * Execute the SQL for number search < br > * @ param preparedSql * @ param searchKeys * @ return number of result */ public int execSelectCount ( String preparedSql , Object [ ] searchKeys ) { } }
log ( "#execSelectCount preparedSql=" + preparedSql + " searchKeys=" + searchKeys ) ; int retVal = 0 ; PreparedStatement preparedStmt = null ; ResultSet rs = null ; final Connection conn = createConnection ( ) ; try { preparedStmt = conn . prepareStatement ( preparedSql , ResultSet . TYPE_SCROLL_INSENSITIVE , ResultSet . CONCUR_READ_ONLY ) ; final StringBuilder logSb = new StringBuilder ( ) ; if ( searchKeys != null ) { logSb . append ( "/ " ) ; for ( int i = 0 ; i < searchKeys . length ; i ++ ) { setObject ( ( i + 1 ) , preparedStmt , searchKeys [ i ] ) ; logSb . append ( "key(" + ( i + 1 ) + ")=" + searchKeys [ i ] ) ; logSb . append ( " " ) ; } } log ( "#execSelectCount SQL=" + preparedSql + " " + logSb . toString ( ) ) ; // execute SQL rs = preparedStmt . executeQuery ( ) ; while ( rs . next ( ) ) { retVal = rs . getInt ( 1 ) ; } } catch ( Exception e ) { loge ( "#execSelectCount" , e ) ; } finally { try { if ( rs != null ) { rs . close ( ) ; } if ( preparedStmt != null ) { preparedStmt . close ( ) ; } if ( conn != null ) { conn . close ( ) ; } } catch ( SQLException e ) { loge ( "#execSelectCount" , e ) ; } } return retVal ;
public class XmlEscape {
    /**
     * Performs an XML 1.0 level-2 (markup-significant plus all non-ASCII chars)
     * <strong>escape</strong> on a {@code Reader} meant to be an XML attribute
     * value, writing the result to a {@code Writer}.
     *
     * Level 2 escapes the five markup-significant characters
     * (&lt; &gt; &amp; &quot; &#39;) and all non-ASCII characters, using
     * Character Entity References when one exists and hexadecimal character
     * references otherwise. Because the text is an attribute value,
     * {@code \t}, {@code \n} and {@code \r} are also escaped so future parsing
     * does not normalize them away as white space.
     *
     * This method is <strong>thread-safe</strong>. Nothing is written if the
     * input is {@code null}.
     *
     * @param reader the {@code Reader} supplying the text to be escaped
     * @param writer the {@code Writer} receiving the escaped result
     * @throws IOException if an input/output exception occurs
     * @since 1.1.5
     */
    public static void escapeXml10Attribute(final Reader reader, final Writer writer) throws IOException {
        // Delegate with the attribute-specific symbol table (which also covers
        // \t, \n, \r) at the preconfigured type/level for this convenience API.
        escapeXml(reader, writer, XmlEscapeSymbols.XML10_ATTRIBUTE_SYMBOLS, XmlEscapeType.CHARACTER_ENTITY_REFERENCES_DEFAULT_TO_HEXA, XmlEscapeLevel.LEVEL_2_ALL_NON_ASCII_PLUS_MARKUP_SIGNIFICANT);
    }
}
public class PropertiesUtils { /** * printing - - - - - */ public static void printProperties ( String message , Properties properties , PrintStream stream ) { } }
if ( message != null ) { stream . println ( message ) ; } if ( properties . isEmpty ( ) ) { stream . println ( " [empty]" ) ; } else { List < Map . Entry < String , String > > entries = getSortedEntries ( properties ) ; for ( Map . Entry < String , String > entry : entries ) { if ( ! "" . equals ( entry . getKey ( ) ) ) { stream . format ( " %-30s = %s%n" , entry . getKey ( ) , entry . getValue ( ) ) ; } } } stream . println ( ) ;
public class CmsPreEditorAction {
    /**
     * Returns if an action has to be performed before opening the editor,
     * depending on the resource to edit and request parameter values.
     *
     * @return true if an action has to be performed; the editor frameset is
     *         then not generated
     */
    public boolean doPreAction() {
        String resourceName = getParamResource();
        try {
            boolean preActionDone = Boolean.valueOf(getParamPreActionDone()).booleanValue();
            if (!preActionDone) {
                // Pre-editor action not executed yet: look up a pre-action
                // definition registered for this resource's type.
                CmsResource resource = getCms().readResource(resourceName, CmsResourceFilter.ALL);
                I_CmsResourceType type = OpenCms.getResourceManager().getResourceType(resource.getTypeId());
                I_CmsPreEditorActionDefinition preAction = OpenCms.getWorkplaceManager().getPreEditorConditionDefinition(type);
                if (preAction != null) {
                    // The definition itself decides whether action is required.
                    return preAction.doPreAction(resource, this, getOriginalParams());
                }
            }
        } catch (Exception e) {
            // Failures fall through to "no pre action" so the editor still opens.
            if (LOG.isErrorEnabled()) {
                LOG.error(e.getLocalizedMessage(), e);
            }
        }
        // Nothing to be done as pre action: open the editor.
        return false;
    }
}
public class TxtStorer { /** * Prints one line to the given writer ; the line includes path to file and * hash . Subclasses may include more fields . */ protected void printLine ( State state , Writer pw , String externalForm , String hash ) throws IOException { } }
pw . write ( externalForm ) ; pw . write ( SEPARATOR ) ; pw . write ( hash ) ; pw . write ( '\n' ) ;
public class Types { /** * Find the type of an object boxed or not . * @ param arg the object to query . * @ param boxed whether to get a primitive or boxed type . * @ return null if arg null , type of Primitive or getClass . */ public static Class < ? > getType ( Object arg , boolean boxed ) { } }
if ( null == arg || Primitive . NULL == arg ) return null ; if ( arg instanceof Primitive && ! boxed ) return ( ( Primitive ) arg ) . getType ( ) ; return Primitive . unwrap ( arg ) . getClass ( ) ;
public class Constraints {
    /**
     * Gets a {@link Predicate} for testing entity objects.
     *
     * @param <E> the type of entities to be matched
     * @param accessor adapter used to read fields from entity instances
     * @return a Predicate testing whether entities satisfy this constraint set
     */
    public <E> Predicate<E> toEntityPredicate(EntityAccessor<E> accessor) {
        // Delegates to the shared factory using this set's constraints,
        // schema and evaluation strategy.
        return entityPredicate(constraints, schema, accessor, strategy);
    }
}
public class DefaultAuthenticationTransaction { /** * Wrap credentials into an authentication transaction , as a factory method , * and return the final result . * @ param service the service * @ param credentials the credentials * @ return the authentication transaction */ public static DefaultAuthenticationTransaction of ( final Service service , final Credential ... credentials ) { } }
val creds = sanitizeCredentials ( credentials ) ; return new DefaultAuthenticationTransaction ( service , creds ) ;
public class XMLUtil {
    /**
     * Replies the value that corresponds to the specified attribute's path.
     *
     * The path is an ordered list of tag names ended by the attribute's name.
     * Elements before the last one are matched against child node names
     * (recursively); the last element names the attribute itself.
     *
     * @param document      the XML node to explore
     * @param caseSensitive whether the path components are case sensitive
     * @param idxStart      index of the first path element to use
     * @param path          tag names ended by the attribute's name
     * @return the attribute's value, or {@code null} if not found
     */
    @Pure
    @SuppressWarnings("checkstyle:cyclomaticcomplexity")
    private static String getAttributeValue(Node document, boolean caseSensitive, int idxStart, String... path) {
        assert document != null : AssertMessages.notNullParameter(0);
        assert path != null && (path.length - idxStart) >= 0 : AssertMessages.invalidValue(2);
        if ((path.length - idxStart) > 1) {
            // More than one component left: recurse into children whose name
            // matches the current path component; first non-null value wins.
            final NodeList nodes = document.getChildNodes();
            final int len = nodes.getLength();
            for (int i = 0; i < len; ++i) {
                final Node node = nodes.item(i);
                if (node != null) {
                    final String name = node.getNodeName();
                    if (name != null && ((caseSensitive && name.equals(path[idxStart])) || (!caseSensitive && name.equalsIgnoreCase(path[idxStart])))) {
                        final String value = getAttributeValue(node, caseSensitive, idxStart + 1, path);
                        if (value != null) {
                            return value;
                        }
                    }
                }
            }
        } else if (document instanceof Element) {
            // Last component on an Element: case-sensitive lookup can use the
            // direct accessor; otherwise scan the attribute map manually.
            if (caseSensitive) {
                return ((Element) document).getAttribute(path[idxStart]);
            }
            final NamedNodeMap map = ((Element) document).getAttributes();
            final int len = map.getLength();
            for (int i = 0; i < len; ++i) {
                final Node node = map.item(i);
                if (node instanceof Attr) {
                    final Attr attr = (Attr) node;
                    final String name = attr.getName();
                    if (name != null && name.equalsIgnoreCase(path[idxStart])) {
                        final String value = attr.getValue();
                        if (value != null) {
                            return value;
                        }
                    }
                }
            }
        } else {
            // Last component on a non-Element node: scan its attribute map,
            // honoring the requested case sensitivity.
            final NamedNodeMap attrs = document.getAttributes();
            if (attrs != null) {
                final int len = attrs.getLength();
                for (int idxAttr = 0; idxAttr < len; ++idxAttr) {
                    final Node node = attrs.item(idxAttr);
                    final String name = node.getNodeName();
                    if (name != null && ((caseSensitive && name.equals(path[idxStart])) || (!caseSensitive && name.equalsIgnoreCase(path[idxStart])))) {
                        return node.getNodeValue();
                    }
                }
            }
        }
        return null;
    }
}
public class BaseHolder { /** * Handle the command send from my client peer . * @ param in The ( optional ) Inputstream to get the params from . * @ param out The stream to write the results . * @ param properties Temporary session properties . */ public void doProcess ( InputStream in , PrintWriter out , Map < String , Object > properties ) throws RemoteException { } }
Utility . getLogger ( ) . warning ( "Command not handled: " + this . getProperty ( REMOTE_COMMAND , properties ) ) ;
public class AssetUtil { /** * Helper to convert from java package name to class loader package name < br / > * < br / > * ie : javax . test + my . txt = javax / test / + my . txt * @ param resourcePackage * The base package * @ param resourceName * The resource inside the package . * @ return { @ link ClassLoader } resource location */ public static String getClassLoaderResourceName ( Package resourcePackage , String resourceName ) { } }
String resourcePackaeName = resourcePackage . getName ( ) . replaceAll ( DELIMITER_CLASS_NAME_PATH , DELIMITER_RESOURCE_PATH ) ; return resourcePackaeName + DELIMITER_RESOURCE_PATH + resourceName ;
public class HostCertificateManager {
    /**
     * Requests the server to generate a certificate-signing request (CSR) for
     * itself. The CSR is then typically provided to a Certificate Authority to
     * sign and issue the SSL certificate for the server. Use
     * InstallServerCertificate to install this certificate.
     *
     * @param useIpAddressAsCommonName if true, use the host's management IP
     *        address as CN in the CSR; otherwise use the host's FQDN
     * @return CSR in PEM format
     * @throws HostConfigFault on host configuration errors
     * @throws RuntimeFault on generic server faults
     * @throws RemoteException on transport failures
     */
    public String generateCertificateSigningRequest(boolean useIpAddressAsCommonName) throws HostConfigFault, RuntimeFault, RemoteException {
        // Thin delegation to the vSphere web service for this managed object.
        return getVimService().generateCertificateSigningRequest(getMOR(), useIpAddressAsCommonName);
    }
}
public class ProfilePictureView { /** * Apply a preset size to this profile photo * @ param sizeType The size type to apply : SMALL , NORMAL or LARGE */ public final void setPresetSize ( int sizeType ) { } }
switch ( sizeType ) { case SMALL : case NORMAL : case LARGE : case CUSTOM : this . presetSizeType = sizeType ; break ; default : throw new IllegalArgumentException ( "Must use a predefined preset size" ) ; } requestLayout ( ) ;
public class SAXProcessor {
    /**
     * Converts the input time series into a SAX data structure via sliding
     * window and Z-normalization.
     *
     * @param ts         the input data
     * @param windowSize the sliding window size
     * @param paaSize    the PAA size
     * @param cuts       the alphabet cuts
     * @param strategy   the numerosity-reduction strategy
     * @param nThreshold the normalization threshold value
     * @return SAX representation of the time series
     * @throws SAXException if the window is longer than the series
     */
    public SAXRecords ts2saxViaWindow(double[] ts, int windowSize, int paaSize, double[] cuts, NumerosityReductionStrategy strategy, double nThreshold) throws SAXException {
        if (windowSize > ts.length) {
            throw new SAXException("Unable to saxify via window, window size is greater than the timeseries length...");
        }
        // the resulting data structure init
        SAXRecords saxFrequencyData = new SAXRecords();
        // Scan across the time series, extract subsequences, convert to strings.
        // previousString drives numerosity reduction: repeated words are dropped.
        char[] previousString = null;
        for (int i = 0; i <= ts.length - windowSize; i++) {
            // fix the current subsection
            double[] subSection = Arrays.copyOfRange(ts, i, i + windowSize);
            // Z normalize it
            subSection = tsProcessor.znorm(subSection, nThreshold);
            // perform PAA conversion if needed
            double[] paa = tsProcessor.paa(subSection, paaSize);
            // Convert the PAA to a string.
            char[] currentString = tsProcessor.ts2String(paa, cuts);
            if (null != previousString) {
                if (NumerosityReductionStrategy.EXACT.equals(strategy) && Arrays.equals(previousString, currentString)) {
                    // EXACT: skip a word identical to the previous one.
                    continue;
                } else if (NumerosityReductionStrategy.MINDIST.equals(strategy) && checkMinDistIsZero(previousString, currentString)) {
                    // MINDIST: skip when the SAX min-distance to the previous word is zero.
                    continue;
                }
            }
            // NOTE: previousString is only updated for words that are kept,
            // so runs of similar words collapse onto the first occurrence.
            previousString = currentString;
            saxFrequencyData.add(currentString, i);
        }
        return saxFrequencyData;
    }
}
public class ChannelConsumer {
    /**
     * Resolves the channel by name.
     *
     * Lazily installs a {@code BeanFactoryChannelResolver} on the endpoint
     * configuration on first use, preferring the configuration's own bean
     * factory and falling back to the test context's application context.
     *
     * @param channelName the name to resolve
     * @param context     the current test context (fallback bean source)
     * @return the resolved MessageChannel object
     */
    protected MessageChannel resolveChannelName(String channelName, TestContext context) {
        if (endpointConfiguration.getChannelResolver() == null) {
            // First call: build and cache a resolver on the configuration
            // (side effect intentional — subsequent calls reuse it).
            if (endpointConfiguration.getBeanFactory() != null) {
                endpointConfiguration.setChannelResolver(new BeanFactoryChannelResolver(endpointConfiguration.getBeanFactory()));
            } else {
                endpointConfiguration.setChannelResolver(new BeanFactoryChannelResolver(context.getApplicationContext()));
            }
        }
        return endpointConfiguration.getChannelResolver().resolveDestination(channelName);
    }
}
public class Input {
    /**
     * Creates an InputReader over a character stream.
     *
     * @param in     the source reader
     * @param shared shared ring buffer; its length sizes the featured reader
     * @return a ReadableInput wrapping the (feature-less) reader
     */
    public static InputReader getInstance(Reader in, char[] shared) {
        // This overload uses no parser features; the same set is passed to
        // both the featured reader and the ReadableInput for consistency.
        Set<ParserFeature> features = NO_FEATURES;
        return new ReadableInput(getFeaturedReader(in, shared.length, features), shared, features);
    }
}
public class Thing { /** * Returns the device ' s metadata . * @ return a map */ @ JsonIgnore public Map < String , Object > getDeviceMetadata ( ) { } }
if ( deviceMetadata == null ) { deviceMetadata = new LinkedHashMap < > ( 20 ) ; } return deviceMetadata ;
public class Single {
    /**
     * Signals true if the current Single signals a success value that is equal
     * with the value provided, by calling a bi-predicate.
     *
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code contains} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param value    the value to compare against the success value of this Single
     * @param comparer receives the success value and {@code value}; returns
     *                 true if they are considered equal
     * @return the new Single instance
     * @since 2.0
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final Single<Boolean> contains(final Object value, final BiPredicate<Object, Object> comparer) {
        // Fail fast on null arguments, per RxJava 2 contract.
        ObjectHelper.requireNonNull(value, "value is null");
        ObjectHelper.requireNonNull(comparer, "comparer is null");
        // Route through the assembly hook so plugins can decorate the operator.
        return RxJavaPlugins.onAssembly(new SingleContains<T>(this, value, comparer));
    }
}
public class Restrictors {
    /**
     * Combines two restrictors with a logical AND.
     *
     * @param c1 left-hand restrictor
     * @param c2 right-hand restrictor
     * @return a restrictor satisfied only when both operands are satisfied
     */
    public static Restrictor and(Restrictor c1, Restrictor c2) {
        return new LogicRestrictor(c1, c2, RestrictType.and);
    }
}
public class SourceStream {
    /**
     * Gets an unmodifiable list of all of the message items in the VALUE state
     * on this stream and, optionally, in the Uncommitted state.
     *
     * @param includeUncommitted whether Uncommitted ticks (with a message
     *                           attached) should also be returned
     * @return unmodifiable list of cloned TickRanges in stream order
     */
    public synchronized List<TickRange> getAllMessageItemsOnStream(boolean includeUncommitted) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "getAllMessageItemsOnStream", Boolean.valueOf(includeUncommitted));
        List<TickRange> msgs = new LinkedList<TickRange>();
        // Walk the whole stream from the start.
        oststream.setCursor(0);
        // Get the first TickRange
        TickRange tr = oststream.getNext();
        // The terminating range ends at INFINITY.
        while (tr.endstamp < RangeList.INFINITY) {
            if (tr.type == TickRange.Value) {
                // Clone so callers cannot mutate the live stream state.
                msgs.add((TickRange) tr.clone());
            } else if (tr.type == TickRange.Uncommitted && includeUncommitted) {
                // Only include Uncommitted ticks that actually carry a value.
                if (tr.value != null)
                    msgs.add((TickRange) tr.clone());
            }
            tr = oststream.getNext();
        } // end while
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getAllMessageItemsOnStream", msgs);
        return Collections.unmodifiableList(msgs);
    }
}
public class HttpDecodingClient {
    /**
     * Creates a new {@link HttpDecodingClient} decorator with the specified
     * {@link StreamDecoderFactory}s.
     *
     * @param decoderFactories factories for the content decodings to support
     * @return a decorator function wrapping delegates in an HttpDecodingClient
     */
    public static Function<Client<HttpRequest, HttpResponse>, HttpDecodingClient> newDecorator(Iterable<? extends StreamDecoderFactory> decoderFactories) {
        return client -> new HttpDecodingClient(client, decoderFactories);
    }
}
public class WebSocketScopeManager { /** * Returns the enable state of a given path . * @ param path * scope / context path * @ return enabled if registered as active and false otherwise */ public boolean isEnabled ( String path ) { } }
if ( path . startsWith ( "/" ) ) { // start after the leading slash int roomSlashPos = path . indexOf ( '/' , 1 ) ; if ( roomSlashPos == - 1 ) { // check application level scope path = path . substring ( 1 ) ; } else { // check room level scope path = path . substring ( 1 , roomSlashPos ) ; } } boolean enabled = activeRooms . contains ( path ) ; log . debug ( "Enabled check on path: {} enabled: {}" , path , enabled ) ; return enabled ;
public class CapacityCommand { /** * Generates capacity report . * @ param options GetWorkerReportOptions to get worker report */ public void generateCapacityReport ( GetWorkerReportOptions options ) throws IOException { } }
List < WorkerInfo > workerInfoList = mBlockMasterClient . getWorkerReport ( options ) ; if ( workerInfoList . size ( ) == 0 ) { print ( "No workers found." ) ; return ; } Collections . sort ( workerInfoList , new WorkerInfo . LastContactSecComparator ( ) ) ; collectWorkerInfo ( workerInfoList ) ; printAggregatedInfo ( options ) ; printWorkerInfo ( workerInfoList ) ;
public class ColumnInfo {
    /**
     * Makes a column-info instance from a Java Field annotated with
     * {@code @CsvField}, expanding the annotation's attributes into the
     * low-level factory call.
     *
     * @param csvField  the annotation carrying the column configuration
     * @param fieldName name of the annotated field
     * @param type      the column's value type
     * @param field     the reflective field
     * @param getMethod getter method, may be null
     * @param setMethod setter method, may be null
     * @param converter value converter for the column
     * @return the constructed column info
     */
    public static <T> ColumnInfo<T> fromAnnotation(CsvField csvField, String fieldName, Class<T> type, Field field, Method getMethod, Method setMethod, Converter<T, ?> converter) {
        // NOTE: "fromAnnoation" (sic) is the existing helper's actual name
        // elsewhere in this library; it cannot be renamed here.
        return fromAnnoation(csvField.converterClass(), csvField.format(), csvField.converterFlags(), csvField.columnName(), csvField.defaultValue(), null, csvField.mustNotBeBlank(), csvField.mustBeSupplied(), csvField.trimInput(), fieldName, type, field, getMethod, setMethod, converter);
    }
}
public class ConvertUtils { /** * Konwertuje struień wejściowy na tablicę bajtów . * @ param stream Strumień wejściowy do konwersji . * @ return Tablica bajtów odczytanych ze strumienia . * @ throws IOException W przypadku błędu odczytu strumienia . */ public static byte [ ] toByteArray ( InputStream stream ) throws IOException { } }
// Wyznaczanie rozmiaru strumienia : long length = stream . available ( ) ; // Nie można utworzyć tablicy używając typu long . Wymagany jest do tego // typ int . Przed konwersją sprawdzamy , czy strumień nie jest zbyt duży : if ( length > Integer . MAX_VALUE ) { throw new RuntimeException ( "Stream is to large." ) ; } // Tworzymy tablicę do przechowywania danych ze strumienia i odczytujemy : byte [ ] bytes = new byte [ ( int ) length ] ; int offset = 0 ; int numRead = 0 ; while ( offset < bytes . length && ( numRead = stream . read ( bytes , offset , bytes . length - offset ) ) >= 0 ) { offset += numRead ; } // Sprawdzamy , czy wszystkie dane zostały odczytane : if ( offset < bytes . length ) { throw new IOException ( "Could not completely read stream." ) ; } // Zamykamy strumień i zwracamy tablicę z jego zawartością : stream . close ( ) ; return bytes ;
public class MapReduceIndexManagement { /** * TODO make this future actually async and update javadoc @ return accordingly */ public TitanManagement . IndexJobFuture updateIndex ( TitanIndex index , SchemaAction updateAction ) throws BackendException { } }
Preconditions . checkNotNull ( index , "Index parameter must not be null" , index ) ; Preconditions . checkNotNull ( updateAction , "%s parameter must not be null" , SchemaAction . class . getSimpleName ( ) ) ; Preconditions . checkArgument ( SUPPORTED_ACTIONS . contains ( updateAction ) , "Only these %s parameters are supported: %s (was given %s)" , SchemaAction . class . getSimpleName ( ) , SUPPORTED_ACTIONS_STRING , updateAction ) ; Preconditions . checkArgument ( RelationTypeIndex . class . isAssignableFrom ( index . getClass ( ) ) || TitanGraphIndex . class . isAssignableFrom ( index . getClass ( ) ) , "Index %s has class %s: must be a %s or %s (or subtype)" , index . getClass ( ) , RelationTypeIndex . class . getSimpleName ( ) , TitanGraphIndex . class . getSimpleName ( ) ) ; org . apache . hadoop . conf . Configuration hadoopConf = new org . apache . hadoop . conf . Configuration ( ) ; ModifiableHadoopConfiguration titanmrConf = ModifiableHadoopConfiguration . of ( TitanHadoopConfiguration . MAPRED_NS , hadoopConf ) ; // The job we ' ll execute to either REINDEX or REMOVE _ INDEX final Class < ? extends IndexUpdateJob > indexJobClass ; final Class < ? extends Mapper > mapperClass ; // The class of the IndexUpdateJob and the Mapper that will be used to run it ( VertexScanJob vs ScanJob ) if ( updateAction . equals ( SchemaAction . REINDEX ) ) { indexJobClass = IndexRepairJob . class ; mapperClass = HadoopVertexScanMapper . class ; } else if ( updateAction . equals ( SchemaAction . REMOVE_INDEX ) ) { indexJobClass = IndexRemoveJob . class ; mapperClass = HadoopScanMapper . class ; } else { // Shouldn ' t get here - - if this exception is ever thrown , update SUPPORTED _ ACTIONS throw new IllegalStateException ( "Unrecognized " + SchemaAction . class . getSimpleName ( ) + ": " + updateAction ) ; } // The column family that serves as input to the IndexUpdateJob final String readCF ; if ( RelationTypeIndex . class . isAssignableFrom ( index . 
getClass ( ) ) ) { readCF = Backend . EDGESTORE_NAME ; } else { TitanGraphIndex gindex = ( TitanGraphIndex ) index ; if ( gindex . isMixedIndex ( ) && ! updateAction . equals ( SchemaAction . REINDEX ) ) throw new UnsupportedOperationException ( "External mixed indexes must be removed in the indexing system directly." ) ; Preconditions . checkState ( TitanGraphIndex . class . isAssignableFrom ( index . getClass ( ) ) ) ; if ( updateAction . equals ( SchemaAction . REMOVE_INDEX ) ) readCF = Backend . INDEXSTORE_NAME ; else readCF = Backend . EDGESTORE_NAME ; } titanmrConf . set ( TitanHadoopConfiguration . COLUMN_FAMILY_NAME , readCF ) ; // The MapReduce InputFormat class based on the open graph ' s store manager final Class < ? extends InputFormat > inputFormat ; final Class < ? extends KeyColumnValueStoreManager > storeManagerClass = graph . getBackend ( ) . getStoreManagerClass ( ) ; if ( CASSANDRA_STORE_MANAGER_CLASSES . contains ( storeManagerClass ) ) { inputFormat = CassandraBinaryInputFormat . class ; // Set the partitioner IPartitioner part = ( ( AbstractCassandraStoreManager ) graph . getBackend ( ) . getStoreManager ( ) ) . getCassandraPartitioner ( ) ; hadoopConf . set ( "cassandra.input.partitioner.class" , part . getClass ( ) . getName ( ) ) ; } else if ( HBASE_STORE_MANAGER_CLASSES . contains ( storeManagerClass ) ) { inputFormat = HBaseBinaryInputFormat . class ; } else { throw new IllegalArgumentException ( "Store manager class " + storeManagerClass + "is not supported" ) ; } // The index name and relation type name ( if the latter is applicable ) final String indexName = index . name ( ) ; final String relationTypeName = RelationTypeIndex . class . isAssignableFrom ( index . getClass ( ) ) ? ( ( RelationTypeIndex ) index ) . getType ( ) . name ( ) : "" ; Preconditions . checkNotNull ( indexName ) ; // Set the class of the IndexUpdateJob titanmrConf . set ( TitanHadoopConfiguration . SCAN_JOB_CLASS , indexJobClass . 
getName ( ) ) ; // Set the configuration of the IndexUpdateJob copyIndexJobKeys ( hadoopConf , indexName , relationTypeName ) ; titanmrConf . set ( TitanHadoopConfiguration . SCAN_JOB_CONFIG_ROOT , GraphDatabaseConfiguration . class . getName ( ) + "#JOB_NS" ) ; // Copy the StandardTitanGraph configuration under TitanHadoopConfiguration . GRAPH _ CONFIG _ KEYS org . apache . commons . configuration . Configuration localbc = graph . getConfiguration ( ) . getLocalConfiguration ( ) ; localbc . clearProperty ( Graph . GRAPH ) ; copyInputKeys ( hadoopConf , localbc ) ; String jobName = HadoopScanMapper . class . getSimpleName ( ) + "[" + indexJobClass . getSimpleName ( ) + "]" ; try { return new CompletedJobFuture ( HadoopScanRunner . runJob ( hadoopConf , inputFormat , jobName , mapperClass ) ) ; } catch ( Exception e ) { return new FailedJobFuture ( e ) ; }
public class ParameterSerializer { /** * Serialize array of object to a SFSArray * @ param unwrapper structure of java class * @ param array array of objects * @ return the SFSArray */ private ISFSArray parseObjectArray ( ClassUnwrapper unwrapper , Object [ ] array ) { } }
ISFSArray result = new SFSArray ( ) ; for ( Object obj : array ) { result . addSFSObject ( object2params ( unwrapper , obj ) ) ; } return result ;
public class FinalizePromotionOperation {
    /**
     * Calls rollback on all {@link MigrationAwareService}s.
     *
     * Each service is rolled back independently: a failure in one service is
     * logged and does not prevent the remaining services from rolling back.
     */
    private void rollbackServices() {
        PartitionMigrationEvent event = getPartitionMigrationEvent();
        for (MigrationAwareService service : getMigrationAwareServices()) {
            try {
                service.rollbackMigration(event);
            } catch (Throwable e) {
                // Best-effort: log and continue with the next service.
                logger.warning("While promoting " + getPartitionMigrationEvent(), e);
            }
        }
    }
}
public class TemporaryFiles { /** * Set the directory into which create temporary files . */ public void setTemporaryDirectory ( File dir ) throws IOException { } }
if ( dir != null ) { if ( ! dir . exists ( ) || ! dir . isDirectory ( ) ) if ( ! dir . mkdirs ( ) ) throw new IOException ( "Unable to create temporary directory: " + dir . getAbsolutePath ( ) ) ; } tempDir = dir ;
public class RestoreServerRequestMarshaller {
    /**
     * Marshalls the given request object onto the protocol marshaller,
     * binding each request field to its wire-level representation.
     *
     * @param restoreServerRequest the request to marshall; must not be null
     * @param protocolMarshaller   the marshaller receiving the bound fields
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(RestoreServerRequest restoreServerRequest, ProtocolMarshaller protocolMarshaller) {
        if (restoreServerRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each field to its JSON wire location.
            protocolMarshaller.marshall(restoreServerRequest.getBackupId(), BACKUPID_BINDING);
            protocolMarshaller.marshall(restoreServerRequest.getServerName(), SERVERNAME_BINDING);
            protocolMarshaller.marshall(restoreServerRequest.getInstanceType(), INSTANCETYPE_BINDING);
            protocolMarshaller.marshall(restoreServerRequest.getKeyPair(), KEYPAIR_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Discovery {
    /**
     * Creates a collection.
     *
     * Builds a POST request against
     * {@code v1/environments/{environment_id}/collections}, attaching the SDK
     * headers and a JSON body with the collection's name and any optional
     * description, configuration id and language.
     *
     * @param createCollectionOptions the {@link CreateCollectionOptions}
     *        containing the options for the call; must not be null
     * @return a {@link ServiceCall} with a response type of {@link Collection}
     */
    public ServiceCall<Collection> createCollection(CreateCollectionOptions createCollectionOptions) {
        Validator.notNull(createCollectionOptions, "createCollectionOptions cannot be null");
        String[] pathSegments = { "v1/environments", "collections" };
        String[] pathParameters = { createCollectionOptions.environmentId() };
        RequestBuilder builder = RequestBuilder.post(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments, pathParameters));
        builder.query("version", versionDate);
        // Attach the standard SDK analytics headers for this operation.
        Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("discovery", "v1", "createCollection");
        for (Entry<String, String> header : sdkHeaders.entrySet()) {
            builder.header(header.getKey(), header.getValue());
        }
        builder.header("Accept", "application/json");
        // Body: "name" is mandatory; the remaining fields are included only
        // when set on the options object.
        final JsonObject contentJson = new JsonObject();
        contentJson.addProperty("name", createCollectionOptions.name());
        if (createCollectionOptions.description() != null) {
            contentJson.addProperty("description", createCollectionOptions.description());
        }
        if (createCollectionOptions.configurationId() != null) {
            contentJson.addProperty("configuration_id", createCollectionOptions.configurationId());
        }
        if (createCollectionOptions.language() != null) {
            contentJson.addProperty("language", createCollectionOptions.language());
        }
        builder.bodyJson(contentJson);
        return createServiceCall(builder.build(), ResponseConverterUtils.getObject(Collection.class));
    }
}
public class SpringApplication {
    /**
     * Applies any {@link ApplicationContextInitializer}s to the context before
     * it is refreshed.
     *
     * @param context the configured ApplicationContext (not refreshed yet)
     * @see ConfigurableApplicationContext#refresh()
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    protected void applyInitializers(ConfigurableApplicationContext context) {
        for (ApplicationContextInitializer initializer : getInitializers()) {
            // Verify the initializer's declared generic type accepts this
            // context before invoking it.
            Class<?> requiredType = GenericTypeResolver.resolveTypeArgument(initializer.getClass(), ApplicationContextInitializer.class);
            Assert.isInstanceOf(requiredType, context, "Unable to call initializer.");
            initializer.initialize(context);
        }
    }
}
public class BeanProcessor {
    /**
     * {@inheritDoc}
     *
     * Processes elements annotated with {@code @Bean}: for each annotated
     * class, builds a bean definition from its enclosed methods and fields and
     * generates a source file for it; annotated methods are only collected.
     */
    @Override
    public boolean process(final Set<? extends TypeElement> annotations, final RoundEnvironment roundEnv) {
        final Map<String, VariableElement> fields = new ConcurrentHashMap<>();
        final Map<String, ExecutableElement> methods = new ConcurrentHashMap<>();
        for (final Element element : roundEnv.getElementsAnnotatedWith(Bean.class)) {
            if (element.getKind() == ElementKind.CLASS) {
                final Bean bean = element.getAnnotation(Bean.class);
                final Class beanDef = new Class();
                final TypeElement classElement = (TypeElement) element;
                final PackageElement packageElement = (PackageElement) classElement.getEnclosingElement();
                this.processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, "annotated class: " + classElement.getQualifiedName(), element);
                // Name and package of the generated bean come from the
                // annotation value and the enclosing package.
                beanDef.name(bean.value());
                beanDef._package(org.jrebirth.af.tooling.codegen.bean.Package.create().qualifiedName(packageElement.getQualifiedName().toString()));
                // The annotated class itself becomes the generated bean's super type.
                final Class td = new Class();
                td.qualifiedName(classElement.getQualifiedName().toString());
                beanDef.setSuperType(td);
                // Collect a property per enclosed method (typed by its return
                // type) and per enclosed field.
                for (final Element child : classElement.getEnclosedElements()) {
                    if (child.getKind() == ElementKind.METHOD) {
                        final ExecutableElement method = (ExecutableElement) child;
                        final Property propertyDef = new Property();
                        propertyDef.type(Class.of(method.getReturnType().toString()));
                        propertyDef.name(method.getSimpleName().toString());
                        beanDef.properties().add(propertyDef);
                    } else if (child.getKind().isField()) {
                        final VariableElement field = (VariableElement) child;
                        final Property propertyDef = new Property();
                        propertyDef.type(Class.of(getClassName(field)));
                        propertyDef.name(field.getSimpleName().toString());
                        beanDef.properties().add(propertyDef);
                    }
                }
                // Generate the bean source and write it through the Filer.
                try {
                    final String formattedSource = Generators.beanGenerator.generate(beanDef, Roaster.create(JavaClassSource.class));
                    final JavaFileObject jfo = this.processingEnv.getFiler().createSourceFile(beanDef.qualifiedName());
                    this.processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, "creating source file: " + jfo.toUri());
                    final Writer writer = jfo.openWriter();
                    writer.write(formattedSource);
                    writer.close();
                } catch (final Exception e) {
                    // Abort the round loudly: a failed generation must not be silent.
                    e.printStackTrace();
                    throw new RuntimeException(e);
                }
            } else if (element.getKind() == ElementKind.METHOD) {
                // Annotated methods are recorded but not generated from here.
                final ExecutableElement exeElement = (ExecutableElement) element;
                this.processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, "annotated method: " + exeElement.getSimpleName(), element);
                methods.put(exeElement.getSimpleName().toString(), exeElement);
            }
        }
        // Claim the annotation so no other processor handles it.
        return true;
    }
}
public class XMLSerializer { /** * Returns an escaped version of the input string . The string is guaranteed * to not contain illegal XML characters ( { @ code & < > } ) . * If no escaping is needed , the input string is returned as is . * @ param pValue the input string that might need escaping . * @ return an escaped version of the input string . */ static String maybeEscapeElementValue ( final String pValue ) { } }
int startEscape = needsEscapeElement ( pValue ) ; if ( startEscape < 0 ) { // If no escaping is needed , simply return original return pValue ; } else { // Otherwise , start replacing StringBuilder builder = new StringBuilder ( pValue . substring ( 0 , startEscape ) ) ; builder . ensureCapacity ( pValue . length ( ) + 30 ) ; int pos = startEscape ; for ( int i = pos ; i < pValue . length ( ) ; i ++ ) { switch ( pValue . charAt ( i ) ) { case '&' : pos = appendAndEscape ( pValue , pos , i , builder , "&amp;" ) ; break ; case '<' : pos = appendAndEscape ( pValue , pos , i , builder , "&lt;" ) ; break ; case '>' : pos = appendAndEscape ( pValue , pos , i , builder , "&gt;" ) ; break ; // case ' \ ' ' : // case ' " ' : default : break ; } } builder . append ( pValue . substring ( pos ) ) ; return builder . toString ( ) ; }
public class FileIoUtil { /** * Writes a properties Object to file . * Returns true on success , false otherwise . * @ param _ file * @ param _ props * @ return true on success , false otherwise */ public static boolean writeProperties ( File _file , Properties _props ) { } }
LOGGER . debug ( "Trying to write Properties to file: " + _file ) ; try ( FileOutputStream out = new FileOutputStream ( _file ) ) { _props . store ( out , _file . getName ( ) ) ; LOGGER . debug ( "Successfully wrote properties to file: " + _file ) ; } catch ( IOException _ex ) { LOGGER . warn ( "Could not save File: " + _file , _ex ) ; return false ; } return true ;
public class AcpService { /** * 功能 : 将批量文件内容使用DEFLATE压缩算法压缩 , Base64编码生成字符串并返回 < br > * 适用到的交易 : 批量代付 , 批量代收 , 批量退货 < br > * @ param filePath 批量文件 - 全路径文件名 < br > * @ return */ public static String enCodeFileContent ( String filePath , String encoding ) { } }
String baseFileContent = "" ; File file = new File ( filePath ) ; if ( ! file . exists ( ) ) { try { file . createNewFile ( ) ; } catch ( IOException e ) { LogUtil . writeErrorLog ( e . getMessage ( ) , e ) ; } } InputStream in = null ; try { in = new FileInputStream ( file ) ; int fl = in . available ( ) ; if ( null != in ) { byte [ ] s = new byte [ fl ] ; in . read ( s , 0 , fl ) ; // 压缩编码 . baseFileContent = new String ( SecureUtil . base64Encode ( SDKUtil . deflater ( s ) ) , encoding ) ; } } catch ( Exception e ) { LogUtil . writeErrorLog ( e . getMessage ( ) , e ) ; } finally { if ( null != in ) { try { in . close ( ) ; } catch ( IOException e ) { LogUtil . writeErrorLog ( e . getMessage ( ) , e ) ; } } } return baseFileContent ;
public class UIComponent { /** * On button release . * @ param button the button * @ return true , if successful */ public boolean onButtonRelease ( MouseButton button ) { } }
if ( ! isEnabled ( ) ) return false ; return parent != null ? parent . onButtonRelease ( button ) : false ;
public class ThriftCodecByteCodeGenerator {
    /**
     * Defines the code that calls the builder factory method.
     * Emits bytecode that loads the builder instance and each constructor
     * argument (read earlier into {@code structData}), invokes the factory
     * method, and stores the resulting object back into {@code instance}.
     * No-op when the struct metadata declares no builder method.
     *
     * @param read the method definition being generated
     * @param structData locals holding the decoded field values, keyed by thrift field id
     * @param instance local that receives the constructed object
     */
    private void invokeFactoryMethod(MethodDefinition read, Map<Short, LocalVariableDefinition> structData, LocalVariableDefinition instance) {
        if (metadata.getBuilderMethod().isPresent()) {
            ThriftMethodInjection builderMethod = metadata.getBuilderMethod().get();
            // receiver for the virtual call
            read.loadVariable(instance);
            // push parameters on stack, in declared parameter order
            for (ThriftParameterInjection parameter : builderMethod.getParameters()) {
                read.loadVariable(structData.get(parameter.getId()));
            }
            // invoke the method and overwrite the instance local with the result
            read.invokeVirtual(builderMethod.getMethod()).storeVariable(instance);
        }
    }
}
public class LockedInodePath {
    /**
     * Downgrades from the current locking scheme to the desired locking scheme.
     * Only weakening (or identity) transitions are performed; attempting to
     * "downgrade" to a stronger pattern trips a precondition.
     *
     * @param desiredLockPattern the pattern to downgrade to
     */
    public void downgradeToPattern(LockPattern desiredLockPattern) {
        switch (desiredLockPattern) {
            case READ:
                if (mLockPattern == LockPattern.WRITE_INODE) {
                    // Implicitly-held locks cannot be downgraded in place.
                    Preconditions.checkState(!isImplicitlyLocked());
                    mLockList.downgradeLastInode();
                } else if (mLockPattern == LockPattern.WRITE_EDGE) {
                    downgradeEdgeToInode(LockMode.READ);
                }
                // READ -> READ is a no-op.
                break;
            case WRITE_INODE:
                if (mLockPattern == LockPattern.WRITE_EDGE) {
                    // Trade the terminal edge write lock for an inode write lock.
                    downgradeEdgeToInode(LockMode.WRITE);
                } else {
                    Preconditions.checkState(mLockPattern == LockPattern.WRITE_INODE);
                }
                break;
            case WRITE_EDGE:
                // Already the strongest pattern; must currently hold it. Nothing to do.
                Preconditions.checkState(mLockPattern == LockPattern.WRITE_EDGE);
                break;
            default:
                throw new IllegalStateException("Unknown lock pattern: " + desiredLockPattern);
        }
        // Record the new (weaker or equal) pattern.
        mLockPattern = desiredLockPattern;
    }
}
public class QuestOWL {
    /**
     * The caller is in charge of closing the connection after usage.
     * (the reasoner is not responsible of connections)
     *
     * @return a new OWL-level connection wrapping an Ontop connection
     * @throws ReasonerInternalException if the reasoner was never properly
     *         initialized, or if obtaining the underlying connection fails
     */
    @Override
    public OntopOWLConnection getConnection() throws ReasonerInternalException {
        if (!questready) {
            // Initialization failed earlier; surface the original cause's
            // message and stack trace in the thrown exception.
            OWLReasonerRuntimeException owlReasonerRuntimeException = new ReasonerInternalException(
                    "Ontop was not initialized properly. This is generally indicates, "
                            + "connection problems or error during ontology or mapping pre-processing. "
                            + "\n\nOriginal error message:\n" + questException.getMessage());
            owlReasonerRuntimeException.setStackTrace(questException.getStackTrace());
            throw owlReasonerRuntimeException;
        }
        try {
            OntopConnection conn = queryEngine.getConnection();
            return new DefaultOntopOWLConnection(conn, inputQueryFactory);
        } catch (OntopConnectionException e) {
            // TODO: find a better exception?
            throw new ReasonerInternalException(e);
        }
    }
}
public class FessMessages { /** * Add the created action message for the key ' success . delete _ file ' with parameters . * < pre > * message : Deleted { 0 } file . * < / pre > * @ param property The property name for the message . ( NotNull ) * @ param arg0 The parameter arg0 for message . ( NotNull ) * @ return this . ( NotNull ) */ public FessMessages addSuccessDeleteFile ( String property , String arg0 ) { } }
assertPropertyNotNull ( property ) ; add ( property , new UserMessage ( SUCCESS_delete_file , arg0 ) ) ; return this ;
public class StatsThread { /** * Starst all progress loggers . * @ param previousCrawlDuration the duration of the previous crawl , or zero for a new crawl . */ public void start ( long previousCrawlDuration ) { } }
requestLogger . start ( previousCrawlDuration ) ; resourceLogger . start ( previousCrawlDuration ) ; transferredBytesLogger . start ( previousCrawlDuration ) ; receivedURLsLogger . start ( previousCrawlDuration ) ;
public class JSONObject { /** * Convert this object into a String of JSON text , specifying verbosity . * @ param verbose Whether or not to serialize in compressed for formatted Strings . * @ throws IOException Thrown on IO errors during serialization . */ public String serialize ( boolean verbose ) throws IOException { } }
Serializer serializer ; StringWriter writer = new StringWriter ( ) ; if ( verbose ) { serializer = new SerializerVerbose ( writer ) ; } else { serializer = new Serializer ( writer ) ; } serializer . writeObject ( this ) . flush ( ) ; return writer . toString ( ) ;
public class TraceFactory { /** * Specify what to trace and start tracing it . * @ param activeNames of classes to be activated separated by " : " * " * " represents any name . * @ param traceLevel to be applied . * @ throws java . io . IOException */ public void setActiveTrace ( String activeNames , int traceLevel ) throws java . io . IOException { } }
TraceFactory . activeNames = activeNames ; TraceFactory . traceLevel = traceLevel ; ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcFooting ( ) { } }
if ( ifcFootingEClass == null ) { ifcFootingEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 257 ) ; } return ifcFootingEClass ;
public class MultiDbJDBCConnection { /** * { @ inheritDoc } */ @ Override protected ResultSet findLastOrderNumberByParentIdentifier ( String parentIdentifier ) throws SQLException { } }
if ( findLastOrderNumberByParentId == null ) { findLastOrderNumberByParentId = dbConnection . prepareStatement ( FIND_LAST_ORDER_NUMBER_BY_PARENTID ) ; } else { findLastOrderNumberByParentId . clearParameters ( ) ; } findLastOrderNumberByParentId . setString ( 1 , parentIdentifier ) ; return findLastOrderNumberByParentId . executeQuery ( ) ;
public class StringUtils { /** * < p > Uncapitalizes a String , changing the first character to lower case as * per { @ link Character # toLowerCase ( int ) } . No other characters are changed . < / p > * < p > For a word based algorithm , see { @ link org . apache . commons . lang3 . text . WordUtils # uncapitalize ( String ) } . * A { @ code null } input String returns { @ code null } . < / p > * < pre > * StringUtils . uncapitalize ( null ) = null * StringUtils . uncapitalize ( " " ) = " " * StringUtils . uncapitalize ( " cat " ) = " cat " * StringUtils . uncapitalize ( " Cat " ) = " cat " * StringUtils . uncapitalize ( " CAT " ) = " cAT " * < / pre > * @ param str the String to uncapitalize , may be null * @ return the uncapitalized String , { @ code null } if null String input * @ see org . apache . commons . lang3 . text . WordUtils # uncapitalize ( String ) * @ see # capitalize ( String ) * @ since 2.0 */ public static String uncapitalize ( final String str ) { } }
int strLen ; if ( str == null || ( strLen = str . length ( ) ) == 0 ) { return str ; } final int firstCodepoint = str . codePointAt ( 0 ) ; final int newCodePoint = Character . toLowerCase ( ( char ) firstCodepoint ) ; if ( firstCodepoint == newCodePoint ) { // already capitalized return str ; } final int newCodePoints [ ] = new int [ strLen ] ; // cannot be longer than the char array int outOffset = 0 ; newCodePoints [ outOffset ++ ] = newCodePoint ; // copy the first codepoint for ( int inOffset = Character . charCount ( firstCodepoint ) ; inOffset < strLen ; ) { final int codepoint = str . codePointAt ( inOffset ) ; newCodePoints [ outOffset ++ ] = codepoint ; // copy the remaining ones inOffset += Character . charCount ( codepoint ) ; } return new String ( newCodePoints , 0 , outOffset ) ;
public class IndexUpdateTransactionEventHandler { /** * in async mode add the index action to a collection for consumption in { @ link # afterCommit ( TransactionData , Collection ) } , in sync mode , run it directly */ private Void indexUpdate ( Collection < Consumer < Void > > state , Consumer < Void > indexAction ) { } }
if ( async ) { state . add ( indexAction ) ; } else { indexAction . accept ( null ) ; } return null ;
public class InstanceDetailsMarshaller {
    /**
     * Marshall the given parameter object.
     * Writes each {@code InstanceDetails} getter value through the protocol
     * marshaller under its corresponding field binding. Any failure is wrapped
     * in an {@code SdkClientException}.
     *
     * @param instanceDetails the object to marshall; must not be null
     * @param protocolMarshaller the marshaller to write fields to
     * @throws SdkClientException if {@code instanceDetails} is null or marshalling fails
     */
    public void marshall(InstanceDetails instanceDetails, ProtocolMarshaller protocolMarshaller) {
        if (instanceDetails == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per field, each paired with its static binding.
            protocolMarshaller.marshall(instanceDetails.getAvailabilityZone(), AVAILABILITYZONE_BINDING);
            protocolMarshaller.marshall(instanceDetails.getIamInstanceProfile(), IAMINSTANCEPROFILE_BINDING);
            protocolMarshaller.marshall(instanceDetails.getImageDescription(), IMAGEDESCRIPTION_BINDING);
            protocolMarshaller.marshall(instanceDetails.getImageId(), IMAGEID_BINDING);
            protocolMarshaller.marshall(instanceDetails.getInstanceId(), INSTANCEID_BINDING);
            protocolMarshaller.marshall(instanceDetails.getInstanceState(), INSTANCESTATE_BINDING);
            protocolMarshaller.marshall(instanceDetails.getInstanceType(), INSTANCETYPE_BINDING);
            protocolMarshaller.marshall(instanceDetails.getLaunchTime(), LAUNCHTIME_BINDING);
            protocolMarshaller.marshall(instanceDetails.getNetworkInterfaces(), NETWORKINTERFACES_BINDING);
            protocolMarshaller.marshall(instanceDetails.getPlatform(), PLATFORM_BINDING);
            protocolMarshaller.marshall(instanceDetails.getProductCodes(), PRODUCTCODES_BINDING);
            protocolMarshaller.marshall(instanceDetails.getTags(), TAGS_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SxRestClient {
    /**
     * Executes REST request and return results for a given IPs.
     * Normally there is only 1 result in the list per IP.
     * Consults the optional cache first; on a miss, queries the service,
     * parses each returned IP element, and stores the result in the cache.
     *
     * @param ip IP to get geo info for. Multiple IPs can be used with comma as separator.
     * @return list of SxGeoResult results
     * @throws IllegalArgumentException if IP address is invalid.
     */
    @NotNull
    public List<SxGeoResult> getList(@NotNull String ip) {
        // Validate format up front (single IPv4 or comma-separated list).
        if (!IPV4_COMMA_SEPARATED_PATTERN.matcher(ip).matches()) {
            throw new IllegalArgumentException("Illegal IP address or list: " + ip);
        }
        // Counted for every call, cached or not.
        clientQueriesCount++;
        List<SxGeoResult> cachedResult = cache == null ? null : cache.getList(ip);
        if (cachedResult != null) {
            return cachedResult;
        }
        try {
            NodeList ipNodes = query(ip);
            ArrayList<SxGeoResult> result = new ArrayList<>();
            for (int i = 0; i < ipNodes.getLength(); i++) {
                result.add(parseIp((Element) ipNodes.item(i)));
            }
            if (cache != null) {
                cache.add(ip, result);
            }
            return result;
        } catch (ParserConfigurationException | SAXException | IOException e) {
            // XML/transport failures are surfaced as unchecked exceptions.
            throw new RuntimeException(e);
        }
    }
}
public class LazyInitializer {
    /**
     * Returns the object wrapped by this instance. On first access the object
     * is created. After that it is cached and can be accessed pretty fast.
     * Implemented with double-checked locking: {@code NO_INIT} marks the
     * not-yet-created state, and the statement order is essential for
     * thread-safety — do not reorder.
     *
     * @return the object initialized by this {@code LazyInitializer}
     * @throws ConcurrentException if an error occurred during initialization of
     *         the object
     */
    @Override
    public T get() throws ConcurrentException {
        // use a temporary variable to reduce the number of reads of the
        // volatile field
        T result = object;

        if (result == NO_INIT) {
            synchronized (this) {
                // re-check under the lock: another thread may have won the race
                result = object;
                if (result == NO_INIT) {
                    object = result = initialize();
                }
            }
        }

        return result;
    }
}
public class ConditionMarshaller {
    /**
     * Marshall the given parameter object.
     * Writes each comparison operand of the {@code Condition}
     * (eq/gt/gte/lt/lte/neq) through the protocol marshaller under its
     * field binding; failures are wrapped in {@code SdkClientException}.
     *
     * @param condition the condition to marshall; must not be null
     * @param protocolMarshaller the marshaller to write fields to
     * @throws SdkClientException if {@code condition} is null or marshalling fails
     */
    public void marshall(Condition condition, ProtocolMarshaller protocolMarshaller) {
        if (condition == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(condition.getEq(), EQ_BINDING);
            protocolMarshaller.marshall(condition.getGt(), GT_BINDING);
            protocolMarshaller.marshall(condition.getGte(), GTE_BINDING);
            protocolMarshaller.marshall(condition.getLt(), LT_BINDING);
            protocolMarshaller.marshall(condition.getLte(), LTE_BINDING);
            protocolMarshaller.marshall(condition.getNeq(), NEQ_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Singles {
    /**
     * Wait until all the provided Future's to complete.
     * Delegates to {@code Future.allOf} over the converted futures and wraps
     * the combined publisher as a {@code Single}.
     *
     * @see CompletableFuture#allOf(CompletableFuture...)
     * @param fts Singles to wait on
     * @return Single that completes when all the provided Futures Complete. Empty Future result, or holds an Exception
     *         from a provided Future that failed.
     */
    public static <T> Single<T> allOf(Single<T>... fts) {
        return Single.fromPublisher(Future.allOf(futures(fts)));
    }
}
public class TwitterObjectFactory { /** * Constructs a User object from rawJSON string . * @ param rawJSON raw JSON form as String * @ return User * @ throws TwitterException when provided string is not a valid JSON string . * @ since Twitter4J 2.1.7 */ public static User createUser ( String rawJSON ) throws TwitterException { } }
try { return new UserJSONImpl ( new JSONObject ( rawJSON ) ) ; } catch ( JSONException e ) { throw new TwitterException ( e ) ; }
public class EJSDeployedSupport {
    /**
     * d194342.1.1 - added entire method.
     * Registers a callback (with its cookie) to be invoked around the EJB
     * method call. The two lists are parallel: index i of the cookie list
     * belongs to index i of the callback list; both are created lazily
     * on the first registration.
     */
    @Override
    public void enlistInvocationCallback(InvocationCallback callback, Object cookie) {
        if (ivEJBMethodCallback == null) {
            // First registration: create both parallel lists together.
            ivEJBMethodCallback = new ArrayList<InvocationCallback>();
            ivEJBMethodCallbackCookie = new ArrayList<Object>();
        }
        ivEJBMethodCallback.add(callback);
        ivEJBMethodCallbackCookie.add(cookie);
    }
}
public class HeapCache { /** * Remove the entry from the hash table . The entry is already removed from the replacement list . * Stop the timer , if needed . The remove races with a clear . The clear * is not updating each entry state to e . isGone ( ) but just drops the whole hash table instead . * < p > With completion of the method the entry content is no more visible . " Nulling " out the key * or value of the entry is incorrect , since there can be another thread which is just about to * return the entry contents . */ public void removeEntryForEviction ( Entry < K , V > e ) { } }
boolean f = hash . remove ( e ) ; checkForHashCodeChange ( e ) ; timing . cancelExpiryTimer ( e ) ; e . setGone ( ) ;
public class DiffieHellmanGroup1Sha1 {
    /**
     * Calculates the exchange hash as an SHA1 hash of the following data.
     * <blockquote><pre>
     * String  the client's version string (CR and NL excluded)
     * String  the server's version string (CR and NL excluded)
     * String  the payload of the client's SSH_MSG_KEXINIT
     * String  the payload of the server's SSH_MSG_KEXINIT
     * String  the host key
     * BigInteger e, exchange value sent by the client
     * BigInteger f, exchange value sent by the server
     * BigInteger K, the shared secret
     * </pre></blockquote>
     * The field order is fixed by the SSH transport protocol; do not reorder
     * the hash inputs. The result is stored in {@code exchangeHash}.
     *
     * @throws SshException if the digest cannot be created or updated
     */
    protected void calculateExchangeHash() throws SshException {
        Digest hash = (Digest) ComponentManager.getInstance().supportedDigests().getInstance("SHA-1");
        // The local software version comments
        hash.putString(clientId);
        // The remote software version comments
        hash.putString(serverId);
        // The local kex init payload (length-prefixed, as an SSH string)
        hash.putInt(clientKexInit.length);
        hash.putBytes(clientKexInit);
        // The remote kex init payload (length-prefixed, as an SSH string)
        hash.putInt(serverKexInit.length);
        hash.putBytes(serverKexInit);
        // The host key (length-prefixed, as an SSH string)
        hash.putInt(hostKey.length);
        hash.putBytes(hostKey);
        // The diffie hellman e value
        hash.putBigInteger(e);
        // The diffie hellman f value
        hash.putBigInteger(f);
        // The diffie hellman k value (shared secret)
        hash.putBigInteger(secret);
        // Do the final output
        exchangeHash = hash.doFinal();
    }
}
public class HttpContinue {
    /**
     * Sends a continue response using blocking IO.
     * Emits a 100 Continue as an out-of-band response on the connection;
     * idempotent — a second call on the same exchange is a no-op (tracked
     * via the {@code ALREADY_SENT} attachment).
     *
     * @param exchange The exchange
     * @throws IOException on I/O failure while writing the response
     */
    public static void sendContinueResponseBlocking(final HttpServerExchange exchange) throws IOException {
        if (!exchange.isResponseChannelAvailable()) {
            throw UndertowMessages.MESSAGES.cannotSendContinueResponse();
        }
        // Already sent for this exchange; nothing to do.
        if (exchange.getAttachment(ALREADY_SENT) != null) {
            return;
        }
        HttpServerExchange newExchange = exchange.getConnection().sendOutOfBandResponse(exchange);
        exchange.putAttachment(ALREADY_SENT, true);
        newExchange.setStatusCode(StatusCodes.CONTINUE);
        // 100 Continue carries no body.
        newExchange.getResponseHeaders().put(Headers.CONTENT_LENGTH, 0);
        // Switch to blocking mode and close both streams to flush the response.
        newExchange.startBlocking();
        newExchange.getOutputStream().close();
        newExchange.getInputStream().close();
    }
}
public class BitmapUtil {
    /**
     * Decodes the bounds of an image and returns its width and height or null
     * if the size can't be determined.
     * Uses {@code inJustDecodeBounds} so no pixel data is allocated; decode
     * scratch space comes from a shared buffer pool and is always returned
     * to the pool in the finally block.
     *
     * @param is the InputStream containing the image data
     * @return dimensions of the image, or null if undeterminable
     */
    public static @Nullable Pair<Integer, Integer> decodeDimensions(InputStream is) {
        Preconditions.checkNotNull(is);
        // Borrow a scratch buffer from the pool; fall back to a fresh one.
        ByteBuffer byteBuffer = DECODE_BUFFERS.acquire();
        if (byteBuffer == null) {
            byteBuffer = ByteBuffer.allocate(DECODE_BUFFER_SIZE);
        }
        BitmapFactory.Options options = new BitmapFactory.Options();
        // Only decode the header — no bitmap allocation.
        options.inJustDecodeBounds = true;
        try {
            options.inTempStorage = byteBuffer.array();
            BitmapFactory.decodeStream(is, null, options);
            // -1 in either dimension means the decoder could not determine the size.
            return (options.outWidth == -1 || options.outHeight == -1)
                    ? null
                    : new Pair<>(options.outWidth, options.outHeight);
        } finally {
            // Always return the buffer to the pool, even on decode failure.
            DECODE_BUFFERS.release(byteBuffer);
        }
    }
}
public class ConnectController { /** * Adds a ConnectInterceptor to receive callbacks during the connection process . * Useful for programmatic configuration . * @ param interceptor the connect interceptor to add */ public void addInterceptor ( ConnectInterceptor < ? > interceptor ) { } }
Class < ? > serviceApiType = GenericTypeResolver . resolveTypeArgument ( interceptor . getClass ( ) , ConnectInterceptor . class ) ; connectInterceptors . add ( serviceApiType , interceptor ) ;
public class Comparables { /** * Checks the provided { @ link Comparable } instances for equality . * Special numeric comparison logic is used for { @ link Double } , { @ link Long } , * { @ link Float } , { @ link Integer } , { @ link Short } and { @ link Byte } . See * { @ link Numbers # equal ( Number , Number ) } for more details . * @ param lhs the left - hand side { @ link Comparable } . Can ' t be { @ code null } . * @ param rhs the right - hand side { @ link Comparable } . May be { @ code null } . * @ return { @ code true } if the provided comparables are equal , { @ code false } * otherwise . */ public static boolean equal ( Comparable lhs , Comparable rhs ) { } }
assert lhs != null ; if ( rhs == null ) { return false ; } if ( lhs . getClass ( ) == rhs . getClass ( ) ) { return lhs . equals ( rhs ) ; } if ( lhs instanceof Number && rhs instanceof Number ) { return Numbers . equal ( ( Number ) lhs , ( Number ) rhs ) ; } return lhs . equals ( rhs ) ;
public class Choice3 {
    /**
     * Static factory method for wrapping a value of type <code>B</code> in a {@link Choice3}.
     *
     * @param b the value
     * @param <A> the first possible type
     * @param <B> the second possible type
     * @param <C> the third possible type
     * @return the wrapped value as a {@link Choice3}&lt;A, B, C&gt;
     */
    public static <A, B, C> Choice3<A, B, C> b(B b) {
        // Wrap in the internal _B variant representing the second alternative.
        return new _B<>(b);
    }
}
public class ZonedDateTime {
    /**
     * Obtains an instance of {@code ZonedDateTime} from a temporal object.
     *
     * A {@code TemporalAccessor} represents some form of date and time information.
     * This factory converts the arbitrary temporal object to an instance of {@code ZonedDateTime}.
     *
     * The conversion will first obtain a {@code ZoneId}. It will then try to obtain an instant.
     * If that fails it will try to obtain a local date-time. The zoned date time
     * will either be a combination of {@code ZoneId} and instant, or {@code ZoneId}
     * and local date-time.
     *
     * This method matches the signature of the functional interface {@link TemporalQuery}
     * allowing it to be used in queries via method reference, {@code ZonedDateTime::from}.
     *
     * @param temporal the temporal object to convert, not null
     * @return the zoned date-time, not null
     * @throws DateTimeException if unable to convert to an {@code ZonedDateTime}
     */
    public static ZonedDateTime from(TemporalAccessor temporal) {
        // Fast path: already the right type.
        if (temporal instanceof ZonedDateTime) {
            return (ZonedDateTime) temporal;
        }
        try {
            ZoneId zone = ZoneId.from(temporal);
            // Prefer the instant-based construction when an instant is available.
            if (temporal.isSupported(INSTANT_SECONDS)) {
                try {
                    long epochSecond = temporal.getLong(INSTANT_SECONDS);
                    int nanoOfSecond = temporal.get(NANO_OF_SECOND);
                    return create(epochSecond, nanoOfSecond, zone);
                } catch (DateTimeException ex) {
                    // ignore — fall through to the local date-time path
                }
            }
            // Fallback: combine a local date-time with the zone.
            LocalDateTime ldt = LocalDateTime.from(temporal);
            return of(ldt, zone);
        } catch (DateTimeException ex) {
            throw new DateTimeException("Unable to obtain ZonedDateTime from TemporalAccessor: "
                    + temporal + ", type " + temporal.getClass().getName());
        }
    }
}
public class Card {
    /**
     * Retrieves an Issuing <code>Card</code> object.
     *
     * @param card the id of the card to retrieve
     * @return the retrieved card
     * @throws StripeException if the API request fails
     */
    public static Card retrieve(String card) throws StripeException {
        // Delegate to the full overload with no extra params or request options;
        // the casts disambiguate between overloads.
        return retrieve(card, (Map<String, Object>) null, (RequestOptions) null);
    }
}
public class VisualizrReporter { /** * Report a histogram using fields max / mean / min / stddev , p50 / p75 / p95 / p98 / p99 / p999 / count * @ param name * @ param histogram */ private void reportHistogram ( String name , Histogram histogram ) { } }
final Snapshot snapshot = histogram . getSnapshot ( ) ; String prefixedName = prefix ( name ) ; if ( ! snapshots . hasDescriptor ( prefixedName ) ) { MetricItem . Builder builder = MetricItem . Builder . create ( ) ; builder . count ( "max" ) ; builder . count ( "mean" ) ; builder . count ( "min" ) ; builder . count ( "stddev" ) ; builder . count ( "p50" ) ; builder . count ( "p75" ) ; builder . count ( "p95" ) ; builder . count ( "p98" ) ; builder . count ( "p99" ) ; builder . count ( "p95" ) ; builder . count ( "p999" ) ; builder . count ( "count" ) ; snapshots . setDescriptor ( prefixedName , builder . build ( ) ) ; } Map < String , Number > values = new HashMap < > ( ) ; values . put ( "max" , ( snapshot . getMax ( ) ) ) ; values . put ( "mean" , ( snapshot . getMean ( ) ) ) ; values . put ( "min" , ( snapshot . getMin ( ) ) ) ; values . put ( "stddev" , ( snapshot . getStdDev ( ) ) ) ; values . put ( "p50" , ( snapshot . getMedian ( ) ) ) ; values . put ( "p75" , ( snapshot . get75thPercentile ( ) ) ) ; values . put ( "p95" , ( snapshot . get95thPercentile ( ) ) ) ; values . put ( "p98" , ( snapshot . get98thPercentile ( ) ) ) ; values . put ( "p99" , ( snapshot . get99thPercentile ( ) ) ) ; values . put ( "p999" , ( snapshot . get999thPercentile ( ) ) ) ; values . put ( "count" , histogram . getCount ( ) ) ; snapshots . addSnapshot ( prefixedName , getTimestamp ( ) , values ) ;
public class CSSErrorStrategy {
    /**
     * Consumes token until lexer state is function-balanced and
     * token from follow is matched. Matched token is also consumed.
     * Stops early on EOF or when a non-CSSToken is encountered.
     *
     * @param recognizer the parser whose token stream is consumed
     * @param set the follow set of token types that terminates consumption
     * @param mode the recovery mode used for the balance check
     */
    protected void consumeUntilGreedy(Parser recognizer, IntervalSet set, CSSLexerState.RecoveryMode mode) {
        CSSToken t;
        do {
            // Peek at the next token without consuming it yet.
            Token next = recognizer.getInputStream().LT(1);
            if (next instanceof CSSToken) {
                t = (CSSToken) recognizer.getInputStream().LT(1);
                if (t.getType() == Token.EOF) {
                    logger.trace("token eof ");
                    break;
                }
            } else
                break; /* not a CSSToken, probably EOF */
            logger.trace("Skipped greedy: {}", t.getText());
            // consume token even if it will match
            recognizer.consume();
            // Loop until the lexer state is balanced AND the token is in the follow set.
        } while (!(t.getLexerState().isBalanced(mode, null, t) && set.contains(t.getType())));
    }
}
public class JaxWsHttpServletRequestAdapter { /** * ( non - Javadoc ) * @ see javax . servlet . ServletRequest # getLocalName ( ) */ @ Override public String getLocalName ( ) { } }
try { collaborator . preInvoke ( componentMetaData ) ; return request . getLocalName ( ) ; } finally { collaborator . postInvoke ( ) ; }
public class CmsGitToolOptionsPanel { /** * Sets an additional info value if it ' s not empty . < p > * @ param user the user on which to set the additional info * @ param key the additional info key * @ param value the additional info value */ private void setUserInfo ( CmsUser user , String key , String value ) { } }
if ( ! CmsStringUtil . isEmptyOrWhitespaceOnly ( value ) ) { user . getAdditionalInfo ( ) . put ( key , value ) ; }
public class Vectors { /** * Creates a product function accumulator , that calculates the product of * all elements in the vector after applying given { @ code function } to * each of them . * @ param neutral the neutral value * @ param function the vector function * @ return a product function accumulator */ public static VectorAccumulator asProductFunctionAccumulator ( final double neutral , final VectorFunction function ) { } }
return new VectorAccumulator ( ) { private final VectorAccumulator productAccumulator = Vectors . asProductAccumulator ( neutral ) ; @ Override public void update ( int i , double value ) { productAccumulator . update ( i , function . evaluate ( i , value ) ) ; } @ Override public double accumulate ( ) { return productAccumulator . accumulate ( ) ; } } ;