signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ClassificationService { /** * Returns the total effort points in all of the { @ link ClassificationModel } s associated with the provided { @ link FileModel } . */ public int getMigrationEffortPoints ( FileModel fileModel ) { } }
GraphTraversal < Vertex , Vertex > classificationPipeline = new GraphTraversalSource ( getGraphContext ( ) . getGraph ( ) ) . V ( fileModel . getElement ( ) ) ; classificationPipeline . in ( ClassificationModel . FILE_MODEL ) ; classificationPipeline . has ( EffortReportModel . EFFORT , P . gt ( 0 ) ) ; classificationPipeline . has ( WindupVertexFrame . TYPE_PROP , Text . textContains ( ClassificationModel . TYPE ) ) ; int classificationEffort = 0 ; for ( Vertex v : classificationPipeline . toList ( ) ) { Property < Integer > migrationEffort = v . property ( ClassificationModel . EFFORT ) ; if ( migrationEffort . isPresent ( ) ) { classificationEffort += migrationEffort . value ( ) ; } } return classificationEffort ;
public class HeaderFooterRecyclerAdapter { /** * Notifies that a content item is inserted . * @ param position the position of the content item . */ public final void notifyContentItemInserted ( int position ) { } }
int newHeaderItemCount = getHeaderItemCount ( ) ; int newContentItemCount = getContentItemCount ( ) ; if ( position < 0 || position >= newContentItemCount ) { throw new IndexOutOfBoundsException ( "The given position " + position + " is not within the position bounds for content items [0 - " + ( newContentItemCount - 1 ) + "]." ) ; } notifyItemInserted ( position + newHeaderItemCount ) ;
public class GCSHelper {
    /**
     * Retrieve part of the file.
     *
     * @param bucket  the GCS bucket to read from
     * @param fname   the object name within the bucket
     * @param start   first byte offset of the requested range
     * @param endIncl last byte offset of the requested range (inclusive, per the name)
     * @return the requested bytes as a {@code ByteArrayOutputStream}
     * @throws IOException if reading the object fails
     */
    public ByteArrayOutputStream getPartialObjectData(String bucket, String fname, long start, long endIncl) throws IOException {
        // Delegates to the five-argument overload; the trailing argument's meaning is
        // not visible here — presumably an optional parameter left unset. TODO confirm.
        return getPartialObjectData(bucket, fname, start, endIncl, null);
    }
}
public class Materialize { /** * set the insetsFrameLayout to display the content in fullscreen * under the statusBar and navigationBar * @ param fullscreen */ public void setFullscreen ( boolean fullscreen ) { } }
if ( mBuilder . mScrimInsetsLayout != null ) { mBuilder . mScrimInsetsLayout . setTintStatusBar ( ! fullscreen ) ; mBuilder . mScrimInsetsLayout . setTintNavigationBar ( ! fullscreen ) ; }
public class WebApplicationHandler {
    /**
     * Dispatches a request to the target servlet, applying the matching filter
     * chain. Initial requests get WEB-INF/META-INF protection and a security
     * constraint check; dispatched requests are checked for j_security_check.
     *
     * @param pathInContext the request path within this context (may be null for named dispatch)
     * @param request       the servlet request
     * @param response      the servlet response
     * @param servletHolder the holder of the target servlet (may be null)
     * @param type          the dispatch type (e.g. {@code Dispatcher.__REQUEST})
     * @throws ServletException     on servlet failure
     * @throws UnavailableException if the servlet is unavailable
     * @throws IOException          on I/O failure
     */
    protected void dispatch(String pathInContext, HttpServletRequest request, HttpServletResponse response,
            ServletHolder servletHolder, int type) throws ServletException, UnavailableException, IOException {
        if (type == Dispatcher.__REQUEST) {
            // This is NOT a dispatched request (it is an initial request).
            ServletHttpRequest servletHttpRequest = (ServletHttpRequest) request;
            ServletHttpResponse servletHttpResponse = (ServletHttpResponse) response;

            // Protect WEB-INF and META-INF from direct client access.
            if (StringUtil.startsWithIgnoreCase(pathInContext, "/web-inf")
                    || StringUtil.startsWithIgnoreCase(pathInContext, "/meta-inf")) {
                response.sendError(HttpResponse.__404_Not_Found);
                return;
            }

            // Security check — abort silently if constraints deny access.
            if (!getHttpContext().checkSecurityConstraints(pathInContext,
                    servletHttpRequest.getHttpRequest(), servletHttpResponse.getHttpResponse()))
                return;
        } else {
            // This is a dispatched request.
            // Handle dispatch to j_security_check.
            HttpContext context = getHttpContext();
            if (context != null && context instanceof ServletHttpContext && pathInContext != null
                    && pathInContext.endsWith(FormAuthenticator.__J_SECURITY_CHECK)) {
                ServletHttpRequest servletHttpRequest =
                        (ServletHttpRequest) context.getHttpConnection().getRequest().getWrapper();
                ServletHttpResponse servletHttpResponse = servletHttpRequest.getServletHttpResponse();
                ServletHttpContext servletContext = (ServletHttpContext) context;
                if (!servletContext.jSecurityCheck(pathInContext,
                        servletHttpRequest.getHttpRequest(), servletHttpResponse.getHttpResponse()))
                    return;
            }
        }

        // Build and/or cache filter chain: by path when we have one, otherwise by servlet name.
        FilterChain chain = null;
        if (pathInContext != null) {
            chain = getChainForPath(type, pathInContext, servletHolder);
        } else {
            chain = getChainForName(type, servletHolder);
        }

        if (log.isDebugEnabled())
            log.debug("chain=" + chain);

        // Do the handling: filter chain first, then bare servlet, else 404.
        if (chain != null)
            chain.doFilter(request, response);
        else if (servletHolder != null)
            servletHolder.handle(request, response);
        else
            // Not found
            notFound(request, response);
    }
}
public class JobSchedulesImpl {
    /**
     * Updates the properties of the specified job schedule.
     * This replaces only the job schedule properties specified in the request. For example, if the
     * schedule property is not specified with this request, then the Batch service will keep the
     * existing schedule. Changes to a job schedule only impact jobs created by the schedule after
     * the update has taken place; currently running jobs are unaffected.
     *
     * @param jobScheduleId             The ID of the job schedule to update.
     * @param jobSchedulePatchParameter The parameters for the request.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws BatchErrorException      thrown if the request is rejected by server
     * @throws RuntimeException         all other wrapped checked exceptions if the request fails to be sent
     */
    public void patch(String jobScheduleId, JobSchedulePatchParameter jobSchedulePatchParameter) {
        // Synchronous facade: block on the async call and discard the (void) body.
        patchWithServiceResponseAsync(jobScheduleId, jobSchedulePatchParameter).toBlocking().single().body();
    }
}
public class AbstractHttpTransport {
    /**
     * This method checks the request for the has conditions which may either be contained in URL
     * query arguments or in a cookie sent from the client.
     *
     * @param request the request object
     * @return The has conditions from the request, or null if none could be determined.
     * @throws IOException
     * @throws UnsupportedEncodingException
     */
    protected String getHasConditionsFromRequest(HttpServletRequest request) throws IOException {
        final String sourceMethod = "getHasConditionsFromRequest"; //$NON-NLS-1$
        boolean isTraceLogging = log.isLoggable(Level.FINER);
        if (isTraceLogging) {
            log.entering(sourceClass, sourceMethod, new Object[]{request});
        }
        String ret = null;
        if (request.getParameter(FEATUREMAPHASH_REQPARAM) != null) {
            // The hash query arg is present, so the conditions live in the 'has' cookie.
            if (isTraceLogging) {
                log.finer("has hash = " + request.getParameter(FEATUREMAPHASH_REQPARAM)); //$NON-NLS-1$
            }
            Cookie[] cookies = request.getCookies();
            if (cookies != null) {
                // Scan cookies for the feature map; stop at the first match.
                for (int i = 0; ret == null && i < cookies.length; i++) {
                    Cookie cookie = cookies[i];
                    if (cookie.getName().equals(FEATUREMAP_REQPARAM) && cookie.getValue() != null) {
                        if (isTraceLogging) {
                            log.finer("has cookie = " + cookie.getValue()); //$NON-NLS-1$
                        }
                        ret = URLDecoder.decode(cookie.getValue(), "US-ASCII"); //$NON-NLS-1$
                        break;
                    }
                }
            }
            if (ret == null) {
                // Hash was sent but no cookie found — warn, since the client should have sent it.
                if (log.isLoggable(Level.WARNING)) {
                    StringBuffer url = request.getRequestURL();
                    if (url != null) { // might be null if using mock request for unit testing
                        // Note: toString() result is discarded; append mutates url in place.
                        url.append("?").append(request.getQueryString()).toString(); //$NON-NLS-1$
                        log.warning(MessageFormat.format(Messages.AbstractHttpTransport_0,
                                new Object[]{url, request.getHeader("User-Agent")})); //$NON-NLS-1$
                    }
                }
            }
        } else {
            // No hash arg: the conditions are passed directly as a query argument.
            ret = request.getParameter(FEATUREMAP_REQPARAM);
            if (isTraceLogging) {
                log.finer("reading features from has query arg"); //$NON-NLS-1$
            }
        }
        if (isTraceLogging) {
            log.exiting(sourceClass, sourceMethod, ret);
        }
        return ret;
    }
}
public class ArrayTagSet {
    /**
     * Add a new tag to the set.
     *
     * @param k the tag key
     * @param v the tag value
     * @return a tag set containing the new tag (delegates to {@code add(Tag)})
     */
    ArrayTagSet add(String k, String v) {
        return add(new BasicTag(k, v));
    }
}
public class MockEC2QueryHandler { /** * Handles " describeSecurityGroups " request and returns response with a security group . * @ return a DescribeInternetGatewaysResponseType with our predefined internet gateway in aws - mock . properties ( or if * not overridden , as defined in aws - mock - default . properties ) */ private DescribeSecurityGroupsResponseType describeSecurityGroups ( ) { } }
DescribeSecurityGroupsResponseType ret = new DescribeSecurityGroupsResponseType ( ) ; ret . setRequestId ( UUID . randomUUID ( ) . toString ( ) ) ; SecurityGroupSetType securityGroupSet = new SecurityGroupSetType ( ) ; for ( Iterator < MockSecurityGroup > mockSecurityGroup = mockSecurityGroupController . describeSecurityGroups ( ) . iterator ( ) ; mockSecurityGroup . hasNext ( ) ; ) { MockSecurityGroup item = mockSecurityGroup . next ( ) ; // initialize securityGroupItem SecurityGroupItemType securityGroupItem = new SecurityGroupItemType ( ) ; securityGroupItem . setOwnerId ( MOCK_SECURITY_OWNER_ID ) ; securityGroupItem . setGroupName ( item . getGroupName ( ) ) ; if ( ! DEFAULT_MOCK_PLACEMENT . getAvailabilityZone ( ) . equals ( currentRegion ) ) { securityGroupItem . setGroupId ( currentRegion + "_" + item . getGroupId ( ) ) ; securityGroupItem . setVpcId ( currentRegion + "_" + item . getVpcId ( ) ) ; } else { securityGroupItem . setGroupId ( item . getGroupId ( ) ) ; securityGroupItem . setVpcId ( item . getVpcId ( ) ) ; } securityGroupItem . setGroupDescription ( item . getGroupDescription ( ) ) ; IpPermissionSetType ipPermissionSet = new IpPermissionSetType ( ) ; for ( MockIpPermissionType mockIpPermissionType : item . getIpPermissions ( ) ) { // initialize ipPermission IpPermissionType ipPermission = new IpPermissionType ( ) ; ipPermission . setFromPort ( mockIpPermissionType . getFromPort ( ) ) ; ipPermission . setToPort ( mockIpPermissionType . getToPort ( ) ) ; ipPermission . setIpProtocol ( mockIpPermissionType . getIpProtocol ( ) ) ; ipPermissionSet . getItem ( ) . add ( ipPermission ) ; } IpPermissionSetType ipPermissionEgressSet = new IpPermissionSetType ( ) ; for ( MockIpPermissionType mockIpPermissionType : item . getIpPermissionsEgress ( ) ) { // initialize ipPermission IpPermissionType ipPermission = new IpPermissionType ( ) ; ipPermission . setFromPort ( mockIpPermissionType . getFromPort ( ) ) ; ipPermission . setToPort ( mockIpPermissionType . 
getToPort ( ) ) ; ipPermission . setIpProtocol ( mockIpPermissionType . getIpProtocol ( ) ) ; ipPermissionEgressSet . getItem ( ) . add ( ipPermission ) ; } securityGroupItem . setIpPermissionsEgress ( ipPermissionEgressSet ) ; securityGroupSet . getItem ( ) . add ( securityGroupItem ) ; } ret . setSecurityGroupInfo ( securityGroupSet ) ; return ret ;
public class AbstractAttribute { /** * ( non - Javadoc ) * @ see javax . persistence . metamodel . Attribute # isAssociation ( ) */ public boolean isAssociation ( ) { } }
return persistenceAttribType . equals ( PersistentAttributeType . MANY_TO_MANY ) || persistenceAttribType . equals ( PersistentAttributeType . MANY_TO_ONE ) || persistenceAttribType . equals ( PersistentAttributeType . ONE_TO_MANY ) || persistenceAttribType . equals ( PersistentAttributeType . ONE_TO_ONE ) ;
public class DirectQuickSelectSketchR {
    /**
     * Returns the current size of this sketch in bytes.
     *
     * @param compact true to size the compact form (preamble plus retained entries),
     *                false to size the full updatable form (preamble plus hash array)
     * @return the number of bytes the sketch occupies in the requested form
     */
    @Override
    public int getCurrentBytes(final boolean compact) {
        if (!compact) {
            // Updatable form: preamble longs + 2^lgArrLongs array longs, converted to bytes (<<3).
            final byte lgArrLongs = mem_.getByte(LG_ARR_LONGS_BYTE);
            final int preambleLongs = mem_.getByte(PREAMBLE_LONGS_BYTE) & 0X3F; // low 6 bits hold the count
            final int lengthBytes = (preambleLongs + (1 << lgArrLongs)) << 3;
            return lengthBytes;
        }
        // Compact form: preamble longs + one long per retained entry, in bytes.
        final int preLongs = getCurrentPreambleLongs(true);
        final int curCount = getRetainedEntries(true);
        return (preLongs + curCount) << 3;
    }
}
public class SpoiledChildInterfaceImplementor { /** * builds a set of all non constructor or static initializer method / signatures * @ param cls * the class to build the method set from * @ return a set of method names / signatures */ private static Set < QMethod > buildMethodSet ( JavaClass cls ) { } }
Set < QMethod > methods = new HashSet < > ( ) ; boolean isInterface = cls . isInterface ( ) ; for ( Method m : cls . getMethods ( ) ) { boolean isDefaultInterfaceMethod = isInterface && ! m . isAbstract ( ) ; boolean isSyntheticForParentCall ; if ( m . isSynthetic ( ) ) { BitSet bytecodeSet = ClassContext . getBytecodeSet ( cls , m ) ; isSyntheticForParentCall = ( bytecodeSet != null ) && bytecodeSet . get ( Const . INVOKESPECIAL ) ; } else { isSyntheticForParentCall = false ; } if ( ! isSyntheticForParentCall && ! isDefaultInterfaceMethod ) { String methodName = m . getName ( ) ; QMethod methodInfo = new QMethod ( methodName , m . getSignature ( ) ) ; if ( ! OBJECT_METHODS . contains ( methodInfo ) ) { if ( ! Values . CONSTRUCTOR . equals ( methodName ) && ! Values . STATIC_INITIALIZER . equals ( methodName ) ) { methods . add ( methodInfo ) ; } } } } return methods ;
public class AnimatorSet { /** * This method creates a < code > Builder < / code > object , which is used to * set up playing constraints . This initial < code > play ( ) < / code > method * tells the < code > Builder < / code > the animation that is the dependency for * the succeeding commands to the < code > Builder < / code > . For example , * calling < code > play ( a1 ) . with ( a2 ) < / code > sets up the AnimatorSet to play * < code > a1 < / code > and < code > a2 < / code > at the same time , * < code > play ( a1 ) . before ( a2 ) < / code > sets up the AnimatorSet to play * < code > a1 < / code > first , followed by < code > a2 < / code > , and * < code > play ( a1 ) . after ( a2 ) < / code > sets up the AnimatorSet to play * < code > a2 < / code > first , followed by < code > a1 < / code > . * < p > Note that < code > play ( ) < / code > is the only way to tell the * < code > Builder < / code > the animation upon which the dependency is created , * so successive calls to the various functions in < code > Builder < / code > * will all refer to the initial parameter supplied in < code > play ( ) < / code > * as the dependency of the other animations . For example , calling * < code > play ( a1 ) . before ( a2 ) . before ( a3 ) < / code > will play both < code > a2 < / code > * and < code > a3 < / code > when a1 ends ; it does not set up a dependency between * < code > a2 < / code > and < code > a3 < / code > . < / p > * @ param anim The animation that is the dependency used in later calls to the * methods in the returned < code > Builder < / code > object . A null parameter will result * in a null < code > Builder < / code > return value . * @ return Builder The object that constructs the AnimatorSet based on the dependencies * outlined in the calls to < code > play < / code > and the other methods in the * < code > Builder < / code object . */ public Builder play ( Animator anim ) { } }
if ( anim != null ) { mNeedsSort = true ; return new Builder ( anim ) ; } return null ;
public class WaitFor { /** * Waits up to the provided wait time for a cookies with the provided name has a value equal to the * expected value . This information will be logged and recorded , with a * screenshot for traceability and added debugging support . * @ param cookieName the name of the cookie * @ param expectedCookieValue the expected value of the cookie * @ param seconds the number of seconds to wait */ public void cookieEquals ( double seconds , String cookieName , String expectedCookieValue ) { } }
double end = System . currentTimeMillis ( ) + ( seconds * 1000 ) ; while ( app . is ( ) . cookiePresent ( cookieName ) && System . currentTimeMillis ( ) < end ) ; if ( app . is ( ) . cookiePresent ( cookieName ) ) { while ( ! app . get ( ) . cookieValue ( cookieName ) . equals ( expectedCookieValue ) && System . currentTimeMillis ( ) < end ) ; } double timeTook = Math . min ( ( seconds * 1000 ) - ( end - System . currentTimeMillis ( ) ) , seconds * 1000 ) / 1000 ; checkCookieEquals ( cookieName , expectedCookieValue , seconds , timeTook ) ;
public class ExtractMsgsVisitor { /** * Implementations for specific nodes . */ @ Override protected void visitMsgNode ( MsgNode node ) { } }
MsgPartsAndIds msgPartsAndIds = MsgUtils . buildMsgPartsAndComputeMsgIdForDualFormat ( node ) ; SoyMsg . Builder builder = SoyMsg . builder ( ) . setId ( msgPartsAndIds . id ) ; if ( node . getMeaning ( ) != null ) { builder . setMeaning ( node . getMeaning ( ) ) ; } SoyMsg msg = builder . setDesc ( node . getDesc ( ) ) . setIsHidden ( node . isHidden ( ) ) . setContentType ( node . getContentType ( ) ) . addSourceLocation ( node . getSourceLocation ( ) ) . setIsPlrselMsg ( node . isPlrselMsg ( ) ) . setParts ( msgPartsAndIds . parts ) . build ( ) ; msgs . add ( msg ) ;
public class FoundationLoggingThrowableInformationPatternConverter { /** * Method generates abbreviated exception message . * @ param message * Original log message * @ param throwable * The attached throwable * @ return Abbreviated exception message */ private String generateAbbreviatedExceptionMessage ( final Throwable throwable ) { } }
final StringBuilder builder = new StringBuilder ( ) ; builder . append ( ": " ) ; builder . append ( throwable . getClass ( ) . getCanonicalName ( ) ) ; builder . append ( ": " ) ; builder . append ( throwable . getMessage ( ) ) ; Throwable cause = throwable . getCause ( ) ; while ( cause != null ) { builder . append ( '\n' ) ; builder . append ( "Caused by: " ) ; builder . append ( cause . getClass ( ) . getCanonicalName ( ) ) ; builder . append ( ": " ) ; builder . append ( cause . getMessage ( ) ) ; // make sure the exception cause is not itself to prevent infinite // looping assert ( cause != cause . getCause ( ) ) ; cause = ( cause == cause . getCause ( ) ? null : cause . getCause ( ) ) ; } return builder . toString ( ) ;
public class EC2Context { /** * < br > * Needed AWS actions : * < ul > * < li > autoscaling : DescribeAutoScalingGroups < / li > * < / ul > * @ param autoScalingGroupName the name to search for * @ return the given auto scaling group */ public AutoScalingGroup getAutoScalingGroup ( String autoScalingGroupName ) { } }
Preconditions . checkArgument ( autoScalingGroupName != null && ! autoScalingGroupName . isEmpty ( ) ) ; DescribeAutoScalingGroupsRequest req = new DescribeAutoScalingGroupsRequest ( ) ; req . setAutoScalingGroupNames ( Collections . singleton ( autoScalingGroupName ) ) ; DescribeAutoScalingGroupsResult result = this . autoScalingClient . describeAutoScalingGroups ( req ) ; if ( result . getAutoScalingGroups ( ) . size ( ) != 1 ) { throw new IllegalStateException ( "Found multiple auto scaling groups" ) ; } return result . getAutoScalingGroups ( ) . get ( 0 ) ;
public class MMCIFFileTools { /** * Converts a SpaceGroup object to a { @ link Symmetry } object . * @ param sg * @ return */ public static Symmetry convertSpaceGroupToSymmetry ( SpaceGroup sg ) { } }
Symmetry sym = new Symmetry ( ) ; sym . setSpace_group_name_H_M ( sg . getShortSymbol ( ) ) ; // TODO do we need to fill any of the other values ? return sym ;
public class SSLConfigManager {
    /**
     * This method removes an SSL config from the SSLConfigManager map.
     * (Note: the original javadoc said "adds", but the code only removes the alias.)
     *
     * @param alias     the alias of the SSL config to remove
     * @param sslConfig the SSL config being removed — currently unused by this method;
     *                  presumably kept for interface symmetry. TODO confirm.
     * @throws Exception
     */
    public synchronized void removeSSLConfigFromMap(String alias, SSLConfig sslConfig) throws Exception {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.entry(tc, "removeSSLConfigFromMap", new Object[]{alias});

        sslConfigMap.remove(alias);

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.exit(tc, "removeSSLConfigFromMap");
    }
}
public class FilterFactories { /** * Creates a { @ link Filter } . * @ param filterFactoryFqcn The fully qualified class name of the { @ link FilterFactory } * @ param params The arguments to the { @ link FilterFactory } */ public static Filter createFilter ( String filterFactoryFqcn , FilterFactoryParams params ) throws FilterFactory . FilterNotCreatedException { } }
FilterFactory filterFactory = createFilterFactory ( filterFactoryFqcn ) ; return filterFactory . createFilter ( params ) ;
public class OAuthUtils { /** * Returns a signature base string . The signature base string * is a consistent , reproducible concatenation of several of * the HTTP request elements into a single string . * The signature base string includes the following components of the HTTP request : * < ul > * < li > the HTTP request method ( e . g . , " GET " , " POST " , etc . ) < / li > * < li > the authority as declared by the HTTP " Host " request header field < / li > * < li > the path and query components of the request resource URI < / li > * < li > the protocol parameters excluding the " oauth _ signature " < / li > * < li > parameters included in the request entity - body if they comply with * the strict restrictions defined in Section 3.4.1.3 < / li > * < / ul > * NOTE 1 : only 4 first parameter sources are used for now . * NOTE 2 : no percent encoding for custom HTTP methods for now . * @ see < a href = " http : / / tools . ietf . org / html / rfc5849 # section - 3.4.1 " > 3.4.1. * Signature Base String < / a > * @ param requestMethod request method * @ param requestUrl request url * @ param protocolParameters protocol parameters * @ return signature base string * @ throws AuthException if some of parameters has unacceptable value */ public static String getSignatureBaseString ( String requestMethod , String requestUrl , Map < String , String > protocolParameters ) throws AuthException { } }
StringBuilder sb = new StringBuilder ( ) ; sb . append ( requestMethod . toUpperCase ( ) ) . append ( "&" ) . append ( AuthUtils . percentEncode ( normalizeUrl ( requestUrl ) ) ) . append ( "&" ) . append ( AuthUtils . percentEncode ( normalizeParameters ( requestUrl , protocolParameters ) ) ) ; return sb . toString ( ) ;
public class ProgressiveJpegParser {
    /**
     * If this is the first time calling this method, the buffer will be checked to make sure it
     * starts with SOI marker (0xffd8). If the image has been identified as a non-JPEG, data will be
     * ignored and false will be returned immediately on all subsequent calls.
     * This object maintains state of the position of the last read byte. On repeated calls to this
     * method, it will continue from where it left off.
     *
     * @param encodedImage Next set of bytes received by the caller
     * @return true if a new full scan has been found
     */
    public boolean parseMoreData(final EncodedImage encodedImage) {
        // Once flagged as non-JPEG, never parse again.
        if (mParserState == NOT_A_JPEG) {
            return false;
        }
        final int dataBufferSize = encodedImage.getSize();
        // Is there any new data to parse?
        // mBytesParsed might be greater than size of dataBuffer - that happens when
        // we skip more data than is available to read inside doParseMoreData method
        if (dataBufferSize <= mBytesParsed) {
            return false;
        }
        // Wrap the image stream in a pooled buffered stream so parsing does not
        // allocate a fresh buffer per call.
        final InputStream bufferedDataStream = new PooledByteArrayBufferedInputStream(
                encodedImage.getInputStream(), mByteArrayPool.get(BUFFER_SIZE), mByteArrayPool);
        try {
            // Fast-forward past everything already consumed on previous calls.
            StreamUtil.skip(bufferedDataStream, mBytesParsed);
            return doParseMoreData(bufferedDataStream);
        } catch (IOException ioe) {
            // Does not happen - streams returned by PooledByteBuffers do not throw IOExceptions
            Throwables.propagate(ioe);
            return false;
        } finally {
            Closeables.closeQuietly(bufferedDataStream);
        }
    }
}
public class UaaStringUtils { /** * Hide the passwords and secrets in a config map ( e . g . for logging ) . * @ param map a map with String keys ( e . g . Properties ) and String or nested * map values * @ return new properties with no plaintext passwords and secrets */ public static Map < String , ? > hidePasswords ( Map < String , ? > map ) { } }
Map < String , Object > result = new LinkedHashMap < String , Object > ( ) ; result . putAll ( map ) ; for ( String key : map . keySet ( ) ) { Object value = map . get ( key ) ; if ( value instanceof String ) { if ( isPassword ( key ) ) { result . put ( key , "#" ) ; } } else if ( value instanceof Map ) { @ SuppressWarnings ( "unchecked" ) Map < String , ? > bare = ( Map < String , ? > ) value ; result . put ( key , hidePasswords ( bare ) ) ; } } return result ;
public class SdkDigestInputStream {
    /**
     * Skips over and discards <code>n</code> bytes of data from this input stream, while taking
     * the skipped bytes into account for digest calculation. The <code>skip</code> method may,
     * for a variety of reasons, end up skipping over some smaller number of bytes, possibly
     * <code>0</code>. The actual number of bytes skipped is returned. If <code>n</code> is
     * negative, no bytes are skipped.
     * This implementation repeatedly reads into a bounded buffer (so the skipped bytes flow
     * through the digesting <code>read</code>) until <code>n</code> bytes have been consumed
     * or the end of the stream has been reached.
     *
     * @param n the number of bytes to be skipped.
     * @return the actual number of bytes skipped.
     * @exception IOException if the stream does not support seek, or if some other I/O error occurs.
     */
    @Override
    public final long skip(final long n) throws IOException {
        if (n <= 0)
            return n; // nothing to do; non-positive n is returned unchanged
        // Buffer is capped at SKIP_BUF_SIZE so a huge n does not allocate a huge array.
        byte[] b = new byte[(int) Math.min(SKIP_BUF_SIZE, n)];
        long m = n; // remaining number of bytes to read
        while (m > 0) {
            int len = read(b, 0, (int) Math.min(m, b.length));
            if (len == -1)
                return n - m; // EOF before n bytes: report what was actually consumed
            m -= len;
        }
        assert (m == 0);
        return n;
    }
}
public class AceDataset {
    /**
     * Add a Weather Station to the dataset.
     * Add a new Weather Station from a {@code byte[]} consisting of the JSON
     * for that weather station data.
     *
     * @param source JSON weather station data
     * @throws IOException if there is an I/O error
     */
    public void addWeather(byte[] source) throws IOException {
        AceWeather weather = new AceWeather(source);
        String wstId = weather.getValue("wst_id");
        String climId = weather.getValue("clim_id");
        String wid = weather.getId();
        // A repeated weather id is logged but not stored again.
        if (this.weatherMap.containsKey(wid)) {
            LOG.error("Duplicate data found for wst_id: {}", wstId);
        } else {
            this.weatherMap.put(wid, weather);
        }
        // Index by station id, qualified with the climate id when one is present.
        if (climId == null) {
            this.widMap.put(wstId, wid);
        } else {
            this.widMap.put(wstId + climId, wid);
        }
        // Add default data link for the case survey data do not provide climate ID
        // (a climate id starting with "0" also overrides the bare-station entry).
        if (!widMap.containsKey(wstId) || (climId != null && climId.startsWith("0"))) {
            this.widMap.put(wstId, wid);
        }
    }
}
public class InstrumentsNoDelayLoader { /** * the files are stored in folder + versiob + build For instance instruments version 4.6 build * 46000 will be in / instruments _ no _ delay / 4.6/46000/ */ private String pathForVersion ( InstrumentsVersion version ) { } }
String path = version . getVersion ( ) + File . separatorChar + version . getBuild ( ) ; return path ;
public class XStreamFactory { /** * Create XStream for chart template load / save . * @ return XStream */ public static XStream createChartTemplateXStream ( ) { } }
XStream xstream = new XStream ( new DomDriver ( "UTF-8" ) ) ; xstream . setMode ( XStream . NO_REFERENCES ) ; xstream . registerConverter ( new FontConverter ( ) ) ; xstream . alias ( "chart-template" , ChartTemplate . class ) ; xstream . useAttributeFor ( ChartTemplate . class , "version" ) ; xstream . alias ( "color" , Color . class ) ; return xstream ;
public class ServiceSettingMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param serviceSetting     the setting to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving each bound field
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ServiceSetting serviceSetting, ProtocolMarshaller protocolMarshaller) {
        if (serviceSetting == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each field is written against its generated binding descriptor.
            protocolMarshaller.marshall(serviceSetting.getSettingId(), SETTINGID_BINDING);
            protocolMarshaller.marshall(serviceSetting.getSettingValue(), SETTINGVALUE_BINDING);
            protocolMarshaller.marshall(serviceSetting.getLastModifiedDate(), LASTMODIFIEDDATE_BINDING);
            protocolMarshaller.marshall(serviceSetting.getLastModifiedUser(), LASTMODIFIEDUSER_BINDING);
            protocolMarshaller.marshall(serviceSetting.getARN(), ARN_BINDING);
            protocolMarshaller.marshall(serviceSetting.getStatus(), STATUS_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * Returns the {@code IfcCableCarrierSegmentTypeEnum} EEnum, resolving it lazily from the
     * registered package on first access and caching it thereafter.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcCableCarrierSegmentTypeEnumEEnum() {
        if (ifcCableCarrierSegmentTypeEnumEEnum == null) {
            // Index 790 is the generated classifier position of this enum in the package.
            ifcCableCarrierSegmentTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(790);
        }
        return ifcCableCarrierSegmentTypeEnumEEnum;
    }
}
public class RelativeDateFormat { /** * Set capitalizationOfRelativeUnitsForListOrMenu , capitalizationOfRelativeUnitsForStandAlone */ private void initCapitalizationContextInfo ( ULocale locale ) { } }
ICUResourceBundle rb = ( ICUResourceBundle ) UResourceBundle . getBundleInstance ( ICUData . ICU_BASE_NAME , locale ) ; try { ICUResourceBundle rdb = rb . getWithFallback ( "contextTransforms/relative" ) ; int [ ] intVector = rdb . getIntVector ( ) ; if ( intVector . length >= 2 ) { capitalizationOfRelativeUnitsForListOrMenu = ( intVector [ 0 ] != 0 ) ; capitalizationOfRelativeUnitsForStandAlone = ( intVector [ 1 ] != 0 ) ; } } catch ( MissingResourceException e ) { // use default }
public class LenientAuthorizationCodeTokenRequest {
    /**
     * Executes request for an access token, and returns the HTTP response, tolerating
     * servers that report an OAuth error with a 200 status code.
     * Callers should call {@link HttpResponse#disconnect} when the returned HTTP response
     * object is no longer needed, unless the response stream is properly closed.
     *
     * @return successful access token response, or null when the successful response
     *         carries no message body
     * @throws TokenResponseException        for an error response
     * @throws LenientTokenResponseException for a 200 response whose body contains an error
     * @throws IOException                   on transport failure
     */
    private TokenResponse executeLeniently() throws IOException {
        // must set clientAuthentication as last execute interceptor in case it
        // needs to sign request
        HttpRequestFactory requestFactory =
                getTransport().createRequestFactory(new HttpRequestInitializer() {
                    public void initialize(HttpRequest request) throws IOException {
                        if (getRequestInitializer() != null) {
                            getRequestInitializer().initialize(request);
                        }
                        final HttpExecuteInterceptor interceptor = request.getInterceptor();
                        request.setInterceptor(new HttpExecuteInterceptor() {
                            public void intercept(HttpRequest request) throws IOException {
                                if (interceptor != null) {
                                    interceptor.intercept(request);
                                }
                                if (getClientAuthentication() != null) {
                                    getClientAuthentication().intercept(request);
                                }
                            }
                        });
                    }
                });
        // make request
        HttpRequest request =
                requestFactory.buildPostRequest(getTokenServerUrl(), new UrlEncodedContent(this));
        request.setParser(new JsonObjectParser(getJsonFactory()));
        request.setThrowExceptionOnExecuteError(false);
        HttpResponse response = request.execute();
        if (response.isSuccessStatusCode()) {
            if (!HttpResponseUtils.hasMessageBody(response)) {
                return null;
            }
            // check and see if status code is 200 but has error response
            String responseContent = HttpResponseUtils.parseAsStringWithoutClosing(response);
            TokenResponse tokenResponse = response.getRequest().getParser()
                    .parseAndClose(new StringReader(responseContent), TokenResponse.class);
            if (tokenResponse.containsKey("error")) {
                throw LenientTokenResponseException.from(getJsonFactory(), response, responseContent);
            }
            // FIX: the original parsed the identical content a second time here;
            // reuse the TokenResponse already produced above.
            return tokenResponse;
        }
        throw TokenResponseException.from(getJsonFactory(), response);
    }
}
public class DataLabelingServiceClient { /** * Creates an instruction for how data should be labeled . * < p > Sample code : * < pre > < code > * try ( DataLabelingServiceClient dataLabelingServiceClient = DataLabelingServiceClient . create ( ) ) { * String formattedParent = DataLabelingServiceClient . formatProjectName ( " [ PROJECT ] " ) ; * Instruction instruction = Instruction . newBuilder ( ) . build ( ) ; * CreateInstructionRequest request = CreateInstructionRequest . newBuilder ( ) * . setParent ( formattedParent ) * . setInstruction ( instruction ) * . build ( ) ; * Instruction response = dataLabelingServiceClient . createInstructionAsync ( request ) . get ( ) ; * < / code > < / pre > * @ param request The request object containing all of the parameters for the API call . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Instruction , CreateInstructionMetadata > createInstructionAsync ( CreateInstructionRequest request ) { } }
return createInstructionOperationCallable ( ) . futureCall ( request ) ;
public class MongoDBQueryModelDAO {

    /**
     * Saves the given entity while enforcing access control: existing documents require
     * write permission; new documents get an id (if absent) and access control assigned.
     * If a MongoDB session is active the write is deferred to the session; otherwise it
     * is performed immediately.
     *
     * @param object the entity to save
     * @param accessControlContext context used to verify or assign access control
     * @throws OptimisticLockException if a concurrent modification is detected
     * @throws AccessControlException if the caller lacks write access
     * @throws JeppettoException on other persistence failures
     */
    @Override
    public void save(T object, AccessControlContext accessControlContext)
            throws OptimisticLockException, AccessControlException, JeppettoException {
        ensureAccessControlEnabled();
        // Wrap the entity so field-level dirty tracking is available.
        T enhancedEntity = dirtyableDBObjectEnhancer.enhance(object);
        DirtyableDBObject dbo = (DirtyableDBObject) enhancedEntity;
        if (dbo.isPersisted(dbCollection)) {
            // Updating an existing document: the caller must have write access.
            verifyWriteAllowed(dbo, accessControlContext);
        } else {
            if (dbo.get(ID_FIELD) == null) {
                // If the id isn't explicitly set, assume intent is for mongo ids
                dbo.put(ID_FIELD, new ObjectId());
            }
            assessAndAssignAccessControl(dbo, accessControlContext);
        }
        DBObject identifyingQuery = buildIdentifyingQuery(dbo);
        if (MongoDBSession.isActive()) {
            // Defer the write to the active session so it is flushed with the unit of work.
            MongoDBSession.trackForSave(this, identifyingQuery, enhancedEntity, createIdentifyingQueries(dbo));
        } else {
            trueSave(identifyingQuery, dbo);
        }
    }
}
public class FastPathResolver { /** * Strip away any " jar : " prefix from a filename URI , and convert it to a file path , handling possibly - broken * mixes of filesystem and URI conventions ; resolve relative paths relative to resolveBasePath . * @ param resolveBasePath * The base path . * @ param relativePath * The path to resolve relative to the base path . * @ return The resolved path . */ public static String resolve ( final String resolveBasePath , final String relativePath ) { } }
// See : http : / / stackoverflow . com / a / 17870390/3950982 // https : / / weblogs . java . net / blog / kohsuke / archive / 2007/04 / how _ to _ convert . html if ( relativePath == null || relativePath . isEmpty ( ) ) { return resolveBasePath == null ? "" : resolveBasePath ; } String prefix = "" ; boolean isAbsolutePath = false ; boolean isFileOrJarURL = false ; int startIdx = 0 ; if ( relativePath . regionMatches ( true , startIdx , "jar:" , 0 , 4 ) ) { // " jar : " prefix can be stripped startIdx += 4 ; isFileOrJarURL = true ; } if ( relativePath . regionMatches ( true , startIdx , "http://" , 0 , 7 ) ) { // Detect http : / / startIdx += 7 ; // Force protocol name to lowercase prefix = "http://" ; // Treat the part after the protocol as an absolute path , so the domain is not treated as a directory // relative to the current directory . isAbsolutePath = true ; // Don ' t un - escape percent encoding etc . } else if ( relativePath . regionMatches ( true , startIdx , "https://" , 0 , 8 ) ) { // Detect https : / / startIdx += 8 ; prefix = "https://" ; isAbsolutePath = true ; } else if ( relativePath . regionMatches ( true , startIdx , "jrt:" , 0 , 5 ) ) { // Detect jrt : startIdx += 4 ; prefix = "jrt:" ; isAbsolutePath = true ; } else if ( relativePath . regionMatches ( true , startIdx , "file:" , 0 , 5 ) ) { // Strip off any " file : " prefix from relative path startIdx += 5 ; if ( WINDOWS ) { if ( relativePath . startsWith ( "\\\\\\\\" , startIdx ) || relativePath . startsWith ( "////" , startIdx ) ) { // Windows UNC URL startIdx += 4 ; prefix = "//" ; isAbsolutePath = true ; } else { if ( relativePath . startsWith ( "\\\\" , startIdx ) ) { startIdx += 2 ; } } } if ( relativePath . startsWith ( "//" , startIdx ) ) { startIdx += 2 ; } isFileOrJarURL = true ; } else if ( WINDOWS && ( relativePath . startsWith ( "//" ) || relativePath . 
startsWith ( "\\\\" ) ) ) { // Windows UNC path startIdx += 2 ; prefix = "//" ; isAbsolutePath = true ; } // Handle Windows paths starting with a drive designation as an absolute path if ( WINDOWS ) { if ( relativePath . length ( ) - startIdx > 2 && Character . isLetter ( relativePath . charAt ( startIdx ) ) && relativePath . charAt ( startIdx + 1 ) == ':' ) { isAbsolutePath = true ; } else if ( relativePath . length ( ) - startIdx > 3 && ( relativePath . charAt ( startIdx ) == '/' || relativePath . charAt ( startIdx ) == '\\' ) && Character . isLetter ( relativePath . charAt ( startIdx + 1 ) ) && relativePath . charAt ( startIdx + 2 ) == ':' ) { isAbsolutePath = true ; startIdx ++ ; } } // Catch - all for paths starting with separator if ( relativePath . length ( ) - startIdx > 1 && ( relativePath . charAt ( startIdx ) == '/' || relativePath . charAt ( startIdx ) == '\\' ) ) { isAbsolutePath = true ; } // Normalize the path , then add any UNC or URL prefix String pathStr = normalizePath ( startIdx == 0 ? relativePath : relativePath . substring ( startIdx ) , isFileOrJarURL ) ; if ( ! pathStr . equals ( "/" ) ) { // Remove any " ! / " on end of URL if ( pathStr . endsWith ( "/" ) ) { pathStr = pathStr . substring ( 0 , pathStr . length ( ) - 1 ) ; } if ( pathStr . endsWith ( "!" ) ) { pathStr = pathStr . substring ( 0 , pathStr . length ( ) - 1 ) ; } if ( pathStr . endsWith ( "/" ) ) { pathStr = pathStr . substring ( 0 , pathStr . length ( ) - 1 ) ; } if ( pathStr . isEmpty ( ) ) { pathStr = "/" ; } } // Sanitize path ( resolve " . . " sections , collapse " / / " double separators , etc . ) String pathResolved ; if ( isAbsolutePath || resolveBasePath == null || resolveBasePath . isEmpty ( ) ) { // There is no base path to resolve against , or path is an absolute path or http ( s ) : / / URL // ( ignore the base path ) pathResolved = FileUtils . 
sanitizeEntryPath ( pathStr , /* removeInitialSlash = */ false ) ; } else { // Path is a relative path - - resolve it relative to the base path pathResolved = FileUtils . sanitizeEntryPath ( resolveBasePath + ( resolveBasePath . endsWith ( "/" ) ? "" : "/" ) + pathStr , /* removeInitialSlash = */ false ) ; } // Add any prefix back , e . g . " https : / / " return prefix . isEmpty ( ) ? pathResolved : prefix + pathResolved ;
public class HttpClientFactory {

    /**
     * Configures the HttpClient to skip SSL server-certificate verification by
     * registering an "https" scheme whose SSL context trusts every certificate.
     *
     * @param client the HttpClient to configure
     * @throws NoSuchAlgorithmException if the SSL context cannot be created
     * @throws KeyManagementException if the SSL context cannot be initialized
     */
    static public void configureWithNoSslCertificateVerification(HttpClient client)
            throws NoSuchAlgorithmException, KeyManagementException {
        // Build the trust-everything scheme and register it on the client's registry.
        final Scheme trustAllHttps = SchemeFactory.createDoNotVerifyHttpsScheme();
        client.getConnectionManager().getSchemeRegistry().register(trustAllHttps);
    }
}
public class PForDelta {

    /**
     * Compresses a block of blockSize integers using PForDelta with the given
     * parameter b (number of bits per regular slot). Values that do not fit in
     * b bits are stored as exceptions: their low b bits in the regular slot and
     * their position/high bits in a Simple16-compressed exception area.
     *
     * @param inputBlock the block to be compressed
     * @param bits the value of the parameter b
     * @param blockSize the block size
     * @return the compressed block
     */
    public static int[] compressOneBlock(int[] inputBlock, int bits, int blockSize) {
        // Scratch area: first expNum entries hold exception positions, the next
        // expNum entries hold the exceptions' high bits.
        int[] expAux = new int[blockSize * 2];
        // Worst-case compressed size in bits, rounded up to whole ints below.
        int maxCompBitSize = HEADER_SIZE + blockSize * (MAX_BITS + MAX_BITS + MAX_BITS) + 32;
        int[] tmpCompressedBlock = new int[(maxCompBitSize >>> 5)];
        int outputOffset = HEADER_SIZE;
        int expUpperBound = 1 << bits; // values >= 2^bits become exceptions
        int expNum = 0;
        // First pass: count exceptions so the aux layout offsets are known.
        for (int elem : inputBlock) {
            if (elem >= expUpperBound) {
                expNum++;
            }
        }
        int expIndex = 0;
        // compress the b-bit slots
        for (int i = 0; i < blockSize; ++i) {
            if (inputBlock[i] < expUpperBound) {
                writeBits(tmpCompressedBlock, inputBlock[i], outputOffset, bits);
            } else // exp
            {
                // store the lower bits-bits of the exception
                writeBits(tmpCompressedBlock, inputBlock[i] & MASK[bits], outputOffset, bits);
                // write the position of exception
                expAux[expIndex] = i;
                // write the higher 32-bits bits of the exception
                expAux[expIndex + expNum] = (inputBlock[i] >>> bits) & MASK[32 - bits];
                expIndex++;
            }
            outputOffset += bits;
        }
        // the first int in the compressed block stores the value of b
        // and the number of exceptions
        // tmpCompressedBlock[0] = ((bits & MASK[POSSIBLE_B_BITS]) <<
        // (31 - POSSIBLE_B_BITS)) | (expNum & MASK[31 - POSSIBLE_B_BITS]);
        tmpCompressedBlock[0] = ((bits & MASK[10]) << 10) | (expNum & 0x3ff);
        // The second header int stores the last value of the block uncompressed.
        tmpCompressedBlock[1] = inputBlock[blockSize - 1];
        // compress exceptions
        if (expNum > 0) {
            int compressedBitSize = compressBlockByS16(tmpCompressedBlock, outputOffset, expAux, expNum * 2);
            outputOffset += compressedBitSize;
        }
        // discard the redundant parts in the tmpCompressedBlock
        int compressedSizeInInts = (outputOffset + 31) >>> 5;
        int[] compBlock;
        compBlock = new int[compressedSizeInInts];
        System.arraycopy(tmpCompressedBlock, 0, compBlock, 0, compressedSizeInInts);
        return compBlock;
    }
}
public class PluginCommandLine { /** * Returns < code > true < / code > if the option is present . * @ param optionName * The option name * @ return < code > true < / code > if the option is present * @ see it . jnrpe . ICommandLine # hasOption ( String ) */ public boolean hasOption ( final String optionName ) { } }
if ( optionName . length ( ) == 1 ) { return hasOption ( optionName . charAt ( 0 ) ) ; } return commandLine . hasOption ( "--" + optionName ) ;
public class CommerceUserSegmentEntryUtil { /** * Returns the last commerce user segment entry in the ordered set where groupId = & # 63 ; . * @ param groupId the group ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce user segment entry , or < code > null < / code > if a matching commerce user segment entry could not be found */ public static CommerceUserSegmentEntry fetchByGroupId_Last ( long groupId , OrderByComparator < CommerceUserSegmentEntry > orderByComparator ) { } }
return getPersistence ( ) . fetchByGroupId_Last ( groupId , orderByComparator ) ;
public class ExcelReader { /** * 读取Excel为Bean的列表 * @ param < T > Bean类型 * @ param headerRowIndex 标题所在行 , 如果标题行在读取的内容行中间 , 这行做为数据将忽略 , , 从0开始计数 * @ param startRowIndex 起始行 ( 包含 , 从0开始计数 ) * @ param beanType 每行对应Bean的类型 * @ return Map的列表 * @ since 4.0.1 */ public < T > List < T > read ( int headerRowIndex , int startRowIndex , Class < T > beanType ) { } }
return read ( headerRowIndex , startRowIndex , Integer . MAX_VALUE , beanType ) ;
public class ObjectDigestUtil { /** * 将Object按照key1 = value1 & key2 = value2的形式拼接起来 , 程序不递归 。 Value需为基本类型 , * 或者其toString方法能表示其实际的值 * @ param o * @ return */ private static String getOrderedMapString ( Map < Object , Object > map ) { } }
ArrayList < String > list = new ArrayList < String > ( ) ; for ( Map . Entry < Object , Object > entry : map . entrySet ( ) ) { if ( entry . getValue ( ) != null ) { Object key = entry . getKey ( ) ; Object value = entry . getValue ( ) ; String objectString = getObjectString ( value ) ; if ( ! StringUtils . isEmpty ( objectString ) ) { list . add ( getObjectString ( key ) + "=" + objectString + "&" ) ; } } } int size = list . size ( ) ; String [ ] arrayToSort = list . toArray ( new String [ size ] ) ; Arrays . sort ( arrayToSort , String . CASE_INSENSITIVE_ORDER ) ; StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < size ; i ++ ) { sb . append ( arrayToSort [ i ] ) ; } return "{" + sb . toString ( ) + "}" ;
public class Ginv { /** * Add a factor times one column to another column * @ param matrix * the matrix to modify * @ param diag * coordinate on the diagonal * @ param fromRow * first row to process * @ param col * column to process * @ param factor * factor to multiply */ public static void addColTimes ( Matrix matrix , long diag , long fromRow , long col , double factor ) { } }
long rows = matrix . getRowCount ( ) ; for ( long row = fromRow ; row < rows ; row ++ ) { matrix . setAsDouble ( matrix . getAsDouble ( row , col ) - factor * matrix . getAsDouble ( row , diag ) , row , col ) ; }
public class ExportConfigurationsRequestMarshaller {

    /**
     * Marshalls the given request via the protocol marshaller.
     *
     * @param exportConfigurationsRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller to write into
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(ExportConfigurationsRequest exportConfigurationsRequest, ProtocolMarshaller protocolMarshaller) {
        if (exportConfigurationsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Intentionally empty: this request type has no members to marshall. The
            // try/catch shell is kept for structural consistency with generated marshallers.
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class OutboundResourceadapterTypeImpl { /** * If not already created , a new < code > authentication - mechanism < / code > element will be created and returned . * Otherwise , the first existing < code > authentication - mechanism < / code > element will be returned . * @ return the instance defined for the element < code > authentication - mechanism < / code > */ public AuthenticationMechanismType < OutboundResourceadapterType < T > > getOrCreateAuthenticationMechanism ( ) { } }
List < Node > nodeList = childNode . get ( "authentication-mechanism" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new AuthenticationMechanismTypeImpl < OutboundResourceadapterType < T > > ( this , "authentication-mechanism" , childNode , nodeList . get ( 0 ) ) ; } return createAuthenticationMechanism ( ) ;
public class PseudoNthSpecifierChecker { /** * Add the { @ code : nth - child } elements . * @ see < a href = " http : / / www . w3 . org / TR / css3 - selectors / # nth - child - pseudo " > < code > : nth - child < / code > pseudo - class < / a > */ private void addNthChild ( ) { } }
for ( Node node : nodes ) { int count = 1 ; Node n = DOMHelper . getPreviousSiblingElement ( node ) ; while ( n != null ) { count ++ ; n = DOMHelper . getPreviousSiblingElement ( n ) ; } if ( specifier . isMatch ( count ) ) { result . add ( node ) ; } }
public class ProcessorManager { /** * This method processes the supplied information against the configured processor * details for the trace . * @ param trace The trace * @ param node The node being processed * @ param direction The direction * @ param headers The headers * @ param values The values */ public void process ( Trace trace , Node node , Direction direction , Map < String , ? > headers , Object ... values ) { } }
if ( log . isLoggable ( Level . FINEST ) ) { log . finest ( "ProcessManager: process trace=" + trace + " node=" + node + " direction=" + direction + " headers=" + headers + " values=" + values + " : available processors=" + processors ) ; } if ( trace . getTransaction ( ) != null ) { List < ProcessorWrapper > procs = null ; synchronized ( processors ) { procs = processors . get ( trace . getTransaction ( ) ) ; } if ( log . isLoggable ( Level . FINEST ) ) { log . finest ( "ProcessManager: trace name=" + trace . getTransaction ( ) + " processors=" + procs ) ; } if ( procs != null ) { for ( int i = 0 ; i < procs . size ( ) ; i ++ ) { procs . get ( i ) . process ( trace , node , direction , headers , values ) ; } } }
public class DatabaseOperationsInner {

    /**
     * Gets a list of operations performed on the database.
     *
     * @param nextPageLink the NextLink from the previous successful call to the List operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the {@code PagedList<DatabaseOperationInner>} object
     */
    public Observable<Page<DatabaseOperationInner>> listByDatabaseNextAsync(final String nextPageLink) {
        // Unwrap the ServiceResponse envelope, emitting only the page body.
        return listByDatabaseNextWithServiceResponseAsync(nextPageLink)
                .map(new Func1<ServiceResponse<Page<DatabaseOperationInner>>, Page<DatabaseOperationInner>>() {
                    @Override
                    public Page<DatabaseOperationInner> call(ServiceResponse<Page<DatabaseOperationInner>> response) {
                        return response.body();
                    }
                });
    }
}
public class ConfigurationImpl { /** * { @ inheritDoc } */ @ Override public Locale getLocale ( ) { } }
if ( root instanceof RootDocImpl ) return ( ( RootDocImpl ) root ) . getLocale ( ) ; else return Locale . getDefault ( ) ;
public class RateLimiterUtil {

    /**
     * Creates a customized Guava RateLimiter via reflection on Guava-internal classes.
     *
     * @param permitsPerSecond permitted requests per second (effectively the QPS)
     * @param maxBurstSeconds maximum burst buffer in seconds, used to absorb traffic
     *        spikes (Guava's default is 1s); up to permitsPerSecond * maxBurstSeconds
     *        tokens can accumulate while idle
     * @param filledWithToken whether the limiter should start pre-filled with
     *        permitsPerSecond * maxBurstSeconds tokens
     * @return the configured RateLimiter
     * @throws ReflectiveOperationException if the Guava internals are inaccessible
     *
     * NOTE(review): relies on Guava-internal classes (SleepingStopwatch, SmoothBursty)
     * and a private field name; may break across Guava versions.
     */
    public static RateLimiter create(double permitsPerSecond, double maxBurstSeconds, boolean filledWithToken)
            throws ReflectiveOperationException {
        // Obtain the package-private stopwatch Guava's limiter ticks on.
        Class<?> sleepingStopwatchClass = Class.forName("com.google.common.util.concurrent.RateLimiter$SleepingStopwatch");
        Method createStopwatchMethod = sleepingStopwatchClass.getDeclaredMethod("createFromSystemTimer");
        createStopwatchMethod.setAccessible(true);
        Object stopwatch = createStopwatchMethod.invoke(null);
        Class<?> burstyRateLimiterClass = Class.forName("com.google.common.util.concurrent.SmoothRateLimiter$SmoothBursty");
        Constructor<?> burstyRateLimiterConstructor = burstyRateLimiterClass.getDeclaredConstructors()[0];
        burstyRateLimiterConstructor.setAccessible(true);
        // set maxBurstSeconds
        RateLimiter rateLimiter = (RateLimiter) burstyRateLimiterConstructor.newInstance(stopwatch, maxBurstSeconds);
        rateLimiter.setRate(permitsPerSecond);
        if (filledWithToken) {
            // set storedPermits
            setField(rateLimiter, "storedPermits", permitsPerSecond * maxBurstSeconds);
        }
        return rateLimiter;
    }
}
public class RemoteRecordOwner {

    /**
     * Frees this remote record owner: releases child collections, records, and
     * databases, then detaches from the parent session object.
     *
     * NOTE(review): the original javadoc claimed the RMI object is explicitly
     * unexported, but the unexport call below is commented out — confirm intent.
     * Teardown order matters: children are freed before detaching from the parent.
     */
    public void free() {
        if (m_recordOwnerCollection != null)
            m_recordOwnerCollection.free();
        m_recordOwnerCollection = null;
        if (m_messageFilterList != null)
            m_messageFilterList.free();
        m_messageFilterList = null;
        // Close all records associated with this SessionObject
        if (m_vRecordList != null)
            m_vRecordList.free(this); // Free the records that belong to me
        m_vRecordList = null;
        if (m_databaseCollection != null)
            m_databaseCollection.free();
        m_databaseCollection = null;
        // try {
        // UnicastRemoteObject.unexportObject(this, false); // I'm no longer available for remote calls (RMI, not EJB)
        // } catch (NoSuchObjectException ex) {
        // ex.printStackTrace();
        if (m_sessionObjectParent != null)
            m_sessionObjectParent.removeRecordOwner(this);
        m_sessionObjectParent = null;
    }
}
public class WordVectorSerializer { /** * This method loads previously saved SequenceVectors model from File * @ param factory * @ param file * @ param < T > * @ return */ public static < T extends SequenceElement > SequenceVectors < T > readSequenceVectors ( @ NonNull SequenceElementFactory < T > factory , @ NonNull File file ) throws IOException { } }
return readSequenceVectors ( factory , new FileInputStream ( file ) ) ;
public class CategoryMarshaller {

    /**
     * Marshalls the given Category's members (code and name) via the protocol
     * marshaller.
     *
     * @param category the category to marshall; must not be null
     * @param protocolMarshaller the marshaller to write into
     * @throws SdkClientException if the category is null or marshalling fails
     */
    public void marshall(Category category, ProtocolMarshaller protocolMarshaller) {
        if (category == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(category.getCode(), CODE_BINDING);
            protocolMarshaller.marshall(category.getName(), NAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class LaJobRunner { protected void arrangePreparedAccessContext ( LaJobRuntime runtime ) { } }
if ( accessContextArranger == null ) { return ; } final AccessContextResource resource = createAccessContextResource ( runtime ) ; final AccessContext context = accessContextArranger . arrangePreparedAccessContext ( resource ) ; if ( context == null ) { String msg = "Cannot return null from access context arranger: " + accessContextArranger + " runtime=" + runtime ; throw new IllegalStateException ( msg ) ; } PreparedAccessContext . setAccessContextOnThread ( context ) ;
public class BundleUtils { /** * Discovers the bundle context for a bundle . If the bundle is an 4.1.0 or greater bundle it should have a method * that just returns the bundle context . Otherwise uses reflection to look for an internal bundle context . * @ param bundle the bundle from which the bundle context is needed * @ return corresponding bundle context or null if bundle context cannot be discovered */ public static BundleContext getBundleContext ( final Bundle bundle ) { } }
try { // first try to find the getBundleContext method ( OSGi spec > = 4.10) final Method method = Bundle . class . getDeclaredMethod ( "getBundleContext" ) ; if ( ! method . isAccessible ( ) ) { method . setAccessible ( true ) ; } return ( BundleContext ) method . invoke ( bundle ) ; } catch ( Exception e ) { // then try to find a field in the bundle that looks like a bundle context try { final Field [ ] fields = bundle . getClass ( ) . getDeclaredFields ( ) ; for ( Field field : fields ) { if ( BundleContext . class . isAssignableFrom ( field . getType ( ) ) ) { if ( ! field . isAccessible ( ) ) { field . setAccessible ( true ) ; } return ( BundleContext ) field . get ( bundle ) ; } } } catch ( Exception ignore ) { // ignore } } // well , discovery failed return null ;
public class WSJdbcResultSet {

    /**
     * Updates a column with a binary stream value. The updateXXX methods change column
     * values in the current row or the insert row only; updateRow or insertRow must be
     * called afterwards to push the change to the underlying database.
     *
     * @param arg0 the name of the column
     * @param arg1 the new column value
     * @param arg2 the length of the stream
     * @throws SQLException if a database access error occurs
     */
    public void updateBinaryStream(String arg0, InputStream arg1, int arg2) throws SQLException {
        try {
            rsetImpl.updateBinaryStream(arg0, arg1, arg2);
        } catch (SQLException ex) {
            // Record for first-failure data capture, then map to the WebSphere exception type.
            FFDCFilter.processException(ex, "com.ibm.ws.rsadapter.jdbc.WSJdbcResultSet.updateBinaryStream", "3075", this);
            throw WSJdbcUtil.mapException(this, ex);
        } catch (NullPointerException nullX) {
            // No FFDC code needed; we might be closed.
            throw runtimeXIfNotClosed(nullX);
        }
    }
}
public class Distance {

    /**
     * Computes the Kullback-Leibler divergence D(p || q), summing
     * p[i] * ln(p[i] / q[i]) only over components where both p[i] and q[i] are
     * non-zero. If no such component exists, returns positive infinity.
     *
     * @param p P vector
     * @param q Q vector
     * @return the Kullback-Leibler divergence between p and q
     */
    public static double KullbackLeiblerDivergence(double[] p, double[] q) {
        double divergence = 0;
        boolean anyOverlap = false;
        for (int i = 0; i < p.length; i++) {
            final boolean bothNonZero = (p[i] != 0) && (q[i] != 0);
            if (bothNonZero) {
                anyOverlap = true;
                divergence += p[i] * Math.log(p[i] / q[i]);
            }
        }
        // Disjoint supports: the divergence is unbounded.
        return anyOverlap ? divergence : Double.POSITIVE_INFINITY;
    }
}
public class OpenTSDBMain {

    /**
     * Returns the JVM agent properties, or null if the reflective call failed
     * (sun.misc.VMSupport is JDK-internal and may be absent or inaccessible).
     *
     * @return the agent properties, or null
     */
    protected static Properties getAgentProperties() {
        try {
            final Method accessor = Class.forName("sun.misc.VMSupport")
                    .getDeclaredMethod("getAgentProperties");
            accessor.setAccessible(true);
            return (Properties) accessor.invoke(null);
        } catch (Throwable reflectionFailure) {
            // Any failure (missing class, access denied, invocation error) yields null.
            return null;
        }
    }
}
public class ApiOvhMe { /** * Alter this object properties * REST : PUT / me / paymentMean / creditCard / { id } * @ param body [ required ] New object properties * @ param id [ required ] Id of the object */ public void paymentMean_creditCard_id_PUT ( Long id , OvhCreditCard body ) throws IOException { } }
String qPath = "/me/paymentMean/creditCard/{id}" ; StringBuilder sb = path ( qPath , id ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class GrailsASTUtils { /** * Set the method target of a MethodCallExpression to the first matching method with same number and type of arguments . * @ param methodCallExpression * @ param targetClassNode * @ param targetParameterClassTypes * @ return The method call expression */ public static MethodCallExpression applyMethodTarget ( final MethodCallExpression methodCallExpression , final ClassNode targetClassNode , final Class < ? > ... targetParameterClassTypes ) { } }
return applyMethodTarget ( methodCallExpression , targetClassNode , convertTargetParameterTypes ( targetParameterClassTypes ) ) ;
public class CloudStorageRetryHandler { /** * Records a retry attempt for the given StorageException , sleeping for an amount of time * dependent on the attempt number . Throws a StorageException if we ' ve exhausted all retries . * @ param exs The StorageException error that prompted this retry attempt . */ private void handleRetryForStorageException ( final StorageException exs ) throws StorageException { } }
retries ++ ; if ( retries > maxRetries ) { throw new StorageException ( exs . getCode ( ) , "All " + maxRetries + " retries failed. Waited a total of " + totalWaitTime + " ms between attempts" , exs ) ; } sleepForAttempt ( retries ) ;
public class CmsDocumentDependency {

    /**
     * Reads the information out of the given JSON object to fill the values of this
     * document: language variants, attachments, and each attachment's own language
     * variants. All failures are logged and swallowed (best effort).
     *
     * @param json the JSON object with the information about this document
     * @param rootPath the current path of the home division
     */
    public void fromJSON(JSONObject json, String rootPath) {
        try {
            // language versions
            if (json.has(JSON_LANGUAGES)) {
                JSONArray jsonLanguages = json.getJSONArray(JSON_LANGUAGES);
                for (int i = 0; i < jsonLanguages.length(); i++) {
                    JSONObject jsonLang = (JSONObject) jsonLanguages.get(i);
                    CmsDocumentDependency lang = new CmsDocumentDependency(null, jsonLang.getString(JSON_PATH));
                    // Recurse: a language variant may itself carry nested information.
                    lang.fromJSON(jsonLang, rootPath);
                    addVariant(lang);
                }
            }
            // attachments
            if (json.has(JSON_ATTACHMENTS)) {
                JSONArray jsonAttachments = json.getJSONArray(JSON_ATTACHMENTS);
                for (int i = 0; i < jsonAttachments.length(); i++) {
                    try {
                        JSONObject jsonAttachment = (JSONObject) jsonAttachments.get(i);
                        CmsDocumentDependency att = new CmsDocumentDependency(null, jsonAttachment.getString(JSON_PATH));
                        att.fromJSON(jsonAttachment, rootPath);
                        // language versions of attachment
                        if (jsonAttachment.has(JSON_LANGUAGES)) {
                            JSONArray jsonAttLanguages = jsonAttachment.getJSONArray(JSON_LANGUAGES);
                            for (int j = 0; j < jsonAttLanguages.length(); j++) {
                                JSONObject jsonAttLanguage = (JSONObject) jsonAttLanguages.get(j);
                                CmsDocumentDependency attLang = new CmsDocumentDependency(null, jsonAttLanguage.getString(JSON_PATH));
                                attLang.fromJSON(jsonAttLanguage, rootPath);
                                att.addVariant(attLang);
                            }
                        }
                        addAttachment(att);
                    } catch (Exception e) {
                        // One malformed attachment must not prevent reading the others.
                        LOG.error(e);
                    }
                }
            }
        } catch (Exception ex) {
            // Best effort: log and keep whatever was read so far.
            if (LOG.isErrorEnabled()) {
                LOG.error(ex.getLocalizedMessage(), ex);
            }
        }
    }
}
public class ASTPrinter { /** * prints an arg as * type name * @ param arg Arg to be printed * @ return pretty string */ public static String print ( Arg arg ) { } }
return printArgModifiers ( arg . modifiers ) + print ( arg . type ) + " " + arg . name ;
public class JavaInlineExpressionCompiler { /** * Append the inline code for the given XReturnLiteral . * @ param expression the expression of the operation . * @ param parentExpression is the expression that contains this one , or { @ code null } if the current expression is * the root expression . * @ param feature the feature that contains the expression . * @ param output the output . * @ return { @ code true } if a text was appended . */ protected Boolean _generate ( XReturnExpression expression , XExpression parentExpression , XtendExecutable feature , InlineAnnotationTreeAppendable output ) { } }
return generate ( expression . getExpression ( ) , parentExpression , feature , output ) ;
public class JsonModelGenerator { /** * Initialization . * @ param env * @ author vvakame */ public static void init ( ProcessingEnvironment env ) { } }
processingEnv = env ; typeUtils = processingEnv . getTypeUtils ( ) ; elementUtils = processingEnv . getElementUtils ( ) ;
public class CeylonUtil {

    /**
     * Returns the directory part of a path string, i.e. everything before the last
     * platform file separator, or null when the path contains no separator.
     *
     * @param path the input path
     * @return the directory part of the path, or null if there is none
     */
    public static String dirpart(final String path) {
        final int lastSeparator = path.lastIndexOf(File.separatorChar);
        // No separator at all means there is no directory part.
        return lastSeparator < 0 ? null : path.substring(0, lastSeparator);
    }
}
public class MemoryEntityLockStore {

    /**
     * Returns an IEntityLock[] based on the params, any or all of which may be null.
     * A null param means "any value", so {@code find(myType, myKey, null, null, null)}
     * will return all {@code IEntityLocks} for myType and myKey.
     *
     * @param entityType Class
     * @param entityKey String
     * @param lockType Integer - so we can accept a null value
     * @param expiration Date
     * @param lockOwner String
     * @return org.apereo.portal.concurrency.locking.IEntityLock[]
     * @exception LockingException - wraps an Exception specific to the store
     */
    @Override
    public IEntityLock[] find(Class entityType, String entityKey, Integer lockType,
            java.util.Date expiration, String lockOwner) throws LockingException {
        List locks = new ArrayList();
        Map cache = null;
        Collection caches = null;
        Iterator cacheIterator = null;
        Iterator cacheKeyIterator = null;
        Iterator keyIterator = null;
        IEntityLock lock = null;
        if (entityType == null) {
            // No type filter: search every per-type cache.
            caches = getLockCache().values();
        } else {
            caches = new ArrayList(1);
            caches.add(getLockCache(entityType));
        }
        cacheIterator = caches.iterator();
        while (cacheIterator.hasNext()) {
            cache = (Map) cacheIterator.next();
            cacheKeyIterator = cache.keySet().iterator();
            List keys = new ArrayList();
            // Synchronize on the cache only while collecting its keys. There is some
            // exposure here.
            synchronized (cache) {
                while (cacheKeyIterator.hasNext()) {
                    keys.add(cacheKeyIterator.next());
                }
            }
            keyIterator = keys.iterator();
            while (keyIterator.hasNext()) {
                lock = getLockFromCache(keyIterator.next(), cache);
                // Each null criterion matches anything.
                if ((lock != null)
                        && ((entityKey == null) || (entityKey.equals(lock.getEntityKey())))
                        && ((lockType == null) || (lockType.intValue() == lock.getLockType()))
                        && ((lockOwner == null) || (lockOwner.equals(lock.getLockOwner())))
                        && ((expiration == null) || (expiration.equals(lock.getExpirationTime())))) {
                    locks.add(lock);
                }
            }
        }
        return ((IEntityLock[]) locks.toArray(new IEntityLock[locks.size()]));
    }
}
public class JournalRecoveryLog { /** * Helper for the { @ link # log ( ConsumerJournalEntry ) } method . Writes the * values of a context multi - map . */ private < T > String writeMapValues ( String mapName , MultiValueMap < T > map ) { } }
StringBuffer buffer = new StringBuffer ( ) ; buffer . append ( " " + mapName + "\n" ) ; for ( Iterator < T > names = map . names ( ) ; names . hasNext ( ) ; ) { T name = names . next ( ) ; buffer . append ( " " ) . append ( name . toString ( ) ) . append ( "\n" ) ; String [ ] values = map . getStringArray ( name ) ; for ( String element : values ) { buffer . append ( " " ) . append ( element ) . append ( "\n" ) ; } } return buffer . toString ( ) ;
public class AzureFirewallsInner { /** * Creates or updates the specified Azure Firewall . * @ param resourceGroupName The name of the resource group . * @ param azureFirewallName The name of the Azure Firewall . * @ param parameters Parameters supplied to the create or update Azure Firewall operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < AzureFirewallInner > createOrUpdateAsync ( String resourceGroupName , String azureFirewallName , AzureFirewallInner parameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , azureFirewallName , parameters ) . map ( new Func1 < ServiceResponse < AzureFirewallInner > , AzureFirewallInner > ( ) { @ Override public AzureFirewallInner call ( ServiceResponse < AzureFirewallInner > response ) { return response . body ( ) ; } } ) ;
public class ApiOvhCdndedicated { /** * Add a backend IP * REST : POST / cdn / dedicated / { serviceName } / domains / { domain } / backends * @ param ip [ required ] IP to add to backends list * @ param serviceName [ required ] The internal name of your CDN offer * @ param domain [ required ] Domain of this object */ public OvhBackend serviceName_domains_domain_backends_POST ( String serviceName , String domain , String ip ) throws IOException { } }
String qPath = "/cdn/dedicated/{serviceName}/domains/{domain}/backends" ; StringBuilder sb = path ( qPath , serviceName , domain ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "ip" , ip ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhBackend . class ) ;
public class Client { /** * Gets a list of Event resources . ( 50 elements ) * @ return List of Event * @ throws OAuthSystemException - if there is a IOException reading parameters of the httpURLConnection * @ throws OAuthProblemException - if there are errors validating the OneloginOAuthJSONResourceResponse and throwOAuthProblemException is enabled * @ throws URISyntaxException - if there is an error when generating the target URL at the URIBuilder constructor * @ see com . onelogin . sdk . model . Event * @ see < a target = " _ blank " href = " https : / / developers . onelogin . com / api - docs / 1 / events / get - events " > Get Events documentation < / a > */ public List < Event > getEvents ( ) throws OAuthSystemException , OAuthProblemException , URISyntaxException { } }
HashMap < String , String > queryParameters = new HashMap < String , String > ( ) ; return getEvents ( queryParameters ) ;
public class JDBCUserRealm { private void loadUser ( String username ) { } }
try { if ( null == _con ) connectDatabase ( ) ; if ( null == _con ) throw new SQLException ( "Can't connect to database" ) ; PreparedStatement stat = _con . prepareStatement ( _userSql ) ; stat . setObject ( 1 , username ) ; ResultSet rs = stat . executeQuery ( ) ; if ( rs . next ( ) ) { Object key = rs . getObject ( _userTableKey ) ; put ( username , rs . getString ( _userTablePasswordField ) ) ; stat . close ( ) ; stat = _con . prepareStatement ( _roleSql ) ; stat . setObject ( 1 , key ) ; rs = stat . executeQuery ( ) ; while ( rs . next ( ) ) addUserToRole ( username , rs . getString ( _roleTableRoleField ) ) ; stat . close ( ) ; } } catch ( SQLException e ) { log . warn ( "UserRealm " + getName ( ) + " could not load user information from database" , e ) ; connectDatabase ( ) ; }
public class Objects { /** * Returns long value of argument , if possible , wrapped in Optional * Interprets String as Number */ public static Optional < Long > toLong ( Object arg ) { } }
if ( arg instanceof Number ) { return Optional . of ( ( ( Number ) arg ) . longValue ( ) ) ; } else if ( arg instanceof String ) { Optional < ? extends Number > optional = toNumber ( arg ) ; if ( optional . isPresent ( ) ) { return Optional . of ( optional . get ( ) . longValue ( ) ) ; } else { return Optional . empty ( ) ; } } else { return Optional . empty ( ) ; }
public class StubInvocationBenchmark {
    /**
     * Performs a benchmark for a trivial class creation using javassist.
     * Issues 20 stub invocations (10 single-argument, 10 triple-argument),
     * matching the {@code @OperationsPerInvocation(20)} annotation.
     *
     * @param blackHole A black hole for avoiding JIT erasure.
     */
    @ Benchmark @ OperationsPerInvocation ( 20 ) public void benchmarkJavassist ( Blackhole blackHole ) {
        // Single-argument overloads, one per value type.
        blackHole . consume ( javassistInstance . method ( booleanValue ) ) ;
        blackHole . consume ( javassistInstance . method ( byteValue ) ) ;
        blackHole . consume ( javassistInstance . method ( shortValue ) ) ;
        blackHole . consume ( javassistInstance . method ( intValue ) ) ;
        blackHole . consume ( javassistInstance . method ( charValue ) ) ;
        // NOTE(review): intValue is consumed a second time here (already used
        // above) — presumably a stand-in for another overload; confirm against
        // the stub's method signatures.
        blackHole . consume ( javassistInstance . method ( intValue ) ) ;
        blackHole . consume ( javassistInstance . method ( longValue ) ) ;
        blackHole . consume ( javassistInstance . method ( floatValue ) ) ;
        blackHole . consume ( javassistInstance . method ( doubleValue ) ) ;
        blackHole . consume ( javassistInstance . method ( stringValue ) ) ;
        // Triple-argument overloads mirroring the sequence above (including the
        // repeated intValue triple).
        blackHole . consume ( javassistInstance . method ( booleanValue , booleanValue , booleanValue ) ) ;
        blackHole . consume ( javassistInstance . method ( byteValue , byteValue , byteValue ) ) ;
        blackHole . consume ( javassistInstance . method ( shortValue , shortValue , shortValue ) ) ;
        blackHole . consume ( javassistInstance . method ( intValue , intValue , intValue ) ) ;
        blackHole . consume ( javassistInstance . method ( charValue , charValue , charValue ) ) ;
        blackHole . consume ( javassistInstance . method ( intValue , intValue , intValue ) ) ;
        blackHole . consume ( javassistInstance . method ( longValue , longValue , longValue ) ) ;
        blackHole . consume ( javassistInstance . method ( floatValue , floatValue , floatValue ) ) ;
        blackHole . consume ( javassistInstance . method ( doubleValue , doubleValue , doubleValue ) ) ;
        blackHole . consume ( javassistInstance . method ( stringValue , stringValue , stringValue ) ) ;
    }
}
public class CmsContainerpageService {
    /**
     * Generates the model resource data list.<p>
     *
     * The first entry is always a synthetic "default content" option (no
     * structure id); one entry per model resource follows, populated from the
     * gallery search index in the requested content locale.
     *
     * @param cms the cms context
     * @param resourceType the resource type name
     * @param modelResources the model resource
     * @param contentLocale the content locale
     * @return the model resources data
     * @throws CmsException if something goes wrong reading the resource information
     */
    public static List < CmsModelResourceInfo > generateModelResourceList ( CmsObject cms , String resourceType , List < CmsResource > modelResources , Locale contentLocale ) throws CmsException {
        List < CmsModelResourceInfo > result = new ArrayList < CmsModelResourceInfo > ( ) ;
        // Labels come from the workplace locale; content data from contentLocale.
        Locale wpLocale = OpenCms . getWorkplaceManager ( ) . getWorkplaceLocale ( cms ) ;
        // Synthetic first entry representing "use the default resource content".
        CmsModelResourceInfo defaultInfo = new CmsModelResourceInfo ( Messages . get ( ) . getBundle ( wpLocale ) . key ( Messages . GUI_TITLE_DEFAULT_RESOURCE_CONTENT_0 ) , Messages . get ( ) . getBundle ( wpLocale ) . key ( Messages . GUI_DESCRIPTION_DEFAULT_RESOURCE_CONTENT_0 ) , null ) ;
        defaultInfo . setResourceType ( resourceType ) ;
        result . add ( defaultInfo ) ;
        for ( CmsResource model : modelResources ) {
            // Title/description are taken from the gallery search index entry.
            CmsGallerySearchResult searchInfo = CmsGallerySearch . searchById ( cms , model . getStructureId ( ) , contentLocale ) ;
            CmsModelResourceInfo modelInfo = new CmsModelResourceInfo ( searchInfo . getTitle ( ) , searchInfo . getDescription ( ) , null ) ;
            modelInfo . addAdditionalInfo ( Messages . get ( ) . getBundle ( wpLocale ) . key ( Messages . GUI_LABEL_PATH_0 ) , cms . getSitePath ( model ) ) ;
            modelInfo . setResourceType ( resourceType ) ;
            modelInfo . setStructureId ( model . getStructureId ( ) ) ;
            result . add ( modelInfo ) ;
        }
        return result ;
    }
}
public class ConfigurationMBean { /** * uniqueObjectName * Make a unique jmx name for this configuration object * @ see org . browsermob . proxy . jetty . util . jmx . ModelMBeanImpl # uniqueObjectName ( javax . management . MBeanServer , java . lang . String ) */ public synchronized ObjectName uniqueObjectName ( MBeanServer server , String on ) { } }
ObjectName oName = null ; try { oName = new ObjectName ( on + ",config=" + _config . getClass ( ) . getName ( ) ) ; } catch ( Exception e ) { log . warn ( LogSupport . EXCEPTION , e ) ; } return oName ;
public class RequestParameterBuilder { /** * Adds a request parameter to the URL without specifying a data type of the given parameter value . Parameter ' s value is converted to JSON notation when * adding . Furthermore , it will be encoded according to the acquired encoding . * @ param name name of the request parameter * @ param value value of the request parameter * @ return RequestParameterBuilder updated this instance which can be reused * @ throws UnsupportedEncodingException DOCUMENT _ ME */ public RequestParameterBuilder paramJson ( String name , Object value ) throws UnsupportedEncodingException { } }
return paramJson ( name , value , null ) ;
public class AdminOperation { /** * create partitions in the broker * @ param topic topic name * @ param partitionNum partition numbers * @ param enlarge enlarge partition number if broker configuration has * setted * @ return partition number in the broker * @ throws IOException if an I / O error occurs */ public int createPartitions ( String topic , int partitionNum , boolean enlarge ) throws IOException { } }
KV < Receive , ErrorMapping > response = send ( new CreaterRequest ( topic , partitionNum , enlarge ) ) ; return Utils . deserializeIntArray ( response . k . buffer ( ) ) [ 0 ] ;
public class XExtensionManager {
    /**
     * Retrieves an extension instance by its unique URI. If the extension
     * has not been registered before, it is looked up in the local cache.
     * If it cannot be found in the cache, the manager attempts to download
     * it from its unique URI, and add it to the set of managed extensions.
     *
     * @param uri The unique URI of the requested extension.
     * @return The requested extension, or null if download/parsing failed.
     */
    public XExtension getByUri ( URI uri ) {
        XExtension extension = extensionMap . get ( uri ) ;
        if ( extension == null ) {
            try {
                extension = XExtensionParser . instance ( ) . parse ( uri ) ;
                register ( extension ) ;
                XLogging . log ( "Imported XES extension '" + extension . getUri ( ) + "' from remote source" , XLogging . Importance . DEBUG ) ;
            } catch ( IOException e ) {
                // Now do something if the Internet is down . . .
                // NOTE(review): the IOException is swallowed; on network failure
                // control falls through to cacheExtension with extension still
                // null and this method returns null — confirm that is intended.
            } catch ( ParserConfigurationException | SAXException e ) {
                e . printStackTrace ( ) ;
                return null ;
            }
            // Persist the (possibly just downloaded) extension locally.
            // NOTE(review): also executed after a failed download — confirm
            // cacheExtension tolerates a URI that was never registered.
            cacheExtension ( uri ) ;
        }
        return extension ;
    }
}
public class DynamicPipelineServiceImpl { /** * this is here for future expansion */ private Map < Environment , Collection < ArtifactIdentifier > > getArtifactIdentifiers ( List < Environment > environments ) { } }
Map < Environment , Collection < ArtifactIdentifier > > rt = new HashMap < > ( ) ; for ( Environment env : environments ) { Set < ArtifactIdentifier > ids = new HashSet < > ( ) ; if ( env . getUnits ( ) != null ) { for ( DeployableUnit du : env . getUnits ( ) ) { String artifactName = du . getName ( ) ; String artifactExtension = null ; int dotIdx = artifactName . lastIndexOf ( '.' ) ; if ( dotIdx > 0 ) { // If idx is 0 starts with a dot . . . in which case not an extension artifactName = artifactName . substring ( 0 , dotIdx ) ; artifactExtension = artifactName . substring ( dotIdx ) ; } ArtifactIdentifier id = new ArtifactIdentifier ( null , artifactName , du . getVersion ( ) , null , artifactExtension ) ; ids . add ( id ) ; } } rt . put ( env , new ArrayList < > ( ids ) ) ; } return rt ;
public class AuthenticationService {
    /**
     * Returns a formatted string containing an IP address, or list of IP
     * addresses, which represent the HTTP client and any involved proxies. As
     * the headers used to determine proxies can easily be forged, this data is
     * superficially validated to ensure that it at least looks like a list of
     * IPs.
     *
     * @param request The HTTP request to format.
     * @return A formatted string containing one or more IP addresses.
     */
    private String getLoggableAddress ( HttpServletRequest request ) {
        // Trust X-Forwarded-For only when it matches the expected IP-list shape.
        String forwardedFor = request . getHeader ( "X-Forwarded-For" ) ;
        boolean headerLooksValid = forwardedFor != null && X_FORWARDED_FOR . matcher ( forwardedFor ) . matches ( ) ;
        if ( headerLooksValid ) {
            return "[" + forwardedFor + ", " + request . getRemoteAddr ( ) + "]" ;
        }
        // Header absent or invalid: report only the socket-level source IP.
        return request . getRemoteAddr ( ) ;
    }
}
public class FeatureInfo { /** * Tells if any of the methods associated with this feature has the specified * name in bytecode . * @ param name a method name in bytecode * @ return < tt > true < / tt iff any of the methods associated with this feature * has the specified name in bytecode */ public boolean hasBytecodeName ( String name ) { } }
if ( featureMethod . hasBytecodeName ( name ) ) return true ; if ( dataProcessorMethod != null && dataProcessorMethod . hasBytecodeName ( name ) ) return true ; for ( DataProviderInfo provider : dataProviders ) if ( provider . getDataProviderMethod ( ) . hasBytecodeName ( name ) ) return true ; return false ;
public class DynamoDBTableMapper { /** * Transactionally writes objects specified by transactionWriteRequest by calling * { @ link com . amazonaws . services . dynamodbv2 . datamodeling . DynamoDBMapper # transactionWrite ( TransactionWriteRequest ) } API . * @ param transactionWriteRequest List of objects to write * @ see com . amazonaws . services . dynamodbv2 . datamodeling . DynamoDBMapper # transactionWrite ( TransactionWriteRequest ) */ public void transactionWrite ( TransactionWriteRequest transactionWriteRequest ) { } }
for ( TransactionWriteRequest . TransactionWriteOperation transactionWriteOperation : transactionWriteRequest . getTransactionWriteOperations ( ) ) { if ( ! model . targetType ( ) . equals ( transactionWriteOperation . getObject ( ) . getClass ( ) ) ) { throw new DynamoDBMappingException ( "Input object is of the classType: " + transactionWriteOperation . getObject ( ) . getClass ( ) + " but tableMapper is declared with classType: " + model . targetType ( ) ) ; } } mapper . transactionWrite ( transactionWriteRequest ) ;
public class SamlObjectSignatureValidator { /** * Gets role descriptor resolver . * @ param resolver the resolver * @ param context the context * @ param profileRequest the profile request * @ return the role descriptor resolver * @ throws Exception the exception */ protected RoleDescriptorResolver getRoleDescriptorResolver ( final MetadataResolver resolver , final MessageContext context , final RequestAbstractType profileRequest ) throws Exception { } }
val idp = casProperties . getAuthn ( ) . getSamlIdp ( ) ; return SamlIdPUtils . getRoleDescriptorResolver ( resolver , idp . getMetadata ( ) . isRequireValidMetadata ( ) ) ;
public class RuleProviderSorter {
    /**
     * Perform the entire sort operation: builds a directed graph of provider
     * ordering constraints, checks it for cycles, topologically sorts it, and
     * replaces {@code this.providers} with the sorted unmodifiable list,
     * stamping each AbstractRuleProvider with its execution index.
     */
    private void sort ( ) {
        // Vertices are the providers; edges (added by addProviderRelationships)
        // encode "must run before" constraints.
        DefaultDirectedWeightedGraph < RuleProvider , DefaultEdge > graph = new DefaultDirectedWeightedGraph < > ( DefaultEdge . class ) ;
        for ( RuleProvider provider : providers ) {
            graph . addVertex ( provider ) ;
        }
        addProviderRelationships ( graph ) ;
        // Fail fast on circular dependencies before attempting the sort.
        checkForCycles ( graph ) ;
        List < RuleProvider > result = new ArrayList < > ( this . providers . size ( ) ) ;
        TopologicalOrderIterator < RuleProvider , DefaultEdge > iterator = new TopologicalOrderIterator < > ( graph ) ;
        while ( iterator . hasNext ( ) ) {
            RuleProvider provider = iterator . next ( ) ;
            result . add ( provider ) ;
        }
        this . providers = Collections . unmodifiableList ( result ) ;
        // Record each provider's final position for later execution ordering.
        int index = 0 ;
        for ( RuleProvider provider : this . providers ) {
            if ( provider instanceof AbstractRuleProvider )
                ( ( AbstractRuleProvider ) provider ) . setExecutionIndex ( index ++ ) ;
        }
    }
}
public class PreloadFontManager { /** * Create and add a new embedding { @ link PreloadFont } if it is not yet * contained . * @ param aFontResProvider * The font resource provider to be added for embedding . May not be * < code > null < / code > . * @ return The created { @ link PreloadFont } . Never < code > null < / code > . */ @ Nonnull public PreloadFont getOrAddEmbeddingPreloadFont ( @ Nonnull final IHasFontResource aFontResProvider ) { } }
ValueEnforcer . notNull ( aFontResProvider , "FontResProvider" ) ; return getOrAddEmbeddingPreloadFont ( aFontResProvider . getFontResource ( ) ) ;
public class UpdateManagedInstanceRoleRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateManagedInstanceRoleRequest updateManagedInstanceRoleRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateManagedInstanceRoleRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateManagedInstanceRoleRequest . getInstanceId ( ) , INSTANCEID_BINDING ) ; protocolMarshaller . marshall ( updateManagedInstanceRoleRequest . getIamRole ( ) , IAMROLE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class OmemoService {
    /**
     * Decrypt an OMEMO message.
     *
     * @param managerGuard authenticated OmemoManager.
     * @param senderJid BareJid of the sender.
     * @param omemoElement omemoElement.
     * @return decrypted OmemoMessage object.
     * @throws CorruptedOmemoKeyException if the identityKey of the sender is damaged.
     * @throws CryptoFailedException if decryption fails.
     * @throws NoRawSessionException if we have no session with the device and it sent a normal (non-preKey) message.
     */
    OmemoMessage . Received decryptMessage ( OmemoManager . LoggedInOmemoManager managerGuard , BareJid senderJid , OmemoElement omemoElement ) throws CorruptedOmemoKeyException , CryptoFailedException , NoRawSessionException {
        OmemoManager manager = managerGuard . get ( ) ;
        // Identify the sending device from the element header's sender id.
        int senderId = omemoElement . getHeader ( ) . getSid ( ) ;
        OmemoDevice senderDevice = new OmemoDevice ( senderJid , senderId ) ;
        // Ratchet forward to recover the symmetric message key and auth tag.
        CipherAndAuthTag cipherAndAuthTag = getOmemoRatchet ( manager ) . retrieveMessageKeyAndAuthTag ( senderDevice , omemoElement ) ;
        // Retrieve senders fingerprint . TODO : Find a way to do this without the store .
        OmemoFingerprint senderFingerprint ;
        try {
            senderFingerprint = getOmemoStoreBackend ( ) . getFingerprint ( manager . getOwnDevice ( ) , senderDevice ) ;
        } catch ( NoIdentityKeyException e ) {
            // Decryption succeeding implies we have the sender's identity key.
            throw new AssertionError ( "Cannot retrieve OmemoFingerprint of sender although decryption was successful: " + e ) ;
        }
        // Reset the message counter .
        omemoStore . storeOmemoMessageCounter ( manager . getOwnDevice ( ) , senderDevice , 0 ) ;
        if ( omemoElement . isMessageElement ( ) ) {
            // Use symmetric message key to decrypt message payload .
            String plaintext = OmemoRatchet . decryptMessageElement ( omemoElement , cipherAndAuthTag ) ;
            return new OmemoMessage . Received ( omemoElement , cipherAndAuthTag . getKey ( ) , cipherAndAuthTag . getIv ( ) , plaintext , senderFingerprint , senderDevice , cipherAndAuthTag . wasPreKeyEncrypted ( ) ) ;
        } else {
            // KeyTransportMessages don ' t require decryption of the payload .
            return new OmemoMessage . Received ( omemoElement , cipherAndAuthTag . getKey ( ) , cipherAndAuthTag . getIv ( ) , null , senderFingerprint , senderDevice , cipherAndAuthTag . wasPreKeyEncrypted ( ) ) ;
        }
    }
}
public class AbstractLifeCycle { /** * Propagates a change of { @ link State } to all the * { @ link StateListener } registered with this life cycle * object . * @ see # changeState ( State , State ) * @ param from the old state * @ param to the new state * @ throws Exception if a listener fails to accept the change */ protected void publishState ( State from , State to ) throws Exception { } }
final Collection < StateListener > listeners = getStateListeners ( ) ; synchronized ( listeners ) { for ( StateListener listener : listeners ) { listener . stateChanged ( from , to ) ; } }
public class JobTrackerTraits { /** * Get the diagnostics for a given task * @ param taskId the id of the task * @ return an array of the diagnostic messages */ protected String [ ] getTaskDiagnosticsImpl ( TaskAttemptID taskId ) throws IOException { } }
List < String > taskDiagnosticInfo = null ; JobID jobId = taskId . getJobID ( ) ; TaskID tipId = taskId . getTaskID ( ) ; JobInProgressTraits job = getJobInProgress ( jobId ) ; if ( job != null && job . inited ( ) ) { TaskInProgress tip = job . getTaskInProgress ( tipId ) ; if ( tip != null ) { taskDiagnosticInfo = tip . getDiagnosticInfo ( taskId ) ; } } return ( ( taskDiagnosticInfo == null ) ? EMPTY_TASK_DIAGNOSTICS : taskDiagnosticInfo . toArray ( new String [ taskDiagnosticInfo . size ( ) ] ) ) ;
public class AFactoryAppBeansMysql { /** * < p > Get Service that prepare Database after full import * in lazy mode . < / p > * @ return IDelegator - preparator Database after full import . * @ throws Exception - an exception */ @ Override public final synchronized PrepareDbAfterGetCopy lazyGetPrepareDbAfterFullImport ( ) throws Exception { } }
String beanName = getPrepareDbAfterFullImportName ( ) ; PrepareDbAfterGetCopy prepareDbAfterGetCopyMysql = ( PrepareDbAfterGetCopy ) getBeansMap ( ) . get ( beanName ) ; if ( prepareDbAfterGetCopyMysql == null ) { prepareDbAfterGetCopyMysql = new PrepareDbAfterGetCopy ( ) ; prepareDbAfterGetCopyMysql . setLogger ( lazyGetLogger ( ) ) ; prepareDbAfterGetCopyMysql . setFactoryAppBeans ( this ) ; getBeansMap ( ) . put ( beanName , prepareDbAfterGetCopyMysql ) ; lazyGetLogger ( ) . info ( null , FactoryAppBeansMysql . class , beanName + " has been created." ) ; } return prepareDbAfterGetCopyMysql ;
public class Neo4JIndexManager {
    /**
     * Adds Node Index for all singular attributes (including ID) of a given
     * entity. The ID attribute is always indexed; other singular attributes
     * are indexed only when listed in the entity's index properties.
     *
     * @param entityMetadata metadata of the entity being indexed
     * @param node the Neo4J node whose properties are added to the index
     * @param metaModel the JPA metamodel used to enumerate attributes
     * @param nodeIndex the index to add entries to
     */
    private void addNodeIndex ( EntityMetadata entityMetadata , Node node , Index < Node > nodeIndex , MetamodelImpl metaModel ) {
        // MetamodelImpl metaModel = ( MetamodelImpl ) kunderaMetadata . getApplicationMetadata ( ) . getMetamodel ( // entityMetadata . getPersistenceUnit ( ) ) ;
        // ID attribute has to be indexed necessarily
        String idColumnName = ( ( AbstractAttribute ) entityMetadata . getIdAttribute ( ) ) . getJPAColumnName ( ) ;
        nodeIndex . add ( node , idColumnName , node . getProperty ( idColumnName ) ) ;
        // Index all other fields , for whom indexing is enabled
        for ( Attribute attribute : metaModel . entity ( entityMetadata . getEntityClazz ( ) ) . getSingularAttributes ( ) ) {
            Field field = ( Field ) attribute . getJavaMember ( ) ;
            // Skip collections/associations; index only fields explicitly
            // enabled via the entity's index properties.
            if ( ! attribute . isCollection ( ) && ! attribute . isAssociation ( ) && entityMetadata . getIndexProperties ( ) . keySet ( ) . contains ( field . getName ( ) ) ) {
                String columnName = ( ( AbstractAttribute ) attribute ) . getJPAColumnName ( ) ;
                nodeIndex . add ( node , columnName , node . getProperty ( columnName ) ) ;
            }
        }
    }
}
public class StatFsHelper { /** * Update stats for a single directory and return the StatFs object for that directory . If the * directory does not exist or the StatFs restat ( ) or constructor fails ( throws ) , a null StatFs * object is returned . */ private @ Nullable StatFs updateStatsHelper ( @ Nullable StatFs statfs , @ Nullable File dir ) { } }
if ( dir == null || ! dir . exists ( ) ) { // The path does not exist , do not track stats for it . return null ; } try { if ( statfs == null ) { // Create a new StatFs object for this path . statfs = createStatFs ( dir . getAbsolutePath ( ) ) ; } else { // Call restat and keep the existing StatFs object . statfs . restat ( dir . getAbsolutePath ( ) ) ; } } catch ( IllegalArgumentException ex ) { // Invalidate the StatFs object for this directory . The native StatFs implementation throws // IllegalArgumentException in the case that the statfs ( ) system call fails and it invalidates // its internal data structures so subsequent calls against the StatFs object will fail or // throw ( so we should make no more calls on the object ) . The most likely reason for this call // to fail is because the provided path no longer exists . The next call to updateStats ( ) will // a new statfs object if the path exists . This will handle the case that a path is unmounted // and later remounted ( but it has to have been mounted when this object was initialized ) . statfs = null ; } catch ( Throwable ex ) { // Any other exception types are not expected and should be propagated as runtime errors . throw Throwables . propagate ( ex ) ; } return statfs ;
public class QueryBuilder { /** * Shortcut for { @ link # deleteFrom ( CqlIdentifier ) deleteFrom ( CqlIdentifier . fromCql ( table ) ) } */ @ NonNull public static DeleteSelection deleteFrom ( @ NonNull String table ) { } }
return deleteFrom ( CqlIdentifier . fromCql ( table ) ) ;
public class FixedDelayBuilder { /** * Set the maximum number of attempts . * You can specify a direct value . For example : * < pre > * . maxRetries ( " 10 " ) ; * < / pre > * You can also specify one or several property keys . For example : * < pre > * . delay ( " $ { custom . property . high - priority } " , " $ { custom . property . low - priority } " ) ; * < / pre > * The properties are not immediately evaluated . The evaluation will be done * when the { @ link # build ( ) } method is called . * If you provide several property keys , evaluation will be done on the * first key and if the property exists ( see { @ link EnvironmentBuilder } ) , * its value is used . If the first property doesn ' t exist in properties , * then it tries with the second one and so on . * @ param maxRetries * one value , or one or several property keys * @ return this instance for fluent chaining */ public FixedDelayBuilder < P > maxRetries ( String ... maxRetries ) { } }
for ( String m : maxRetries ) { if ( m != null ) { maxRetriesProps . add ( m ) ; } } return this ;
public class MatrixToImageWriter { /** * Writes a { @ link BitMatrix } to a file with default configuration . * @ param matrix { @ link BitMatrix } to write * @ param format image format * @ param file file { @ link Path } to write image to * @ throws IOException if writes to the stream fail * @ see # toBufferedImage ( BitMatrix ) */ public static void writeToPath ( BitMatrix matrix , String format , Path file ) throws IOException { } }
writeToPath ( matrix , format , file , DEFAULT_CONFIG ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link PointArrayPropertyType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link PointArrayPropertyType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "pointArrayProperty" ) public JAXBElement < PointArrayPropertyType > createPointArrayProperty ( PointArrayPropertyType value ) { } }
return new JAXBElement < PointArrayPropertyType > ( _PointArrayProperty_QNAME , PointArrayPropertyType . class , null , value ) ;
public class RemoveIpRoutesRequest { /** * IP address blocks that you want to remove . * @ return IP address blocks that you want to remove . */ public java . util . List < String > getCidrIps ( ) { } }
if ( cidrIps == null ) { cidrIps = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return cidrIps ;
public class IsoChronology { /** * Obtains an ISO local date from the era , year - of - era and day - of - year fields . * @ param era the ISO era , not null * @ param yearOfEra the ISO year - of - era * @ param dayOfYear the ISO day - of - year * @ return the ISO local date , not null * @ throws DateTimeException if unable to create the date */ @ Override // override with covariant return type public LocalDate dateYearDay ( Era era , int yearOfEra , int dayOfYear ) { } }
return dateYearDay ( prolepticYear ( era , yearOfEra ) , dayOfYear ) ;
public class Ransac { /** * Turns the current candidates into the best ones . */ protected void swapCandidateWithBest ( ) { } }
List < Point > tempPts = candidatePoints ; candidatePoints = bestFitPoints ; bestFitPoints = tempPts ; int tempIndex [ ] = matchToInput ; matchToInput = bestMatchToInput ; bestMatchToInput = tempIndex ; Model m = candidateParam ; candidateParam = bestFitParam ; bestFitParam = m ;
public class CSSPageRuleImpl { /** * { @ inheritDoc } */ @ Override public void setCssText ( final String cssText ) throws DOMException { } }
try { final CSSOMParser parser = new CSSOMParser ( ) ; final AbstractCSSRuleImpl r = parser . parseRule ( cssText ) ; // The rule must be a page rule if ( r instanceof CSSPageRuleImpl ) { pseudoPage_ = ( ( CSSPageRuleImpl ) r ) . pseudoPage_ ; style_ = ( ( CSSPageRuleImpl ) r ) . style_ ; } else { throw new DOMExceptionImpl ( DOMException . INVALID_MODIFICATION_ERR , DOMExceptionImpl . EXPECTING_PAGE_RULE ) ; } } catch ( final CSSException e ) { throw new DOMExceptionImpl ( DOMException . SYNTAX_ERR , DOMExceptionImpl . SYNTAX_ERROR , e . getMessage ( ) ) ; } catch ( final IOException e ) { throw new DOMExceptionImpl ( DOMException . SYNTAX_ERR , DOMExceptionImpl . SYNTAX_ERROR , e . getMessage ( ) ) ; }
public class NettyMessage { /** * Allocates a new ( header and contents ) buffer and adds some header information for the frame * decoder . * < p > If the < tt > contentLength < / tt > is unknown , you must write the actual length after adding * the contents as an integer to position < tt > 0 < / tt > ! * @ param allocator * byte buffer allocator to use * @ param id * { @ link NettyMessage } subclass ID * @ param contentLength * content length ( or < tt > - 1 < / tt > if unknown ) * @ return a newly allocated direct buffer with header data written for { @ link * NettyMessageDecoder } */ private static ByteBuf allocateBuffer ( ByteBufAllocator allocator , byte id , int contentLength ) { } }
return allocateBuffer ( allocator , id , 0 , contentLength , true ) ;