signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class IncrementalEmTrainer { /** * Trains { @ code bn } using { @ code trainingData } , returning the resulting * parameters . These parameters maximize the marginal likelihood of the * observed data , assuming that training is run for a sufficient number of * iterations , etc . * { @ code initialParameters } are used as the starting point for training . A * reasonable starting point is the uniform distribution ( add - one smoothing ) . * These parameters can be retrieved using * { @ code bn . getNewSufficientStatistics ( ) . increment ( 1 ) } . Incremental EM will * retain the smoothing throughout the training process . This method may * modify { @ code initialParameters } . * @ param bn * @ param initialParameters * @ param trainingData */ @ Override public SufficientStatistics train ( ParametricFactorGraph bn , SufficientStatistics initialParameters , Iterable < Example < DynamicAssignment , DynamicAssignment > > trainingDataExamples ) { } }
List < DynamicAssignment > trainingData = getOutputAssignments ( trainingDataExamples , true ) ; SufficientStatistics [ ] previousIterationStatistics = new SufficientStatistics [ trainingData . size ( ) ] ; for ( int i = 0 ; i < numIterations ; i ++ ) { log . notifyIterationStart ( i ) ; for ( int j = 0 ; j < trainingData . size ( ) ; j ++ ) { if ( i > 0 ) { // Subtract out old statistics if they exist . initialParameters . increment ( previousIterationStatistics [ j ] , - 1.0 ) ; } // Get the current training data point and the most recent factor graph // based on the current iteration . DynamicAssignment dynamicExample = trainingData . get ( j ) ; DynamicFactorGraph dynamicFactorGraph = bn . getModelFromParameters ( initialParameters ) ; FactorGraph currentFactorGraph = dynamicFactorGraph . getFactorGraph ( dynamicExample ) ; Assignment trainingExample = dynamicFactorGraph . getVariables ( ) . toAssignment ( dynamicExample ) ; log . log ( i , j , trainingExample , currentFactorGraph ) ; // Compute the marginal distribution of currentFactorGraph conditioned on // the current training example . FactorGraph conditionalFactorGraph = currentFactorGraph . conditional ( trainingExample ) ; MarginalSet marginals = inferenceEngine . computeMarginals ( conditionalFactorGraph ) ; // Update new sufficient statistics SufficientStatistics exampleStatistics = bn . getNewSufficientStatistics ( ) ; bn . incrementSufficientStatistics ( exampleStatistics , initialParameters , marginals , 1.0 ) ; previousIterationStatistics [ j ] = exampleStatistics ; initialParameters . increment ( exampleStatistics , 1.0 ) ; } log . notifyIterationEnd ( i ) ; } return initialParameters ;
public class ApplicationConversionService { /** * Add converters useful for most Spring Boot applications . * @ param registry the registry of converters to add to ( must also be castable to * ConversionService , e . g . being a { @ link ConfigurableConversionService } ) * @ throws ClassCastException if the given ConverterRegistry could not be cast to a * ConversionService */ public static void addApplicationConverters ( ConverterRegistry registry ) { } }
addDelimitedStringConverters ( registry ) ; registry . addConverter ( new StringToDurationConverter ( ) ) ; registry . addConverter ( new DurationToStringConverter ( ) ) ; registry . addConverter ( new NumberToDurationConverter ( ) ) ; registry . addConverter ( new DurationToNumberConverter ( ) ) ; registry . addConverter ( new StringToDataSizeConverter ( ) ) ; registry . addConverter ( new NumberToDataSizeConverter ( ) ) ; registry . addConverterFactory ( new StringToEnumIgnoringCaseConverterFactory ( ) ) ;
public class PdfContentByte { /** * Appends a B & # xea ; zier curve to the path , starting from the current point . * @ param x1 x - coordinate of the first control point * @ param y1 y - coordinate of the first control point * @ param x2 x - coordinate of the second control point * @ param y2 y - coordinate of the second control point * @ param x3 x - coordinate of the ending point ( = new current point ) * @ param y3 y - coordinate of the ending point ( = new current point ) */ public void curveTo ( float x1 , float y1 , float x2 , float y2 , float x3 , float y3 ) { } }
content . append ( x1 ) . append ( ' ' ) . append ( y1 ) . append ( ' ' ) . append ( x2 ) . append ( ' ' ) . append ( y2 ) . append ( ' ' ) . append ( x3 ) . append ( ' ' ) . append ( y3 ) . append ( " c" ) . append_i ( separator ) ;
public class CommerceShippingMethodUtil { /** * Returns the commerce shipping method where groupId = & # 63 ; and engineKey = & # 63 ; or returns < code > null < / code > if it could not be found , optionally using the finder cache . * @ param groupId the group ID * @ param engineKey the engine key * @ param retrieveFromCache whether to retrieve from the finder cache * @ return the matching commerce shipping method , or < code > null < / code > if a matching commerce shipping method could not be found */ public static CommerceShippingMethod fetchByG_E ( long groupId , String engineKey , boolean retrieveFromCache ) { } }
return getPersistence ( ) . fetchByG_E ( groupId , engineKey , retrieveFromCache ) ;
public class JdbcQueueFactory { /** * Setter for { @ link # defaultJdbcHelper } . * @ param jdbcHelper * @ param setMyOwnJdbcHelper * @ return * @ since 0.7.1 */ protected JdbcQueueFactory < T , ID , DATA > setDefaultJdbcHelper ( IJdbcHelper jdbcHelper , boolean setMyOwnJdbcHelper ) { } }
if ( myOwnJdbcHelper && this . defaultJdbcHelper != null && this . defaultJdbcHelper instanceof AbstractJdbcHelper ) { ( ( AbstractJdbcHelper ) this . defaultJdbcHelper ) . destroy ( ) ; } this . defaultJdbcHelper = jdbcHelper ; this . myOwnJdbcHelper = setMyOwnJdbcHelper ; return this ;
public class TransmitMessageRequest { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . runtime . SIMPRemoteMessageControllable # getState ( ) */ public String getState ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getState" ) ; String state = null ; TickRange r = getTickRange ( ) ; byte tickState = r . type ; switch ( tickState ) { case TickRange . Requested : { state = State . REQUEST . toString ( ) ; break ; } case TickRange . Accepted : { state = State . ACKNOWLEDGED . toString ( ) ; break ; } case TickRange . Completed : { state = State . ACKNOWLEDGED . toString ( ) ; break ; } case TickRange . Rejected : { state = State . REJECT . toString ( ) ; break ; } case TickRange . Value : { if ( ( ( AOValue ) r . value ) . removing ) state = State . REMOVING . toString ( ) ; else state = State . PENDING_ACKNOWLEDGEMENT . toString ( ) ; break ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getState" , state ) ; return state ;
public class AipContentCensor { /** * 色情识别接口 * @ param imgData 图片二进制数据 * @ return JSONObject */ public JSONObject antiPorn ( byte [ ] imgData ) { } }
AipRequest request = new AipRequest ( ) ; // check param JSONObject checkRet = checkParam ( imgData ) ; if ( ! "0" . equals ( checkRet . getString ( "error_code" ) ) ) { return checkRet ; } preOperation ( request ) ; // add API params String base64Content = Base64Util . encode ( imgData ) ; request . addBody ( "image" , base64Content ) ; request . setUri ( ContentCensorConsts . ANTI_PORN_URL ) ; postOperation ( request ) ; return requestServer ( request ) ;
public class GrafeasV1Beta1Client { /** * Creates a new occurrence . * < p > Sample code : * < pre > < code > * try ( GrafeasV1Beta1Client grafeasV1Beta1Client = GrafeasV1Beta1Client . create ( ) ) { * ProjectName parent = ProjectName . of ( " [ PROJECT ] " ) ; * Occurrence occurrence = Occurrence . newBuilder ( ) . build ( ) ; * Occurrence response = grafeasV1Beta1Client . createOccurrence ( parent . toString ( ) , occurrence ) ; * < / code > < / pre > * @ param parent The name of the project in the form of ` projects / [ PROJECT _ ID ] ` , under which the * occurrence is to be created . * @ param occurrence The occurrence to create . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Occurrence createOccurrence ( String parent , Occurrence occurrence ) { } }
CreateOccurrenceRequest request = CreateOccurrenceRequest . newBuilder ( ) . setParent ( parent ) . setOccurrence ( occurrence ) . build ( ) ; return createOccurrence ( request ) ;
public class CmsRenderer { /** * Creates a tab . < p > * @ return the created tab */ private FlowPanel createTab ( ) { } }
FlowPanel tabPanel ; tabPanel = new FlowPanel ( ) ; tabPanel . addStyleName ( ENTITY_CLASS ) ; tabPanel . addStyleName ( I_CmsLayoutBundle . INSTANCE . form ( ) . formParent ( ) ) ; tabPanel . getElement ( ) . getStyle ( ) . setMargin ( 0 , Unit . PX ) ; return tabPanel ;
public class Context { /** * Associates the specified value with the specified key in the session * attributes . If the session attributes previously contained a mapping for * this key , the old value is replaced by the specified value . * @ param _ key key name of the attribute to set * @ param _ value value of the attribute to set * @ return Object * @ see # sessionAttributes * @ see # containsSessionAttribute * @ see # getSessionAttribute */ public Object setSessionAttribute ( final String _key , final Object _value ) { } }
return this . sessionAttributes . put ( _key , _value ) ;
public class HttpCallInitiationServlet { /** * Handle the HTTP GET method by building a simple web page . */ public void doGet ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { } }
String access = request . getParameter ( "access" ) ; String callAuthCode = getServletContext ( ) . getInitParameter ( "call.code" ) ; if ( callAuthCode . equals ( access ) ) { String toAddr = "sip:+" + request . getParameter ( "to" ) . trim ( ) + "@" + getServletContext ( ) . getInitParameter ( "domain.name" ) ; String fromAddr = "sip:" + getServletContext ( ) . getInitParameter ( "user1" ) + "@" + getServletContext ( ) . getInitParameter ( "domain.name" ) ; String secondParty = "sip:+" + request . getParameter ( "from" ) . trim ( ) + "@" + getServletContext ( ) . getInitParameter ( "domain.name" ) ; URI to = toAddr == null ? null : sipFactory . createAddress ( toAddr ) . getURI ( ) ; URI from = fromAddr == null ? null : sipFactory . createAddress ( fromAddr ) . getURI ( ) ; // Create app session and request SipApplicationSession appSession = // ( ( ConvergedHttpSession ) request . getSession ( ) ) . getApplicationSession ( ) ; sipFactory . createApplicationSession ( ) ; SipServletRequest req = sipFactory . createRequest ( appSession , "INVITE" , from , to ) ; // Set some attribute req . getSession ( ) . setAttribute ( "SecondPartyAddress" , sipFactory . createAddress ( secondParty ) ) ; req . getSession ( ) . setAttribute ( "FromAddr" , sipFactory . createAddress ( fromAddr ) ) ; req . getSession ( ) . setAttribute ( "user" , getServletContext ( ) . getInitParameter ( "user1" ) ) ; req . getSession ( ) . setAttribute ( "pass" , getServletContext ( ) . getInitParameter ( "pass1" ) ) ; logger . info ( "Sending request" + req ) ; // Send the INVITE request req . send ( ) ; } // Write the output html PrintWriter out ; response . setContentType ( "text/html" ) ; out = response . getWriter ( ) ; // Just redirect to the index out . println ( "<HTML><META HTTP-EQUIV=\"Refresh\"CONTENT=\"0; URL=index.jsp\"><HEAD><TITLE></HTML>" ) ; out . close ( ) ;
public class IconicsDrawable { /** * Set the size of the drawable . * @ param sizeDp The size in density - independent pixels ( dp ) . * @ return The current IconicsDrawable for chaining . */ @ NonNull public IconicsDrawable sizeDpY ( @ Dimension ( unit = DP ) int sizeDp ) { } }
return sizePxY ( Utils . convertDpToPx ( mContext , sizeDp ) ) ;
public class URLUtil { /** * 获得Reader * @ param url { @ link URL } * @ param charset 编码 * @ return { @ link BufferedReader } * @ since 3.2.1 */ public static BufferedReader getReader ( URL url , Charset charset ) { } }
return IoUtil . getReader ( getStream ( url ) , charset ) ;
public class ConcurrentSubList { /** * Replace the state of this object with the same object in some other * state . Used for to restore the before image if a transaction rolls back * or is read from the log during restart . * @ param other * is the object this object is to become a clone of . */ public void becomeCloneOf ( ManagedObject other ) { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "becomeCloneOf" , "Other=" + other ) ; super . becomeCloneOf ( other ) ; concurrentListToken = ( ( ConcurrentSubList ) other ) . concurrentListToken ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "becomeCloneOf" ) ;
public class CustomUserRegistryWrapper { /** * { @ inheritDoc } */ @ Override @ FFDCIgnore ( com . ibm . websphere . security . CertificateMapFailedException . class ) public String mapCertificate ( X509Certificate [ ] chain ) throws CertificateMapNotSupportedException , CertificateMapFailedException , RegistryException { } }
try { return customUserRegistry . mapCertificate ( chain ) ; } catch ( com . ibm . websphere . security . CertificateMapNotSupportedException e ) { throw new CertificateMapNotSupportedException ( e . getMessage ( ) ) ; } catch ( com . ibm . websphere . security . CertificateMapFailedException e ) { throw new CertificateMapFailedException ( e . getMessage ( ) , e ) ; } catch ( Exception e ) { throw new RegistryException ( e . getMessage ( ) , e ) ; }
public class SensorAggregatorInterface { /** * Cleans - up any session - related constructs when this interface will no longer * attempt connections to the aggregator . Notifies listener that the * connection has ended permanently . */ protected void finishConnection ( ) { } }
this . connector . dispose ( ) ; this . connector = null ; for ( ConnectionListener listener : this . connectionListeners ) { listener . connectionEnded ( this ) ; } if ( this . executors != null ) { this . executors . destroy ( ) ; }
public class GraphLoader { /** * Load a graph into memory , using a given EdgeLineProcessor . * Assume one edge per line * @ param path Path to the file containing the edges , one per line * @ param lineProcessor EdgeLineProcessor used to convert lines of text into a graph ( or null for comment lines etc ) * @ param vertexFactory Used to create vertices * @ param numVertices number of vertices in the graph * @ param allowMultipleEdges whether the graph should allow multiple edges between a given pair of vertices or not * @ return IGraph */ public static < V , E > Graph < V , E > loadGraph ( String path , EdgeLineProcessor < E > lineProcessor , VertexFactory < V > vertexFactory , int numVertices , boolean allowMultipleEdges ) throws IOException { } }
Graph < V , E > graph = new Graph < > ( numVertices , allowMultipleEdges , vertexFactory ) ; try ( BufferedReader br = new BufferedReader ( new FileReader ( new File ( path ) ) ) ) { String line ; while ( ( line = br . readLine ( ) ) != null ) { Edge < E > edge = lineProcessor . processLine ( line ) ; if ( edge != null ) { graph . addEdge ( edge ) ; } } } return graph ;
public class Saml10ObjectBuilder { /** * New subject element with given confirmation method . * @ param identifier the identifier * @ param confirmationMethod the confirmation method * @ return the subject */ public Subject newSubject ( final String identifier , final String confirmationMethod ) { } }
val confirmation = newSamlObject ( SubjectConfirmation . class ) ; val method = newSamlObject ( ConfirmationMethod . class ) ; method . setConfirmationMethod ( confirmationMethod ) ; confirmation . getConfirmationMethods ( ) . add ( method ) ; val nameIdentifier = newSamlObject ( NameIdentifier . class ) ; nameIdentifier . setValue ( identifier ) ; val subject = newSamlObject ( Subject . class ) ; subject . setNameIdentifier ( nameIdentifier ) ; subject . setSubjectConfirmation ( confirmation ) ; return subject ;
public class HttpUploadClient { /** * Standard usage of HTTP API in Netty without file Upload ( get is not able to achieve File upload * due to limitation on request size ) . * @ return the list of headers that will be used in every example after */ private static List < Entry < String , String > > formget ( Bootstrap bootstrap , String host , int port , String get , URI uriSimple ) throws Exception { } }
// XXX / formget // No use of HttpPostRequestEncoder since not a POST Channel channel = bootstrap . connect ( host , port ) . sync ( ) . channel ( ) ; // Prepare the HTTP request . QueryStringEncoder encoder = new QueryStringEncoder ( get ) ; // add Form attribute encoder . addParam ( "getform" , "GET" ) ; encoder . addParam ( "info" , "first value" ) ; encoder . addParam ( "secondinfo" , "secondvalue ���&" ) ; // not the big one since it is not compatible with GET size // encoder . addParam ( " thirdinfo " , textArea ) ; encoder . addParam ( "thirdinfo" , "third value\r\ntest second line\r\n\r\nnew line\r\n" ) ; encoder . addParam ( "Send" , "Send" ) ; URI uriGet = new URI ( encoder . toString ( ) ) ; HttpRequest request = new DefaultHttpRequest ( HttpVersion . HTTP_1_1 , HttpMethod . GET , uriGet . toASCIIString ( ) ) ; HttpHeaders headers = request . headers ( ) ; headers . set ( HttpHeaderNames . HOST , host ) ; headers . set ( HttpHeaderNames . CONNECTION , HttpHeaderValues . CLOSE ) ; headers . set ( HttpHeaderNames . ACCEPT_ENCODING , HttpHeaderValues . GZIP + "," + HttpHeaderValues . DEFLATE ) ; headers . set ( HttpHeaderNames . ACCEPT_CHARSET , "ISO-8859-1,utf-8;q=0.7,*;q=0.7" ) ; headers . set ( HttpHeaderNames . ACCEPT_LANGUAGE , "fr" ) ; headers . set ( HttpHeaderNames . REFERER , uriSimple . toString ( ) ) ; headers . set ( HttpHeaderNames . USER_AGENT , "Netty Simple Http Client side" ) ; headers . set ( HttpHeaderNames . ACCEPT , "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" ) ; // connection will not close but needed // headers . set ( " Connection " , " keep - alive " ) ; // headers . set ( " Keep - Alive " , " 300 " ) ; headers . set ( HttpHeaderNames . COOKIE , ClientCookieEncoder . STRICT . encode ( new DefaultCookie ( "my-cookie" , "foo" ) , new DefaultCookie ( "another-cookie" , "bar" ) ) ) ; // send request channel . writeAndFlush ( request ) ; // Wait for the server to close the connection . channel . closeFuture ( ) . 
sync ( ) ; // convert headers to list return headers . entries ( ) ;
public class WebSocketUtil { /** * Generates a pseudo - random number * @ param minimum The minimum allowable value * @ param maximum The maximum allowable value * @ return A pseudo - random number */ static int randomNumber ( int minimum , int maximum ) { } }
assert minimum < maximum ; double fraction = PlatformDependent . threadLocalRandom ( ) . nextDouble ( ) ; // the idea here is that nextDouble gives us a random value // 0 < = fraction < = 1 // the distance from min to max declared as // dist = max - min // satisfies the following // min + dist = max // taking into account // 0 < = fraction * dist < = dist // we ' ve got // min < = min + fraction * dist < = max return ( int ) ( minimum + fraction * ( maximum - minimum ) ) ;
public class ServerTableAuditingPoliciesInner { /** * Creates or updates a servers ' s table auditing policy . Table auditing is deprecated , use blob auditing instead . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param parameters The server table auditing policy . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the ServerTableAuditingPolicyInner object */ public Observable < ServerTableAuditingPolicyInner > createOrUpdateAsync ( String resourceGroupName , String serverName , ServerTableAuditingPolicyInner parameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , parameters ) . map ( new Func1 < ServiceResponse < ServerTableAuditingPolicyInner > , ServerTableAuditingPolicyInner > ( ) { @ Override public ServerTableAuditingPolicyInner call ( ServiceResponse < ServerTableAuditingPolicyInner > response ) { return response . body ( ) ; } } ) ;
public class IgnoredDomainResourceRoot { /** * call with lock on ' children ' held */ private void registerChildInternal ( IgnoreDomainResourceTypeResource child ) { } }
child . setParent ( this ) ; children . put ( child . getName ( ) , child ) ;
public class MoleculeFeaturesTool { /** * Checks whether all bonds have exactly two atoms . */ public static boolean hasGraphRepresentation ( IAtomContainer molecule ) { } }
for ( IBond bond : molecule . bonds ( ) ) if ( bond . getAtomCount ( ) != 2 ) return false ; return true ;
public class MathUtils { /** * This will merge the coordinates of the given coordinate system . * @ param x the x coordinates * @ param y the y coordinates * @ return a vector such that each ( x , y ) pair is at ret [ i ] , ret [ i + 1] */ public static double [ ] mergeCoords ( double [ ] x , double [ ] y ) { } }
if ( x . length != y . length ) throw new IllegalArgumentException ( "Sample sizes must be the same for each data applyTransformToDestination." ) ; double [ ] ret = new double [ x . length + y . length ] ; for ( int i = 0 ; i < x . length ; i ++ ) { ret [ i ] = x [ i ] ; ret [ i + 1 ] = y [ i ] ; } return ret ;
public class JdbcControlChecker { /** * Check the SQL method annotation . Lots to check here , stop checking as soon as an error is found . * @ param method Method to check . * @ param env Processor env . */ private void checkSQL ( MethodDeclaration method , AnnotationProcessorEnvironment env ) { } }
final JdbcControl . SQL methodSQL = method . getAnnotation ( JdbcControl . SQL . class ) ; if ( methodSQL == null ) { return ; } // check for empty SQL statement member if ( methodSQL . statement ( ) == null || methodSQL . statement ( ) . length ( ) == 0 ) { env . getMessager ( ) . printError ( method . getPosition ( ) , getResourceString ( "jdbccontrol.empty.statement" , method . getSimpleName ( ) ) ) ; return ; } // Make sure maxrows is not set to some negative number other than - 1 int maxRows = methodSQL . maxRows ( ) ; if ( maxRows < JdbcControl . MAXROWS_ALL ) { env . getMessager ( ) . printError ( method . getPosition ( ) , getResourceString ( "jdbccontrol.bad.maxrows" , method . getSimpleName ( ) , maxRows ) ) ; return ; } // parse the SQL SqlParser _p = new SqlParser ( ) ; SqlStatement _statement ; try { _statement = _p . parse ( methodSQL . statement ( ) ) ; } catch ( ControlException ce ) { env . getMessager ( ) . printError ( method . getPosition ( ) , getResourceString ( "jdbccontrol.bad.parse" , method . getSimpleName ( ) , ce . toString ( ) ) ) ; return ; } // Check that the any statement element params ( delimited by ' { ' and ' } ' can be // matched to method parameter names . NOTE : This check is only valid on non - compiled files , // once compiled to a class file method parameter names are replaced with ' arg0 ' , ' arg1 ' , etc . // and cannot be used for this check . try { ParameterChecker . checkReflectionParameters ( _statement , method ) ; } catch ( ControlException e ) { env . getMessager ( ) . printError ( method . getPosition ( ) , e . getMessage ( ) ) ; return ; } // check for case of generatedKeyColumns being set , when getGeneratedKeys is not set to true final boolean getGeneratedKeys = methodSQL . getGeneratedKeys ( ) ; final String [ ] generatedKeyColumnNames = methodSQL . generatedKeyColumnNames ( ) ; final int [ ] generatedKeyIndexes = methodSQL . generatedKeyColumnIndexes ( ) ; if ( ! 
getGeneratedKeys && ( generatedKeyColumnNames . length != 0 || generatedKeyIndexes . length != 0 ) ) { env . getMessager ( ) . printError ( method . getPosition ( ) , getResourceString ( "jdbccontrol.genkeys" , method . getSimpleName ( ) ) ) ; return ; } // check that both generatedKeyColumnNames and generatedKeyColumnIndexes are not set if ( generatedKeyColumnNames . length > 0 && generatedKeyIndexes . length > 0 ) { env . getMessager ( ) . printError ( method . getPosition ( ) , getResourceString ( "jdbccontrol.genkeycolumns" , method . getSimpleName ( ) ) ) ; return ; } // batch update methods must return int [ ] final boolean batchUpdate = methodSQL . batchUpdate ( ) ; final TypeMirror returnType = method . getReturnType ( ) ; if ( batchUpdate ) { if ( returnType instanceof ArrayType ) { final TypeMirror aType = ( ( ArrayType ) returnType ) . getComponentType ( ) ; if ( aType instanceof PrimitiveType == false || ( ( PrimitiveType ) aType ) . getKind ( ) != PrimitiveType . Kind . INT ) { env . getMessager ( ) . printError ( method . getPosition ( ) , getResourceString ( "jdbccontrol.batchupdate" , method . getSimpleName ( ) ) ) ; return ; } } else if ( returnType instanceof VoidType == false ) { env . getMessager ( ) . printError ( method . getPosition ( ) , getResourceString ( "jdbccontrol.batchupdate" , method . getSimpleName ( ) ) ) ; return ; } } // iterator type check match if ( returnType instanceof InterfaceType ) { String iName = ( ( InterfaceType ) returnType ) . getDeclaration ( ) . getQualifiedName ( ) ; if ( "java.util.Iterator" . equals ( iName ) ) { String iteratorClassName = null ; try { // this should always except methodSQL . iteratorElementType ( ) ; } catch ( MirroredTypeException mte ) { iteratorClassName = mte . getQualifiedName ( ) ; } if ( "org.apache.beehive.controls.system.jdbc.JdbcControl.UndefinedIteratorType" . equals ( iteratorClassName ) ) { env . getMessager ( ) . printError ( method . 
getPosition ( ) , getResourceString ( "jdbccontrol.iterator.returntype" , method . getSimpleName ( ) ) ) ; return ; } } } // scrollable result set check final JdbcControl . ScrollType scrollable = methodSQL . scrollableResultSet ( ) ; switch ( scrollable ) { case SCROLL_INSENSITIVE : case SCROLL_SENSITIVE : case SCROLL_INSENSITIVE_UPDATABLE : case SCROLL_SENSITIVE_UPDATABLE : case FORWARD_ONLY_UPDATABLE : String typeName = null ; if ( returnType instanceof DeclaredType ) { typeName = ( ( DeclaredType ) returnType ) . getDeclaration ( ) . getQualifiedName ( ) ; } if ( typeName == null || ! "java.sql.ResultSet" . equals ( typeName ) ) { env . getMessager ( ) . printError ( method . getPosition ( ) , getResourceString ( "jdbccontrol.scrollresultset" , method . getSimpleName ( ) ) ) ; return ; } case FORWARD_ONLY : default : break ; } return ;
public class LTSminUtil { /** * ( Re - ) Checks whether the required binaries for the { @ link AbstractLTSmin LTSmin modelchecker } can be executed , by * performing a version check . The results to these checks can be accessed by { @ link # isInstalled ( ) } and { @ link * # getVersion ( ) } . * @ see # ETF2LTS _ MC * @ see # LTSMIN _ CONVERT */ public static void detectLTSmin ( ) { } }
final LTSminVersion etf2ltsVersion = detectLTSmin ( ETF2LTS_MC ) ; final LTSminVersion ltsminConvertVersion = detectLTSmin ( LTSMIN_CONVERT ) ; if ( etf2ltsVersion != null && ltsminConvertVersion != null ) { if ( ! etf2ltsVersion . equals ( ltsminConvertVersion ) ) { LOGGER . warn ( "Found differing etf2lts version '{}' and lstminConver version '{}'. Choosing the former" , etf2ltsVersion , ltsminConvertVersion ) ; } detectedVersion = etf2ltsVersion ; }
public class Validate { /** * Method without varargs to increase performance */ public static CharSequence matchesPattern ( final CharSequence input , final String pattern , final String message ) { } }
return INSTANCE . matchesPattern ( input , pattern , message ) ;
public class JaxWsMetaDataManager { /** * Gets the metadata for the module */ public static ModuleMetaData getModuleMetaData ( ) { } }
ComponentMetaData cmd = getComponentMetaData ( ) ; ModuleMetaData mmd = null ; if ( cmd != null ) { mmd = cmd . getModuleMetaData ( ) ; } if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "ModuleMetaData object is " + ( mmd != null ? mmd . toString ( ) : "null!" ) ) ; } return mmd ;
public class H2StreamProcessor { /** * If this stream is receiving a DATA frame , the local read window needs to be updated . If the read window drops below a threshold , * a WINDOW _ UPDATE frame will be sent for both the connection and stream to update the windows . */ private void updateStreamReadWindow ( ) throws Http2Exception { } }
if ( currentFrame instanceof FrameData ) { long frameSize = currentFrame . getPayloadLength ( ) ; streamReadWindowSize -= frameSize ; // decrement stream read window muxLink . connectionReadWindowSize -= frameSize ; // decrement connection read window // if the stream or connection windows become too small , update the windows // TODO : decide how often we should update the read window via WINDOW _ UPDATE if ( streamReadWindowSize < ( muxLink . maxReadWindowSize / 2 ) || muxLink . connectionReadWindowSize < ( muxLink . maxReadWindowSize / 2 ) ) { int windowChange = ( int ) ( muxLink . maxReadWindowSize - this . streamReadWindowSize ) ; Frame savedFrame = currentFrame ; // save off the current frame if ( ! this . isStreamClosed ( ) ) { currentFrame = new FrameWindowUpdate ( myID , windowChange , false ) ; writeFrameSync ( ) ; currentFrame = savedFrame ; } long windowSizeIncrement = muxLink . getRemoteConnectionSettings ( ) . getMaxFrameSize ( ) ; FrameWindowUpdate wuf = new FrameWindowUpdate ( 0 , ( int ) windowSizeIncrement , false ) ; this . muxLink . getStream ( 0 ) . processNextFrame ( wuf , Direction . WRITING_OUT ) ; } }
public class ModuleUploads { /** * Create a new upload . * Once an upload is created , you can use it in { @ link ModuleAssets } through * { @ link CMAClient # assets ( ) } for creating an asset based on a local file . * This method will override the configuration specified through * { @ link CMAClient . Builder # setSpaceId ( String ) } and will ignore * { @ link CMAClient . Builder # setEnvironmentId ( String ) } . * @ param spaceId a nonnull id representing the space to add the upload to . * @ param stream the actual binary representation of the upload . Cannot be null . * @ return the upload created , containing the id to be used further on . * @ throws IllegalArgumentException if spaceId is null . * @ throws IllegalArgumentException if stream is null . * @ throws java . io . IOException if the stream could not be read . */ public CMAUpload create ( String spaceId , InputStream stream ) throws IOException { } }
assertNotNull ( spaceId , "spaceId" ) ; assertNotNull ( stream , "stream" ) ; final byte [ ] content = readAllBytes ( stream ) ; final RequestBody payload = RequestBody . create ( parse ( OCTET_STREAM_CONTENT_TYPE ) , content ) ; return service . create ( spaceId , payload ) . blockingFirst ( ) ;
public class CurveSegmentArrayPropertyType { /** * Gets the value of the curveSegment property . * This accessor method returns a reference to the live list , * not a snapshot . Therefore any modification you make to the * returned list will be present inside the JAXB object . * This is why there is not a < CODE > set < / CODE > method for the curveSegment property . * For example , to add a new item , do as follows : * < pre > * get _ CurveSegment ( ) . add ( newItem ) ; * < / pre > * Objects of the following type ( s ) are allowed in the list * { @ link JAXBElement } { @ code < } { @ link ArcByBulgeType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link ArcStringByBulgeType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link GeodesicType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link GeodesicStringType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link OffsetCurveType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link CircleByCenterPointType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link ArcByCenterPointType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link LineStringSegmentType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link BezierType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link BSplineType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link CircleType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link ArcType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link ArcStringType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link CubicSplineType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link ClothoidType } { @ code > } * { @ link JAXBElement } { @ code < } { @ link AbstractCurveSegmentType } { @ code > } */ public List < JAXBElement < ? extends AbstractCurveSegmentType > > get_CurveSegment ( ) { } }
if ( _CurveSegment == null ) { _CurveSegment = new ArrayList < JAXBElement < ? extends AbstractCurveSegmentType > > ( ) ; } return this . _CurveSegment ;
public class ClassWriterImpl {
    /**
     * Builds the class hierarchy tree for the given class as nested UL/LI
     * HTML lists, with the root superclass outermost and {@code type} innermost.
     *
     * @param type the class to print the hierarchy for
     * @return a content tree for class inheritance
     */
    private Content getClassInheritenceTree(Type type) {
        Type sup;
        HtmlTree classTreeUl = new HtmlTree(HtmlTag.UL);
        classTreeUl.addStyle(HtmlStyle.inheritance);
        Content liTree = null;
        // Walk up the superclass chain, wrapping the tree built so far (liTree)
        // inside each ancestor's UL, so the final nesting reads top-down.
        do {
            sup = utils.getFirstVisibleSuperClass(type instanceof ClassDoc ? (ClassDoc) type : type.asClassDoc(), configuration);
            if (sup != null) {
                HtmlTree ul = new HtmlTree(HtmlTag.UL);
                ul.addStyle(HtmlStyle.inheritance);
                ul.addContent(getTreeForClassHelper(type));
                // Nest the previously built subtree under this level.
                if (liTree != null)
                    ul.addContent(liTree);
                Content li = HtmlTree.LI(ul);
                liTree = li;
                type = sup; // move one level up the hierarchy
            } else
                // No visible superclass at all: emit the class itself at the top level.
                classTreeUl.addContent(getTreeForClassHelper(type));
        } while (sup != null);
        if (liTree != null)
            classTreeUl.addContent(liTree);
        return classTreeUl;
    }
}
public class Duration { /** * Converts this duration to the total length in nanoseconds expressed as a { @ code long } . * If this duration is too large to fit in a { @ code long } nanoseconds , then an * exception is thrown . * @ return the total length of the duration in nanoseconds * @ throws ArithmeticException if numeric overflow occurs */ public long toNanos ( ) { } }
long result = Jdk8Methods . safeMultiply ( seconds , NANOS_PER_SECOND ) ; result = Jdk8Methods . safeAdd ( result , nanos ) ; return result ;
public class NumericConverter { /** * Attempts to convert the provided string value to a numeric type , * trying Integer , Long and Double in order until successful . */ @ Override public Object convert ( String value ) { } }
if ( value == null || value . isEmpty ( ) ) { return value ; } Object result = Ints . tryParse ( value ) ; if ( result != null ) { return result ; } result = Longs . tryParse ( value ) ; if ( result != null ) { return result ; } result = Doubles . tryParse ( value ) ; if ( result != null ) { return result ; } return value ;
public class FaultTolerantScheduler { /** * Recovers a timer task that was running in another node . * @ param taskData */ private void recover ( TimerTaskData taskData ) { } }
TimerTask task = timerTaskFactory . newTimerTask ( taskData ) ; if ( task != null ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Recovering task with id " + taskData . getTaskID ( ) ) ; } task . beforeRecover ( ) ; // on recovery the task will already be in the cache so we don ' t check for it // or an IllegalStateException will be thrown schedule ( task , false ) ; }
public class Hashing {
    /**
     * Returns a hash function which computes its hash code by concatenating the
     * hash codes of the underlying hash functions together. This can be useful
     * if you need to generate hash codes of a specific length.
     *
     * <p>For example, if you need 1024-bit hash codes, you could join two
     * {@link Hashing#sha512} hash functions together:
     * {@code Hashing.concatenating(Hashing.sha512(), Hashing.sha512())}.
     *
     * @since 19.0
     */
    public static HashFunction concatenating(HashFunction first, HashFunction second, HashFunction... rest) {
        // We can't use Lists.asList() here because there's no hash -> collect dependency.
        // Pre-size the list (two fixed functions plus the varargs tail) to avoid
        // the avoidable resizes of a default-capacity ArrayList.
        List<HashFunction> list = new ArrayList<HashFunction>(2 + rest.length);
        list.add(first);
        list.add(second);
        for (HashFunction hashFunc : rest) {
            list.add(hashFunc);
        }
        return new ConcatenatedHashFunction(list.toArray(new HashFunction[0]));
    }
}
public class AnimaQuery { /** * Set in params * @ param list in param values * @ param < S > * @ return AnimaQuery */ public < S > AnimaQuery < T > in ( List < S > list ) { } }
return this . in ( list . toArray ( ) ) ;
public class PickerSpinner {
    /**
     * Pushes an item to be selected without showing it in the dropdown menu —
     * similar to calling setText(item.toString()) if a Spinner had such a method.
     * The item is stored in the adapter at the position just past the real items.
     *
     * @param item the item to select, or null to remove any temporary selection
     */
    public void selectTemporary(TwinTextItem item) {
        // if we just want to clear the selection:
        if (item == null) {
            setSelection(getLastItemPosition());
            // the call is passed on to the adapter in setSelection.
            return;
        }
        PickerSpinnerAdapter adapter = (PickerSpinnerAdapter) getAdapter();
        // pass on the call to the adapter (just stores the item):
        adapter.selectTemporary(item);
        // The temporary item lives at index == count (one past the real items).
        final int tempItemPosition = adapter.getCount();
        if (getSelectedItemPosition() == tempItemPosition) {
            // this is quite a hack: first reset the position to 0 but intercept the
            // callback, then redo the selection so the change is actually applied.
            setSelectionQuietly(0);
        }
        super.setSelection(tempItemPosition);
        // during initialization the system might check our selected position and reset it,
        // thus we need to re-check after the message queue has been settled
        if (!restoreTemporarySelection) {
            restoreTemporarySelection = true;
            post(new Runnable() {
                @Override
                public void run() {
                    if (restoreTemporarySelection) {
                        restoreTemporarySelection = false;
                        reselectTemporaryItem = false;
                        // Recompute: the adapter count may have changed meanwhile.
                        final int tempItemPosition = getAdapter().getCount();
                        if (getSelectedItemPosition() != tempItemPosition)
                            superSetSelection(tempItemPosition);
                    }
                }
            });
        }
    }
}
public class LabeledFieldController { /** * Runs a validation on the user input and returns all the validation errors of this field . * Previous error messages are removed when calling { @ code validateInput ( ) } . * @ return a list containing all the validation errors */ public List < ValidationError > validateInput ( ) { } }
List < ValidationError > errors = new ArrayList < > ( ) ; Object value = getModel ( ) . getValue ( getName ( ) ) ; ValidationError error ; for ( InputValidator validator : validators ) { error = validator . validate ( value , getName ( ) , getLabel ( ) ) ; if ( error != null ) { errors . add ( error ) ; } } return errors ;
public class FeatureScopes {
    /**
     * Creates a scope that returns nested types.
     *
     * @param featureCall      the feature call currently processed by the scoping infrastructure
     * @param enclosingType    the enclosing type, including type parameters, for the nested type literal scope
     * @param rawEnclosingType the raw type used to query the nested types
     * @param parent           the parent scope; never null
     * @param session          the currently known scope session; never null
     * @return a scope resolving nested types of the enclosing type
     */
    protected IScope createNestedTypeLiteralScope(EObject featureCall, LightweightTypeReference enclosingType, JvmDeclaredType rawEnclosingType, IScope parent, IFeatureScopeSession session) {
        // Narrow the generic EObject to a feature call before handing it to the scope.
        return new NestedTypeLiteralScope(parent, session, asAbstractFeatureCall(featureCall), enclosingType, rawEnclosingType);
    }
}
public class PolicyAssignmentsInner {
    /**
     * Deletes a policy assignment asynchronously.
     *
     * @param scope                the scope of the policy assignment
     * @param policyAssignmentName the name of the policy assignment to delete
     * @throws IllegalArgumentException if any required parameter is null
     * @return an observable emitting the service response wrapping the deleted
     *         PolicyAssignmentInner
     */
    public Observable<ServiceResponse<PolicyAssignmentInner>> deleteWithServiceResponseAsync(String scope, String policyAssignmentName) {
        // Validate eagerly so bad arguments fail before any network call is made.
        if (scope == null) {
            throw new IllegalArgumentException("Parameter scope is required and cannot be null.");
        }
        if (policyAssignmentName == null) {
            throw new IllegalArgumentException("Parameter policyAssignmentName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the raw HTTP delete, then deserialize each response via the
        // delegate; deserialization failures are surfaced as onError.
        return service.delete(scope, policyAssignmentName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<PolicyAssignmentInner>>>() {
                @Override
                public Observable<ServiceResponse<PolicyAssignmentInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PolicyAssignmentInner> clientResponse = deleteDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class Classes {
    /**
     * Loads a class by name, returning null instead of throwing when it cannot
     * be found. The current thread's context class loader is tried first; if it
     * is null or fails, this utility class's own loader is tried (provided it is
     * a different loader). This two-loader strategy suits Tomcat-style class
     * loading, where libraries and applications live in separate loaders.
     *
     * @param className requested class name; null tolerated
     * @param <T>       type to auto-cast the named class to
     * @return the class identified by name, or null if not found or if
     *         {@code className} is null
     * @throws ClassCastException if the found class cannot be cast to T
     */
    @SuppressWarnings("unchecked")
    public static <T> Class<T> forOptionalName(String className) {
        if (className == null) {
            return null;
        }
        final ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
        if (contextLoader != null) {
            try {
                return (Class<T>) Class.forName(className, true, contextLoader);
            } catch (ClassNotFoundException expected) {
                // fall through and retry with this utility's own loader
            }
        }
        final ClassLoader utilityLoader = Classes.class.getClassLoader();
        // Retry only when the utility loader is genuinely a different loader.
        if (contextLoader == null || !contextLoader.equals(utilityLoader)) {
            try {
                return (Class<T>) Class.forName(className, true, utilityLoader);
            } catch (ClassNotFoundException unused) {
                // not found anywhere: signal with null, per contract
            }
        }
        return null;
    }
}
public class CreateGrantRequest { /** * A list of operations that the grant permits . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setOperations ( java . util . Collection ) } or { @ link # withOperations ( java . util . Collection ) } if you want to * override the existing values . * @ param operations * A list of operations that the grant permits . * @ return Returns a reference to this object so that method calls can be chained together . * @ see GrantOperation */ public CreateGrantRequest withOperations ( String ... operations ) { } }
if ( this . operations == null ) { setOperations ( new com . ibm . cloud . objectstorage . internal . SdkInternalList < String > ( operations . length ) ) ; } for ( String ele : operations ) { this . operations . add ( ele ) ; } return this ;
public class ExportConfigurationsInner {
    /**
     * Gets the Continuous Export configuration for the given export id.
     *
     * @param resourceGroupName the name of the resource group
     * @param resourceName      the name of the Application Insights component resource
     * @param exportId          the Continuous Export configuration ID, unique within a component
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException           thrown if the request is rejected by the server
     * @throws RuntimeException         all other wrapped checked exceptions if the request fails to be sent
     * @return the ApplicationInsightsComponentExportConfigurationInner object if successful
     */
    public ApplicationInsightsComponentExportConfigurationInner get(String resourceGroupName, String resourceName, String exportId) {
        // Blocking facade: delegate to the async variant and wait for the single response body.
        return getWithServiceResponseAsync(resourceGroupName, resourceName, exportId).toBlocking().single().body();
    }
}
public class XMLUtils {
    /**
     * Searches a DOM document for a node matching a dot-separated path
     * expression (e.g. {@code Foo.Bar.Poo}). If the last path segment matches
     * no element, it is retried as an attribute of the node named by the
     * remaining path. Matching is namespace-agnostic and compares local names.
     *
     * @param doc            DOM document to search
     * @param pathExpression dot-separated path expression
     * @return the matching element or attribute node, or null if none found
     */
    public static Node findNodeByName(Document doc, String pathExpression) {
        final StringTokenizer tok = new StringTokenizer(pathExpression, ".");
        // Only the token count is used; the tokenizer itself is never advanced.
        final int numToks = tok.countTokens();
        NodeList elements;
        // Single-segment path: take the first element with that name anywhere.
        if (numToks == 1) {
            elements = doc.getElementsByTagNameNS("*", pathExpression);
            return elements.item(0);
        }
        // Last path segment is the candidate element (or attribute) name.
        String element = pathExpression.substring(pathExpression.lastIndexOf('.') + 1);
        elements = doc.getElementsByTagNameNS("*", element);
        String attributeName = null;
        if (elements.getLength() == 0) {
            // No element found, but maybe we are searching for an attribute:
            // resolve the parent path recursively, then look the attribute up on it.
            attributeName = element;
            // cut off attributeName and set element to next token and continue
            Node found = findNodeByName(doc, pathExpression.substring(0, pathExpression.length() - attributeName.length() - 1));
            if (found != null) {
                return found.getAttributes().getNamedItem(attributeName);
            } else {
                return null;
            }
        }
        // Several elements may share the name: pick the one whose ancestor
        // chain (local names, innermost-last) reproduces the full path.
        StringBuffer pathName;
        Node parent;
        for (int j = 0; j < elements.getLength(); j++) {
            int cnt = numToks - 1;
            pathName = new StringBuffer(element);
            parent = elements.item(j).getParentNode();
            do {
                if (parent != null) {
                    pathName.insert(0, '.');
                    pathName.insert(0, parent.getLocalName()); // was getNodeName(); local name ignores prefixes
                    parent = parent.getParentNode();
                }
            } while (parent != null && --cnt > 0);
            if (pathName.toString().equals(pathExpression)) {
                return elements.item(j);
            }
        }
        return null;
    }
}
public class AbstractProxyHandler { /** * called by connect listener in proxy service handler */ protected void flushQueuedMessages ( IoSession session , AttachedSessionManager attachedSessionManager ) { } }
Queue < Object > messageQueue = getMessageQueue ( session ) ; if ( messageQueue != null ) { flushQueuedMessages ( messageQueue , session , attachedSessionManager ) ; // Note : leave messageQueue intact on session to avoid race condition // will be cleared by next received message anyway }
public class CompileTask { /** * Set the compilation level . * @ param value The optimization level by string name . * ( whitespace , simple , advanced ) . */ public void setCompilationLevel ( String value ) { } }
if ( "simple" . equalsIgnoreCase ( value ) ) { this . compilationLevel = CompilationLevel . SIMPLE_OPTIMIZATIONS ; } else if ( "advanced" . equalsIgnoreCase ( value ) ) { this . compilationLevel = CompilationLevel . ADVANCED_OPTIMIZATIONS ; } else if ( "whitespace" . equalsIgnoreCase ( value ) ) { this . compilationLevel = CompilationLevel . WHITESPACE_ONLY ; } else { throw new BuildException ( "Unrecognized 'compilation' option value (" + value + ")" ) ; }
public class HttpMergeRequestFilter { /** * / * For logging and debugging only */ private Object toLogString ( HttpRequestMessage httpRequest ) { } }
StringBuilder b = new StringBuilder ( ) ; b . append ( httpRequest . getRequestURI ( ) ) . append ( '\n' ) ; final Map < String , List < String > > headers = httpRequest . getHeaders ( ) ; for ( String h : headers . keySet ( ) ) { b . append ( h ) . append ( ':' ) . append ( ' ' ) . append ( headers . get ( h ) ) . append ( '\n' ) ; } return b . toString ( ) ;
public class CompositeELResolver { /** * For a given base and property , attempts to determine whether a call to * { @ link # setValue ( ELContext , Object , Object , Object ) } will always fail . The result is obtained * by querying all component resolvers . If this resolver handles the given ( base , property ) * pair , the propertyResolved property of the ELContext object must be set to true by the * resolver , before returning . If this property is not true after this method is called , the * caller should ignore the return value . First , propertyResolved is set to false on the * provided ELContext . Next , for each component resolver in this composite : * < ol > * < li > The isReadOnly ( ) method is called , passing in the provided context , base and property . < / li > * < li > If the ELContext ' s propertyResolved flag is false then iteration continues . < / li > * < li > Otherwise , iteration stops and no more component resolvers are considered . The value * returned by isReadOnly ( ) is returned by this method . < / li > * < / ol > * If none of the component resolvers were able to perform this operation , the value false is * returned and the propertyResolved flag remains set to false . Any exception thrown by * component resolvers during the iteration is propagated to the caller of this method . * @ param context * The context of this evaluation . * @ param base * The base object to return the most general property type for , or null to enumerate * the set of top - level variables that this resolver can evaluate . * @ param property * The property or variable to return the acceptable type for . * @ return If the propertyResolved property of ELContext was set to true , then true if the * property is read - only or false if not ; otherwise undefined . * @ throws NullPointerException * if context is null * @ throws PropertyNotFoundException * if base is not null and the specified property does not exist or is not readable . 
* @ throws ELException * if an exception was thrown while performing the property or variable resolution . * The thrown exception must be included as the cause property of this exception , if * available . */ @ Override public boolean isReadOnly ( ELContext context , Object base , Object property ) { } }
context . setPropertyResolved ( false ) ; for ( ELResolver resolver : resolvers ) { boolean readOnly = resolver . isReadOnly ( context , base , property ) ; if ( context . isPropertyResolved ( ) ) { return readOnly ; } } return false ;
public class Client {
    /**
     * Gets a list of User resources matching the given query parameters.
     *
     * @param queryParameters query parameters used to filter the result list
     * @return list of User
     * @throws OAuthSystemException  if there is an IOException reading parameters of the httpURLConnection
     * @throws OAuthProblemException if there are errors validating the OneloginOAuthJSONResourceResponse and throwOAuthProblemException is enabled
     * @throws URISyntaxException    if there is an error when generating the target URL at the getResource call
     * @see com.onelogin.sdk.model.User
     * @see <a target="_blank" href="https://developers.onelogin.com/api-docs/1/users/get-users">Get Users documentation</a>
     */
    public List<User> getUsers(HashMap<String, String> queryParameters) throws OAuthSystemException, OAuthProblemException, URISyntaxException {
        // Delegate to the two-argument overload, capping results at the client-wide maximum.
        return getUsers(queryParameters, this.maxResults);
    }
}
public class GUIMainMenuAppState { /** * TOOLS OPTIONS */ public void toolsMenuClick ( int index , Object value , boolean isToggled ) { } }
System . out . println ( index + ":" + value + ":" + isToggled ) ; switch ( index ) { case 0 : // Call the xml parser to load your new components // screen . parseLayout ( " Interface / MainMenu . gui . xml " , this ) ; // screenShotState . takeScreenshot ( ) ; app . getStateManager ( ) . detach ( this ) ; takeScreenshot ( ) ; break ; case 1 : LoggingViewerAppState log = app . getStateManager ( ) . getState ( LoggingViewerAppState . class ) ; if ( log == null ) { log = new LoggingViewerAppState ( ) ; app . getStateManager ( ) . attach ( this ) ; } if ( log . isShown ( ) ) { log . hide ( ) ; } else { log . show ( ) ; } break ; case 2 : // Event Launcher if ( eventLauncherPanel == null ) { DevicesAppState devicesAppState = app . getStateManager ( ) . getState ( DevicesAppState . class ) ; if ( devicesAppState != null ) { EventLauncherPanel . createAndShowGUI ( devicesAppState ) ; } } else { eventLauncherPanel . setVisible ( eventLauncherPanel . isVisible ( ) ) ; } break ; }
public class ESGImpl {
    /**
     * EMF-generated feature dispatcher: resets the REG_NAME feature to its
     * default and delegates all other features to the superclass. Do not edit
     * by hand — regenerated from the model.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.ESG__REG_NAME:
                setREGName(REG_NAME_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
}
public class HazelcastConfigurationFactory {
    /**
     * Builds a Hazelcast map configuration from the cluster properties.
     *
     * @param hz             the Hazelcast properties holder
     * @param mapName        the storage (map) name
     * @param timeoutSeconds the max-idle timeout in seconds
     * @return the map config
     */
    public MapConfig buildMapConfig(final BaseHazelcastProperties hz, final String mapName, final long timeoutSeconds) {
        val cluster = hz.getCluster();
        // Eviction and max-size policies come straight from the configured enum names.
        val evictionPolicy = EvictionPolicy.valueOf(cluster.getEvictionPolicy());
        LOGGER.trace("Creating Hazelcast map configuration for [{}] with idle timeoutSeconds [{}] second(s)", mapName, timeoutSeconds);
        val maxSizeConfig = new MaxSizeConfig()
            .setMaxSizePolicy(MaxSizeConfig.MaxSizePolicy.valueOf(cluster.getMaxSizePolicy()))
            .setSize(cluster.getMaxHeapSizePercentage());
        // Only override the merge policy when one is explicitly configured.
        val mergePolicyConfig = new MergePolicyConfig();
        if (StringUtils.hasText(cluster.getMapMergePolicy())) {
            mergePolicyConfig.setPolicy(cluster.getMapMergePolicy());
        }
        return new MapConfig()
            .setName(mapName)
            .setMergePolicyConfig(mergePolicyConfig)
            .setMaxIdleSeconds((int) timeoutSeconds)
            .setBackupCount(cluster.getBackupCount())
            .setAsyncBackupCount(cluster.getAsyncBackupCount())
            .setEvictionPolicy(evictionPolicy)
            .setMaxSizeConfig(maxSizeConfig);
    }
}
public class PremiseElement { /** * Gets the value of the premiseName property . * This accessor method returns a reference to the live list , * not a snapshot . Therefore any modification you make to the * returned list will be present inside the JAXB object . * This is why there is not a < CODE > set < / CODE > method for the premiseName property . * For example , to add a new item , do as follows : * < pre > * getPremiseName ( ) . add ( newItem ) ; * < / pre > * Objects of the following type ( s ) are allowed in the list * { @ link PremiseElement . PremiseName } */ public List < PremiseElement . PremiseName > getPremiseName ( ) { } }
if ( premiseName == null ) { premiseName = new ArrayList < PremiseElement . PremiseName > ( ) ; } return this . premiseName ;
public class Matrix4x3d { /** * / * ( non - Javadoc ) * @ see org . joml . Matrix4x3dc # normalize3x3 ( org . joml . Matrix4x3d ) */ public Matrix4x3d normalize3x3 ( Matrix4x3d dest ) { } }
double invXlen = 1.0 / Math . sqrt ( m00 * m00 + m01 * m01 + m02 * m02 ) ; double invYlen = 1.0 / Math . sqrt ( m10 * m10 + m11 * m11 + m12 * m12 ) ; double invZlen = 1.0 / Math . sqrt ( m20 * m20 + m21 * m21 + m22 * m22 ) ; dest . m00 = m00 * invXlen ; dest . m01 = m01 * invXlen ; dest . m02 = m02 * invXlen ; dest . m10 = m10 * invYlen ; dest . m11 = m11 * invYlen ; dest . m12 = m12 * invYlen ; dest . m20 = m20 * invZlen ; dest . m21 = m21 * invZlen ; dest . m22 = m22 * invZlen ; return dest ;
public class IPAddressDivisionGrouping {
    /**
     * Returns the number of consecutive leading one or zero bits.
     * If network is true, returns the number of consecutive leading one bits;
     * otherwise, the number of consecutive leading zero bits.
     *
     * @param network whether to count leading ones (true) or zeros (false)
     * @return the count of consecutive leading one/zero bits
     */
    public int getLeadingBitCount(boolean network) {
        int count = getDivisionCount();
        if (count == 0) {
            return 0;
        }
        // "front" is the value a fully-matching division must hold: all ones
        // (its max value) when counting ones, zero when counting zeros.
        // NOTE(review): front is taken from division 0's max value; this assumes
        // all divisions share the same bit width — confirm for mixed groupings.
        long front = network ? getDivision(0).getMaxValue() : 0;
        int prefixLen = 0;
        for (int i = 0; i < count; i++) {
            IPAddressDivision seg = getDivision(i);
            long value = seg.getDivisionValue();
            if (value != front) {
                // First division not entirely ones/zeros: add its own leading
                // run to the full divisions accumulated so far and stop.
                return prefixLen + seg.getLeadingBitCount(network);
            }
            prefixLen += seg.getBitCount();
        }
        return prefixLen;
    }
}
public class EventLog { /** * Completes the given response with the next event . * @ return a future to be completed with the next event */ public CompletableFuture < E > nextEvent ( ) { } }
E event = events . poll ( ) ; if ( event != null ) { return CompletableFuture . completedFuture ( event ) ; } else { CompletableFuture < E > future = new CompletableFuture < > ( ) ; futures . add ( future ) ; return future ; }
public class UTF8Reader { /** * Throws an exception for invalid byte . */ private void invalidByte ( int position , int count , int c ) throws UTFDataFormatException { } }
String msg = JspCoreException . getMsg ( "jsp.error.xml.invalidByte" , new Object [ ] { Integer . toString ( position ) , Integer . toString ( count ) } ) ; throw new UTFDataFormatException ( msg ) ;
public class Watchdog { /** * / * ( non - Javadoc ) * @ see org . csc . phynixx . watchdog . IWatchog # deactivate ( ) */ public synchronized void deactivate ( ) { } }
synchronized ( this . conditions ) { IWatchedCondition cond = null ; for ( Iterator iterator = this . conditions . iterator ( ) ; iterator . hasNext ( ) ; ) { IObjectReference objRef = ( IObjectReference ) iterator . next ( ) ; if ( objRef . get ( ) != null ) { cond = ( IWatchedCondition ) objRef . get ( ) ; cond . setActive ( false ) ; } } }
public class PubSubOutputHandler {
    /**
     * Builds a ControlAck message for the given stream and hands it to the
     * parent input handler. Failures are FFDC-logged and traced but not
     * propagated to the caller.
     *
     * @see com.ibm.ws.sib.processor.impl.interfaces.UpstreamControl#sendAckMessage(com.ibm.ws.sib.trm.topology.Cellule, long, int, com.ibm.websphere.sib.Reliability, com.ibm.ws.sib.utils.SIBUuid12)
     */
    public void sendAckMessage(SIBUuid8 meUuid, SIBUuid12 destUuid, SIBUuid8 busUuid, long ackPrefix, int priority, Reliability reliability, SIBUuid12 streamID, boolean consolidate) throws SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "sendAckMessage", new Object[] { meUuid, new Long(ackPrefix), new Integer(priority), reliability, streamID });
        // Build the ack control message and stamp it with the prefix being acknowledged.
        ControlAck newAckMsg = createControlAckMessage(priority, reliability, streamID);
        newAckMsg.setAckPrefix(ackPrefix);
        try {
            _parentInputHandler.handleControlMessage(null, newAckMsg);
        } catch (SIException e) {
            // FFDC — record the failure for serviceability; the error is swallowed.
            FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.PubSubOutputHandler.sendAckMessage", "1:1731:1.164.1.5", this);
            SibTr.exception(tc, e);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
                SibTr.exception(tc, e);
                SibTr.exit(tc, "sendAckMessage", e);
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "sendAckMessage");
    }
}
public class LToLongBiFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < T1 , T2 > LToLongBiFunction < T1 , T2 > toLongBiFunctionFrom ( Consumer < LToLongBiFunctionBuilder < T1 , T2 > > buildingFunction ) { } }
LToLongBiFunctionBuilder builder = new LToLongBiFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class Classpath { /** * Returns a resource name as defined by { @ link ClassLoader # getResource ( String ) } * @ param uri to resource * @ return resource name */ public static String resourceName ( URI uri ) { } }
if ( ! CLASSPATH_SCHEME . equals ( uri . getScheme ( ) ) ) { throw new IllegalArgumentException ( "uri must have classpath scheme " + uri ) ; } String resourceName = uri . getSchemeSpecificPart ( ) ; if ( resourceName . startsWith ( "/" ) ) { return resourceName . substring ( 1 ) ; } return resourceName ;
public class ClockTileSkin { /** * * * * * * Initialization * * * * * */ @ Override protected void initGraphics ( ) { } }
super . initGraphics ( ) ; currentValueListener = o -> { if ( tile . isRunning ( ) ) { return ; } // Update time only if clock is not already running updateTime ( ZonedDateTime . ofInstant ( Instant . ofEpochSecond ( tile . getCurrentTime ( ) ) , ZoneId . of ( ZoneId . systemDefault ( ) . getId ( ) ) ) ) ; } ; timeListener = o -> updateTime ( tile . getTime ( ) ) ; timeFormatter = DateTimeFormatter . ofPattern ( "HH:mm" , tile . getLocale ( ) ) ; dateFormatter = DateTimeFormatter . ofPattern ( "dd MMM YYYY" , tile . getLocale ( ) ) ; dayOfWeekFormatter = DateTimeFormatter . ofPattern ( "EEEE" , tile . getLocale ( ) ) ; titleText = new Text ( "" ) ; titleText . setTextOrigin ( VPos . TOP ) ; Helper . enableNode ( titleText , ! tile . getTitle ( ) . isEmpty ( ) ) ; text = new Text ( tile . getText ( ) ) ; text . setFill ( tile . getUnitColor ( ) ) ; Helper . enableNode ( text , tile . isTextVisible ( ) ) ; timeRect = new Rectangle ( ) ; timeText = new Text ( timeFormatter . format ( tile . getTime ( ) ) ) ; timeText . setTextOrigin ( VPos . CENTER ) ; dateText = new Text ( dateFormatter . format ( tile . getTime ( ) ) ) ; dayOfWeekText = new Text ( dayOfWeekFormatter . format ( tile . getTime ( ) ) ) ; getPane ( ) . getChildren ( ) . addAll ( titleText , text , timeRect , timeText , dateText , dayOfWeekText ) ;
public class HttpEndpointBasedTokenMapSupplier {
    /**
     * Fetches the cluster topology (cluster_describe) from one randomly chosen
     * active host, retrying that same host up to NUM_RETRIES_PER_NODE times.
     * Only the last attempt's exception is propagated, so the failing host can
     * be removed from the connection pool.
     *
     * NOTE(review): the javadoc said "tries multiple nodes", but the code picks
     * a single random host and retries only that one — confirm intent.
     *
     * @param activeHosts hosts currently considered active
     * @return the topology from cluster_describe
     */
    private String getTopologyFromRandomNodeWithRetry(Set<Host> activeHosts) {
        int count = NUM_RETRIES_PER_NODE;
        String nodeResponse;
        Exception lastEx;
        final Host randomHost = getRandomHost(activeHosts);
        do {
            try {
                lastEx = null; // only the most recent failure is kept
                nodeResponse = getResponseViaHttp(randomHost.getHostName());
                if (nodeResponse != null) {
                    Logger.info("Received topology from " + randomHost);
                    return nodeResponse;
                }
            } catch (Exception e) {
                Logger.info("cannot get topology from : " + randomHost);
                lastEx = e;
            } finally {
                count--;
            }
        } while ((count > 0));
        // All attempts exhausted: surface the last failure (or a generic error
        // when every attempt returned null without throwing).
        if (lastEx != null) {
            if (lastEx instanceof ConnectTimeoutException) {
                throw new TimeoutException("Unable to obtain topology", lastEx).setHost(randomHost);
            }
            throw new DynoException(String.format("Unable to obtain topology from %s", randomHost), lastEx);
        } else {
            throw new DynoException(String.format("Could not contact dynomite manager for token map on %s", randomHost));
        }
    }
}
public class WriteRecordClass {

    /**
     * Read thru the classes until you get a Physical data class.
     *
     * Walks the class-inheritance chain of the current FieldData record
     * (looking each class name up in the ClassInfo file) and fills in:
     * - fieldStuff.strBaseFieldClass: the first class found in a ".base.field"
     *   package (or the field's own class if the chain ends first), and
     * - fieldStuff.strDataClass: the primitive data type ("Short", "Integer",
     *   "Double", "Float", "Boolean", "String" or "Date") derived from the
     *   first "<Type>Field"-named class in the chain.
     *
     * @param fieldStuff out-parameter holder whose strBaseFieldClass and
     *                   strDataClass members are populated by this method
     */
    public void getBaseDataClass(FieldStuff fieldStuff) {
        Record recClassInfo2 = m_recClassInfo2;
        FieldData recFieldData = (FieldData) this.getRecord(FieldData.FIELD_DATA_FILE);
        String strRecordClass = recFieldData.getField(FieldData.FIELD_CLASS).getString();
        fieldStuff.strBaseFieldClass = null;
        try {
            while (true) {
                // Look the current class name up in the ClassInfo file.
                recClassInfo2.setKeyArea(ClassInfo.CLASS_NAME_KEY);
                recClassInfo2.getField(ClassInfo.CLASS_NAME).setString(strRecordClass); // Class of this record
                if ((!recClassInfo2.seek("=")) || (strRecordClass == null) || (strRecordClass.length() == 0)) {
                    // End of the chain (class not found or no name): fall back
                    // to the field's declared class if nothing better was found.
                    if (fieldStuff.strBaseFieldClass == null)
                        fieldStuff.strBaseFieldClass = recFieldData.getField(FieldData.FIELD_CLASS).getString(); // Never
                    return;
                }
                if (fieldStuff.strBaseFieldClass == null) {
                    // The first class living in a ".base.field" package is the base field class.
                    String packageName = ((ClassInfo) recClassInfo2).getPackageName(null);
                    if (packageName.endsWith(".base.field"))
                        fieldStuff.strBaseFieldClass = recClassInfo2.getField(ClassInfo.CLASS_NAME).toString();
                }
                if (strRecordClass.indexOf("Field") != -1) {
                    // Derive the primitive type from a "<Type>Field" class name.
                    String strType = strRecordClass.substring(0, strRecordClass.indexOf("Field"));
                    // DateTime and Time fields are both stored as Date data.
                    if ((strType.equals("DateTime")) || (strType.equals("Time")))
                        strType = "Date";
                    if (strType.length() > 0)
                        if ("Short Integer Double Float Boolean String Date".indexOf(strType) != -1) {
                            // Found a recognized physical data type; done.
                            if ((fieldStuff.strDataClass == null) || (fieldStuff.strDataClass.length() == 0))
                                fieldStuff.strDataClass = strType; // End of based records - not found
                            return;
                        }
                }
                // Move up to the parent class and continue.
                strRecordClass = recClassInfo2.getField(ClassInfo.BASE_CLASS_NAME).getString();
            }
        } catch (DBException ex) {
            ex.printStackTrace();
        }
    }
}
public class CmsDependentSelectWidget { /** * Updates the select options from the given entity . < p > * @ param entity a top - level content entity */ public void update ( CmsEntity entity ) { } }
List < Object > baseObjects = CmsEntity . getValuesForPath ( entity , m_basePath ) ; LinkedHashMap < String , String > options = Maps . newLinkedHashMap ( ) ; for ( Object baseObject : baseObjects ) { List < Object > valueValues = CmsEntity . getValuesForPath ( baseObject , m_valuePath ) ; List < Object > descriptionValues = CmsEntity . getValuesForPath ( baseObject , m_descriptionPath ) ; if ( valueValues . size ( ) > 0 ) { String value = ( String ) valueValues . get ( 0 ) ; String description = value ; if ( descriptionValues . size ( ) > 0 ) { description = ( String ) descriptionValues . get ( 0 ) ; } options . put ( value , description ) ; } } replaceOptions ( options ) ;
public class AcroFields {

    /**
     * Verifies a signature. An example usage is:
     * <pre>
     * KeyStore kall = PdfPKCS7.loadCacertsKeyStore();
     * PdfReader reader = new PdfReader("my_signed_doc.pdf");
     * AcroFields af = reader.getAcroFields();
     * ArrayList names = af.getSignatureNames();
     * for (int k = 0; k &lt; names.size(); ++k) {
     *    String name = (String)names.get(k);
     *    System.out.println("Signature name: " + name);
     *    System.out.println("Signature covers whole document: " + af.signatureCoversWholeDocument(name));
     *    PdfPKCS7 pk = af.verifySignature(name);
     *    Calendar cal = pk.getSignDate();
     *    Certificate pkc[] = pk.getCertificates();
     *    System.out.println("Subject: " + PdfPKCS7.getSubjectFields(pk.getSigningCertificate()));
     *    System.out.println("Document modified: " + !pk.verify());
     *    Object fails[] = PdfPKCS7.verifyCertificates(pkc, kall, null, cal);
     *    if (fails == null)
     *        System.out.println("Certificates verified against the KeyStore");
     *    else
     *        System.out.println("Certificate failed: " + fails[1]);
     * }
     * </pre>
     *
     * @param name the signature field name
     * @param provider the provider or <code>null</code> for the default provider
     * @return a <CODE>PdfPKCS7</CODE> class to continue the verification,
     *         or null when the field has no signature dictionary
     */
    public PdfPKCS7 verifySignature(String name, String provider) {
        PdfDictionary v = getSignatureDictionary(name);
        if (v == null)
            return null;
        try {
            PdfName sub = v.getAsName(PdfName.SUBFILTER);
            PdfString contents = v.getAsString(PdfName.CONTENTS);
            PdfPKCS7 pk = null;
            if (sub.equals(PdfName.ADBE_X509_RSA_SHA1)) {
                // adbe.x509.rsa_sha1 stores the certificate separately in /Cert.
                PdfString cert = v.getAsString(PdfName.CERT);
                pk = new PdfPKCS7(contents.getOriginalBytes(), cert.getBytes(), provider);
            }
            else
                // All other subfilters embed the full PKCS#7 object in /Contents.
                pk = new PdfPKCS7(contents.getOriginalBytes(), provider);
            updateByteRange(pk, v);
            // Optional metadata entries: signing time, name, reason, location.
            PdfString str = v.getAsString(PdfName.M);
            if (str != null)
                pk.setSignDate(PdfDate.decode(str.toString()));
            PdfObject obj = PdfReader.getPdfObject(v.get(PdfName.NAME));
            if (obj != null) {
                // /Name may be a string or a name object; decode accordingly.
                if (obj.isString())
                    pk.setSignName(((PdfString)obj).toUnicodeString());
                else if (obj.isName())
                    pk.setSignName(PdfName.decodeName(obj.toString()));
            }
            str = v.getAsString(PdfName.REASON);
            if (str != null)
                pk.setReason(str.toUnicodeString());
            str = v.getAsString(PdfName.LOCATION);
            if (str != null)
                pk.setLocation(str.toUnicodeString());
            return pk;
        }
        catch (Exception e) {
            throw new ExceptionConverter(e);
        }
    }
}
public class JDBCResultSet {

    /**
     * #ifdef JAVA4
     *
     * Updating REF column values is not supported by this driver; always
     * throws the standard "feature not supported" SQLException.
     *
     * @param columnLabel the label of the column to update
     * @param x the new REF value (ignored)
     * @throws SQLException always, since this operation is unsupported
     */
    public void updateRef(String columnLabel, java.sql.Ref x) throws SQLException {
        throw Util.notSupported();
    }
}
public class StochasticMinimizer { /** * This is used to smooth the gradients , providing a more robust calculation which * generally leads to a better routine . */ protected static double [ ] smooth ( List < double [ ] > toSmooth ) { } }
double [ ] smoothed = new double [ toSmooth . get ( 0 ) . length ] ; for ( double [ ] thisArray : toSmooth ) { ArrayMath . pairwiseAddInPlace ( smoothed , thisArray ) ; } ArrayMath . multiplyInPlace ( smoothed , 1 / ( ( double ) toSmooth . size ( ) ) ) ; return smoothed ;
public class Reflect { /** * Get all methods from both class static and object instance namespaces */ public static BshMethod [ ] getDeclaredMethods ( Class < ? > type ) { } }
if ( ! isGeneratedClass ( type ) ) return new BshMethod [ 0 ] ; if ( type . isInterface ( ) ) return getMethods ( type ) ; return getMethods ( getNewInstance ( type ) ) ;
public class ClassScaner { /** * 扫描Java指定的ClassPath路径 * @ return 扫描到的类 */ private void scanJavaClassPaths ( ) { } }
final String [ ] javaClassPaths = ClassUtil . getJavaClassPaths ( ) ; for ( String classPath : javaClassPaths ) { // bug修复 , 由于路径中空格和中文导致的Jar找不到 classPath = URLUtil . decode ( classPath , CharsetUtil . systemCharsetName ( ) ) ; scanFile ( new File ( classPath ) , null ) ; }
public class QEmuCommandLine { /** * Executes this QEmuCommandLine and returns a new { @ link Process } . */ @ Nonnull public Process exec ( ) throws IOException { } }
List < String > commandWords = toCommandWords ( ) ; ProcessBuilder builder = new ProcessBuilder ( commandWords ) ; // TODO : Use Redirect to send the I / O somewhere useful . QEmuCommandLineUtils . redirectIO ( builder ) ; return builder . start ( ) ;
public class AbstractAlpineQueryManager { /** * Persists the specified PersistenceCapable object . * @ param object a PersistenceCapable object * @ param < T > the type to return * @ return the persisted object */ @ SuppressWarnings ( "unchecked" ) public < T > T persist ( T object ) { } }
pm . currentTransaction ( ) . begin ( ) ; pm . makePersistent ( object ) ; pm . currentTransaction ( ) . commit ( ) ; pm . getFetchPlan ( ) . setDetachmentOptions ( FetchPlan . DETACH_LOAD_FIELDS ) ; pm . refresh ( object ) ; return object ;
public class ProxyFactory {

    /**
     * {@inheritDoc}
     *
     * Assembles the proxy class: adds the invocation-handler and construction
     * guard fields, overrides all methods (plus toString/equals/hashCode and
     * any additional interfaces) to delegate through the handler, wires up the
     * constructors, and copies the superclass's annotations onto the proxy.
     */
    @Override
    protected void generateClass() {
        // Field holding the InvocationHandler every overridden method delegates to.
        classFile.addField(AccessFlag.PRIVATE, INVOCATION_HANDLER_FIELD, InvocationHandler.class);
        // Boolean guard ("Z") flipped once construction finishes, so methods
        // invoked from within a constructor are not routed to the handler.
        classFile.addField(AccessFlag.PRIVATE, CONSTRUCTED_GUARD, "Z");
        if (serializableProxyClass != null) {
            // Serializable proxies need writeReplace() to swap in the
            // serializable stand-in during serialization.
            createWriteReplace();
        }
        MethodBodyCreator creator = getDefaultMethodOverride();
        overrideAllMethods(creator);
        for (Class<?> iface : additionalInterfaces) {
            addInterface(creator, iface);
        }
        // Object methods are not covered by overrideAllMethods and are handled explicitly.
        overrideToString(creator);
        overrideEquals(creator);
        overrideHashcode(creator);
        createConstructorDelegates(new ProxyConstructorBodyCreator());
        finalizeStaticConstructor();
        // Mirror the superclass's runtime-visible annotations onto the proxy class.
        for (Annotation annotation : this.getSuperClass().getDeclaredAnnotations()) {
            classFile.getRuntimeVisibleAnnotationsAttribute().addAnnotation(annotation);
        }
    }
}
public class BackendlessSerializer { /** * Returns a serializer for the class * @ param clazz * @ return */ private static IObjectSerializer getSerializer ( Class clazz ) { } }
Iterator < Map . Entry < Class , IObjectSerializer > > iterator = serializers . entrySet ( ) . iterator ( ) ; IObjectSerializer serializer = DEFAULT_SERIALIZER ; while ( iterator . hasNext ( ) ) { Map . Entry < Class , IObjectSerializer > entry = iterator . next ( ) ; if ( entry . getKey ( ) . isAssignableFrom ( clazz ) ) { serializer = entry . getValue ( ) ; break ; } } return serializer ;
public class AdaptedAction { /** * ( non - Javadoc ) * @ see com . sporniket . libre . ui . action . UserInterfaceAction # setKeyboardShortcut ( java . lang . String ) */ @ Override public void setKeyboardShortcut ( String keyboardShortcut ) { } }
if ( IS_NOT_EMPTY . test ( keyboardShortcut ) ) { myUserInterfaceAction . setKeyboardShortcut ( keyboardShortcut ) ; putValue ( Action . ACCELERATOR_KEY , KeyboardRepresentationTable . getKeyStroke ( getKeyboardShortcut ( ) ) ) ; } else { putValue ( Action . ACCELERATOR_KEY , null ) ; }
public class JobsInner {

    /**
     * Gets a list of Jobs within the specified Experiment.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param workspaceName The name of the workspace. Workspace names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). The name must be from 1 through 64 characters long.
     * @param experimentName The name of the experiment. Experiment names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). The name must be from 1 through 64 characters long.
     * @param jobsListByExperimentOptions Additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;JobInner&gt; object
     */
    public Observable<ServiceResponse<Page<JobInner>>> listByExperimentWithServiceResponseAsync(final String resourceGroupName, final String workspaceName, final String experimentName, final JobsListByExperimentOptions jobsListByExperimentOptions) {
        // Fetch the first page, then lazily chain each following page by its
        // nextPageLink until the service reports no more pages (link == null).
        return listByExperimentSinglePageAsync(resourceGroupName, workspaceName, experimentName, jobsListByExperimentOptions)
            .concatMap(new Func1<ServiceResponse<Page<JobInner>>, Observable<ServiceResponse<Page<JobInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<JobInner>>> call(ServiceResponse<Page<JobInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page: emit it and terminate the chain.
                        return Observable.just(page);
                    }
                    // Emit this page, then recursively continue with the next one.
                    return Observable.just(page).concatWith(listByExperimentNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
}
public class ApplicationConfig {

    /**
     * Do not modify addRestResourceClasses() method. It is automatically
     * populated with all resources defined in the project. If required, comment
     * out calling this method in getClasses().
     *
     * @param resources the set that receives every JAX-RS resource class
     */
    private void addRestResourceClasses(Set<Class<?>> resources) {
        // Auto-generated registration of the project's single REST resource.
        resources.add(pl.setblack.chatsample.web.ChatService.class);
    }
}
public class PortletTypeImpl { /** * If not already created , a new < code > supported - publishing - event < / code > element will be created and returned . * Otherwise , the first existing < code > supported - publishing - event < / code > element will be returned . * @ return the instance defined for the element < code > supported - publishing - event < / code > */ public EventDefinitionReferenceType < PortletType < T > > getOrCreateSupportedPublishingEvent ( ) { } }
List < Node > nodeList = childNode . get ( "supported-publishing-event" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new EventDefinitionReferenceTypeImpl < PortletType < T > > ( this , "supported-publishing-event" , childNode , nodeList . get ( 0 ) ) ; } return createSupportedPublishingEvent ( ) ;
public class Date { /** * Returns the millisecond value of this < code > Date < / code > object * without affecting its internal state . */ static final long getMillisOf ( Date date ) { } }
if ( date . cdate == null || date . cdate . isNormalized ( ) ) { return date . fastTime ; } BaseCalendar . Date d = ( BaseCalendar . Date ) date . cdate . clone ( ) ; return gcal . getTime ( d ) ;
public class LayerBuildsAccessor { /** * Like { @ link Map # entrySet ( ) } except that the returned entrySet * contains only entries belonging to this layer * @ return entry set of map entries belonging to this layer */ public Set < Map . Entry < String , CacheEntry > > entrySet ( ) { } }
Set < Map . Entry < String , CacheEntry > > result = Collections . emptySet ( ) ; NavigableMap < String , CacheEntry > navMap = new TreeMap < String , CacheEntry > ( map ) ; String from = navMap . ceilingKey ( keyPrefix ) , to = navMap . lowerKey ( keyPrefixUpperBound ) ; if ( from != null ) { result = navMap . subMap ( from , true , to , true ) . entrySet ( ) ; } return result ;
public class UISelect { /** * Gets the currently selected option . * @ return the selected option */ public T selected ( ) { } }
return selectedIndex < 0 || selectedIndex >= options ( ) . size ( ) ? null : options ( ) . get ( selectedIndex ) ;
public class DL4JResources { /** * Reset to the default directory , or the directory set via the { @ link DL4JSystemProperties # DL4J _ RESOURCES _ DIR _ PROPERTY } system property , * org . deeplearning4j . resources . directory */ public static void resetBaseDirectoryLocation ( ) { } }
String property = System . getProperty ( DL4JSystemProperties . DL4J_RESOURCES_DIR_PROPERTY ) ; if ( property != null ) { baseDirectory = new File ( property ) ; } else { baseDirectory = new File ( System . getProperty ( "user.home" ) , ".deeplearning4j" ) ; } if ( ! baseDirectory . exists ( ) ) { baseDirectory . mkdirs ( ) ; }
public class FragmentTask {

    /**
     * Produce a rejoining response.
     *
     * Logs the fragment for replay (writes only), then answers the initiator
     * with a dummy SUCCESS response marked as "recovering", populated with one
     * dummy dependency per requested output so downstream consumers (e.g. the
     * MPI during elastic join) are satisfied.
     *
     * @param siteConnection the site procedure connection (unused here)
     * @param taskLog replay log that captures non-read-only fragments
     * @throws IOException if logging the task fails
     */
    @Override
    public void runForRejoin(SiteProcedureConnection siteConnection, TaskLog taskLog) throws IOException {
        // Only mutating work needs to be replayed after rejoin.
        if (!m_txnState.isReadOnly()) {
            taskLog.logTask(m_fragmentMsg);
        }
        final FragmentResponseMessage response =
            new FragmentResponseMessage(m_fragmentMsg, m_initiator.getHSId());
        response.setRecovering(true);
        response.setStatus(FragmentResponseMessage.SUCCESS, null);

        // Set the dependencies even if this is a dummy response. This site could be the master
        // on elastic join, so the fragment response message is actually going to the MPI.
        for (int frag = 0; frag < m_fragmentMsg.getFragmentCount(); frag++) {
            final int outputDepId = m_fragmentMsg.getOutputDepId(frag);
            response.addDependency(new DependencyPair.BufferDependencyPair(outputDepId,
                    m_rawDummyResponse, 0, m_rawDummyResponse.length));
        }

        deliverResponse(response);
        completeFragment();
    }
}
public class RuleImpl {

    /**
     * <!-- begin-user-doc -->
     * Removes an inverse reference: containment lists (parameters) delegate to
     * the list's basicRemove, single containment (body) is cleared via
     * basicSetBody; everything else falls through to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case SimpleAntlrPackage.RULE__PARAMETERS:
                return ((InternalEList<?>) getParameters()).basicRemove(otherEnd, msgs);
            case SimpleAntlrPackage.RULE__BODY:
                return basicSetBody(null, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }
}
public class IonReaderTextRawX { /** * we ' re not really in a struct , we at the top level */ public boolean isInStruct ( ) { } }
boolean in_struct = false ; IonType container = getContainerType ( ) ; if ( IonType . STRUCT . equals ( container ) ) { if ( getDepth ( ) > 0 ) { in_struct = true ; } else { assert ( IonType . STRUCT . equals ( _nesting_parent ) == true ) ; } } return in_struct ;
public class VariantMerger { /** * Update a key * @ param from * @ param to * @ return true if there was a value to be moved . */ public boolean updateDefaultKeys ( String from , String to ) { } }
if ( null == from ) { return false ; } if ( null == to ) { return false ; } if ( StringUtils . equals ( from , to ) ) { return false ; } String value = this . defaultValues . remove ( from ) ; if ( null == value ) { return false ; } this . defaultValues . put ( to , value ) ; return true ;
public class GraphEncoder { /** * Convert a Graph fields into an opaque String that can later be * re - assembled into a Graph object . Note that GraphConfiguration * information is NOT encoded into the opaque String . * @ param width of the Graph * @ param height of the Graph * @ param data array of RegionData for the graph * @ return opaque String which can later be used with decode ( ) */ public static String encode ( int width , int height , RegionData data [ ] ) { } }
StringBuilder sb = new StringBuilder ( ) ; sb . append ( width ) . append ( DELIM ) ; sb . append ( height ) ; boolean first = false ; for ( RegionData datum : data ) { if ( first ) { first = false ; } else { sb . append ( DELIM ) ; } sb . append ( datum . getLabel ( ) ) . append ( REGION_DELIM ) ; sb . append ( datum . getHighlightedValue ( ) ) . append ( REGION_DELIM ) ; sb . append ( encodeHex ( datum . getValues ( ) ) ) ; } return sb . toString ( ) ;
public class AbstractStoreConfigurationBuilder { /** * { @ inheritDoc } */ @ Override public S addProperty ( String key , String value ) { } }
TypedProperties properties = attributes . attribute ( PROPERTIES ) . get ( ) ; properties . put ( key , value ) ; attributes . attribute ( PROPERTIES ) . set ( properties ) ; XmlConfigHelper . setAttributes ( attributes , properties , false , false ) ; this . properties = properties ; return self ( ) ;
public class SQLPatternPartElement {

    /**
     * ===== Part interface
     *
     * Renders this pattern element as a regular-expression fragment:
     * optional leader, then (depending on the combined flags) a non-capturing
     * and/or capturing group wrapping the child parts joined by the separator,
     * optional "?" markers, and an optional trailer.
     *
     * @param flagsAdd extra SQLPatternFactory flags OR-ed onto this element's own flags
     * @return the assembled regular-expression text for this element
     */
    @Override
    public String generateExpression(int flagsAdd) {
        int flags = m_flags | flagsAdd;
        StringBuilder sb = new StringBuilder();
        if (m_leader != null) {
            sb.append(m_leader);
        }
        // Need a non-capturing group when either an explicit non-capturing group is
        // requested or it is optional. The only case where a non-capturing group isn't
        // needed for an optional is an explicit capture without leading space.
        boolean captureGroup = (flags & SQLPatternFactory.CAPTURE) != 0;
        boolean explicitNonCaptureGroup = !captureGroup && (flags & SQLPatternFactory.GROUP) != 0;
        boolean optional = ((flags & SQLPatternFactory.OPTIONAL) != 0);
        // Suppress the leading space at this level when it should be pushed down to the child.
        boolean leadingSpace = ((flags & SQLPatternFactory.LEADING_SPACE) != 0);
        boolean leadingSpaceToChild = ((flags & SQLPatternFactory.ADD_LEADING_SPACE_TO_CHILD) != 0);
        boolean childLeadingSpace = ((flags & SQLPatternFactory.CHILD_SPACE_SEPARATOR) != 0
                                     || (leadingSpace && leadingSpaceToChild));
        boolean nonCaptureGroup = (explicitNonCaptureGroup || (optional && (!captureGroup || leadingSpace)));
        // "?" goes inside the capture when the capture itself is the optional unit,
        // outside when the wrapping non-capturing group is.
        boolean innerOptional = optional && captureGroup && !nonCaptureGroup;
        boolean outerOptional = optional && nonCaptureGroup;
        if (nonCaptureGroup) {
            sb.append("(?:");
        }
        if (leadingSpace && !leadingSpaceToChild) {
            // Protect something like an OR sequence by using an inner group
            sb.append("\\s+(?:");
        }
        if (captureGroup) {
            if (m_captureLabel != null) {
                // Named capture group.
                sb.append(String.format("(?<%s>", m_captureLabel));
            }
            else {
                sb.append("(");
            }
        }
        for (int i = 0; i < m_parts.length; ++i) {
            int flagsAddChild = 0;
            if (i > 0) {
                // Separator only between children, never before the first.
                if (m_separator != null) {
                    sb.append(m_separator);
                }
                if (childLeadingSpace) {
                    flagsAddChild |= SQLPatternFactory.LEADING_SPACE;
                }
            }
            else if (childLeadingSpace && leadingSpaceToChild) {
                // The first child carries the leading space we suppressed above.
                flagsAddChild |= SQLPatternFactory.LEADING_SPACE;
            }
            sb.append(m_parts[i].generateExpression(flagsAddChild));
        }
        // Close groups in reverse order of opening, inserting "?" where the
        // optionality applies.
        if (captureGroup) {
            sb.append(")");
        }
        if (innerOptional) {
            sb.append("?");
        }
        if (leadingSpace && !leadingSpaceToChild) {
            sb.append(")");
        }
        if (nonCaptureGroup) {
            sb.append(")");
        }
        if (outerOptional) {
            sb.append("?");
        }
        if (m_trailer != null) {
            sb.append(m_trailer);
        }
        return sb.toString();
    }
}
public class FilterUtils {

    /**
     * Check the given extended attribute in propList to see if it was excluded.
     *
     * Walks the attribute group hierarchy from most specific to most common;
     * at each level, each attribute value is resolved against the filter map
     * (value-specific rule, then attribute-level default, then the global
     * default). A value is excluded only when every applicable rule says so.
     *
     * @param propList attribute group names, from most common to most specific
     * @param attValue attribute group values
     * @return {@code true} if should be excluded, otherwise {@code false}
     */
    @VisibleForTesting
    boolean extCheckExclude(final QName[] propList, final List<String> attValue) {
        // Most specific group first (end of the list).
        for (int propListIndex = propList.length - 1; propListIndex >= 0; propListIndex--) {
            final QName attName = propList[propListIndex];
            checkRuleMapping(attName, attValue);
            boolean hasNonExcludeAction = false;
            boolean hasExcludeAction = false;
            for (final String attSubValue : attValue) {
                final FilterKey filterKey = new FilterKey(attName, attSubValue);
                final Action filterAction = filterMap.get(filterKey);
                // no action will be considered as 'not exclude'
                if (filterAction == null) {
                    // check Specified DefaultAction mapping this attribute's name
                    final Action defaultAction = filterMap.get(new FilterKey(attName, null));
                    if (defaultAction != null) {
                        if (defaultAction instanceof Exclude) {
                            hasExcludeAction = true;
                        } else {
                            // Attribute-level default includes this value: not excluded.
                            return false;
                        }
                    } else {
                        if (hasExcludeAction) {
                            // Mixed results so far; the global default decides.
                            if (!isDefaultExclude()) {
                                return false;
                            }
                        } else {
                            hasNonExcludeAction = true;
                        }
                    }
                } else if (filterAction instanceof Exclude) {
                    hasExcludeAction = true;
                    if (hasNonExcludeAction) {
                        // Mixed results: only the global default can keep this excluded.
                        if (isDefaultExclude()) {
                            hasNonExcludeAction = false;
                        } else {
                            return false;
                        }
                    }
                } else {
                    // An explicit non-exclude rule always wins for this attribute.
                    return false;
                }
            }
            // if there is exclude action but not all value should be excluded
            if (hasNonExcludeAction) {
                // under the condition of default action also not exist or not excluded
                if (0 == propListIndex) {
                    // the ancient parent on the top level
                    return isDefaultExclude();
                }
                // if all of the value should be excluded
            } else if (hasExcludeAction) {
                return true;
            }
            // If no action for this extended prop has been found, we need to check the parent prop action
        }
        return false;
    }
}
public class GenericMessageImpl {

    /**
     * Parse the first line of a message.
     *
     * Consumes up to three space-separated tokens from the buffer (skipping
     * leading CRLFs before the first), tracking progress across calls via
     * numFirstLineTokensRead so that parsing can resume when the buffer runs
     * out mid-line.
     *
     * @param buff the buffer holding (part of) the message's first line
     * @return boolean (true means parsed entire line)
     * @throws Exception on malformed input, e.g. too many leading CRLFs
     */
    public boolean parseLine(WsByteBuffer buff) throws Exception {
        final boolean bTrace = TraceComponent.isAnyTracingEnabled();
        if (bTrace && tc.isDebugEnabled()) {
            Tr.debug(tc, "parseLine called for " + this);
        }
        // PI34161 - Record the start of the request at the time of parsing
        if ((this instanceof HttpRequestMessageImpl)
            && ((HttpRequestMessageImpl) this).getServiceContext().getHttpConfig().isAccessLoggingEnabled()) {
            this.startTime = System.nanoTime();
        }
        boolean rc = false;
        // Remember where parsing began so a failure can dump the raw bytes.
        int startpos = (bTrace && tc.isDebugEnabled()) ? buff.position() : 0;
        TokenCodes tcRC = TokenCodes.TOKEN_RC_MOREDATA;
        // stop parsing the FirstLine when we either hit the end of it (CRLF)
        // or we hit the end of the buffer and need to read more
        while (!rc) {
            if (0 == this.numFirstLineTokensRead) {
                // first token, skip leading CRLFs (up to 16 blank lines)
                tcRC = skipCRLFs(buff);
                if (TokenCodes.TOKEN_RC_DELIM.equals(tcRC)) {
                    try {
                        tcRC = parseTokenExtract(buff, SPACE, false, LOG_FULL);
                        if (!tcRC.equals(TokenCodes.TOKEN_RC_MOREDATA)) {
                            setParsedFirstToken(getParsedToken());
                        }
                    } catch (MalformedMessageException mme) {
                        // no FFDC required
                        // debug print this failing first token to help figure
                        // out why the error happened, usually buffer corruption
                        // Note: if this was discrimination, then it should just
                        // mean it wasn't our data, but we can't tell here which
                        // path it was (i.e. should it have worked)
                        if (bTrace && tc.isDebugEnabled()) {
                            int curpos = buff.position();
                            buff.position(startpos);
                            byte[] data;
                            int offset = 0;
                            if (null != getParsedToken()) {
                                // Prepend the partially-parsed token to the dump.
                                offset = getParsedToken().length;
                                data = new byte[(curpos - startpos) + offset];
                                System.arraycopy(getParsedToken(), 0, data, 0, offset);
                            } else {
                                data = new byte[(curpos - startpos)];
                            }
                            buff.get(data, offset, data.length - offset);
                            Tr.debug(tc, "Initial parse of message failed, (128) of buffer: \n"
                                         + GenericUtils.getHexDump(data, 128));
                        }
                        throw mme;
                    }
                } else if (TokenCodes.TOKEN_RC_MOREDATA.equals(tcRC)) {
                    // ran out of data
                    resetByteCache();
                    break; // out of while
                } else if (TokenCodes.TOKEN_RC_CRLF.equals(tcRC)) {
                    throw new MalformedMessageException("Too many leading CRLFs");
                }
            } else if (1 == this.numFirstLineTokensRead) {
                // second token
                tcRC = parseTokenExtract(buff, SPACE, true, LOG_PARTIAL);
                if (!tcRC.equals(TokenCodes.TOKEN_RC_MOREDATA)) {
                    setParsedSecondToken(getParsedToken());
                    if (tcRC.equals(TokenCodes.TOKEN_RC_CRLF)) {
                        // Line ended after two tokens (no third token present).
                        if (bTrace && tc.isDebugEnabled()) {
                            Tr.debug(tc, "Received CRLF after second token");
                        }
                        rc = true;
                    }
                }
            } else if (2 == this.numFirstLineTokensRead) {
                // third token
                tcRC = parseCRLFTokenExtract(buff, LOG_FULL);
                if (!tcRC.equals(TokenCodes.TOKEN_RC_MOREDATA)) {
                    setParsedThirdToken(getParsedToken());
                    rc = true;
                }
            } else {
                // stop coming here
                rc = true;
            }
            // scenarios where we need more data result in the MOREDATA RC above
            if (tcRC.equals(TokenCodes.TOKEN_RC_MOREDATA)) {
                resetByteCache();
                break; // out of while
            }
            // otherwise we finished parsing a single token
            setParsedToken(null);
            this.numFirstLineTokensRead++;
        } // end of while loop
        if (rc) {
            parsingComplete();
            setFirstLineComplete(true); // PK15898
            // go back one from the delimiter, unless it was an LF (if first
            // line ends with just an LF and not CRLF then we don't want to
            // change position as the header parsing above would consider it the
            // end of headers)
            decrementBytePositionIgnoringLFs();
        }
        if (bTrace && tc.isDebugEnabled()) {
            Tr.debug(tc, "parseLine returning " + rc + " for " + this);
        }
        return rc;
    }
}
public class SegmentChunk { /** * Creates a new instance of the SegmentChunk class with the same information as this one , but with a new offset . * @ param newOffset The new offset . * @ return A new SegmentChunk . */ SegmentChunk withNewOffset ( long newOffset ) { } }
SegmentChunk ns = new SegmentChunk ( this . name , newOffset ) ; ns . setLength ( getLength ( ) ) ; if ( isSealed ( ) ) { ns . markSealed ( ) ; } if ( ! exists ( ) ) { ns . markInexistent ( ) ; } return ns ;
public class AWSServerlessApplicationRepositoryClient {

    /**
     * Creates an AWS CloudFormation change set for the given application.
     *
     * @param createCloudFormationChangeSetRequest
     * @return Result of the CreateCloudFormationChangeSet operation returned by the service.
     * @throws TooManyRequestsException
     *         The client is sending more than the allowed number of requests per unit of time.
     * @throws BadRequestException
     *         One of the parameters in the request is invalid.
     * @throws InternalServerErrorException
     *         The AWS Serverless Application Repository service encountered an internal error.
     * @throws ForbiddenException
     *         The client is not authenticated.
     * @sample AWSServerlessApplicationRepository.CreateCloudFormationChangeSet
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/serverlessrepo-2017-09-08/CreateCloudFormationChangeSet"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public CreateCloudFormationChangeSetResult createCloudFormationChangeSet(CreateCloudFormationChangeSetRequest request) {
        // Standard SDK pattern: run pre-execution hooks (handlers/metrics),
        // then delegate to the generated executor.
        request = beforeClientExecution(request);
        return executeCreateCloudFormationChangeSet(request);
    }
}
public class HiveDataset {

    /**
     * Finds all files read by the table and generates CopyableFiles.
     * For the specific semantics see {@link HiveCopyEntityHelper#getCopyEntities}.
     *
     * Returns an empty iterator when the table cannot be copied, or when
     * building the copy entities fails.
     */
    @Override
    public Iterator<FileSet<CopyEntity>> getFileSetIterator(FileSystem targetFs, CopyConfiguration configuration,
            Comparator<FileSet<CopyEntity>> prioritizer, PushDownRequestor<FileSet<CopyEntity>> requestor)
            throws IOException {
        if (!canCopyTable()) {
            return Iterators.emptyIterator();
        }
        try {
            List<FileSet<CopyEntity>> fileSetList = Lists.newArrayList(
                    new HiveCopyEntityHelper(this, configuration, targetFs)
                            .getCopyEntities(configuration, prioritizer, requestor));
            // Deliver file sets in priority order.
            Collections.sort(fileSetList, prioritizer);
            return fileSetList.iterator();
        } catch (IOException ioe) {
            // NOTE(review): despite the declared `throws IOException`, failures
            // here are logged and swallowed (best-effort: the table is skipped
            // rather than failing the whole copy) — confirm this is intended.
            log.error("Failed to copy table " + this.table, ioe);
            return Iterators.emptyIterator();
        }
    }
}
public class CmsColor { /** * Converts the HSV into the RGB . < p > * @ param hue value * @ param sat the saturation value * @ param bri the brightness value */ private void HSVtoRGB ( float hue , float sat , float bri ) { } }
int i ; float f ; float p ; float q ; float t ; if ( sat == 0 ) { m_red = bri ; m_green = bri ; m_blue = bri ; return ; } hue /= 60 ; i = ( int ) Math . floor ( hue ) ; f = hue - i ; p = bri * ( 1 - sat ) ; q = bri * ( 1 - ( sat * f ) ) ; t = bri * ( 1 - ( sat * ( 1 - f ) ) ) ; switch ( i ) { case 0 : m_red = bri ; m_green = t ; m_blue = p ; break ; case 1 : m_red = q ; m_green = bri ; m_blue = p ; break ; case 2 : m_red = p ; m_green = bri ; m_blue = t ; break ; case 3 : m_red = p ; m_green = q ; m_blue = bri ; break ; case 4 : m_red = t ; m_green = p ; m_blue = bri ; break ; default : // case 5: m_red = bri ; m_green = p ; m_blue = q ; break ; }
public class JTrees { /** * Return all descendants of the given node in the given tree model * ( not including the given node ! ) * @ param treeModel The tree model * @ param node The node * @ return The descendants */ public static List < Object > getAllDescendants ( TreeModel treeModel , Object node ) { } }
List < Object > result = new ArrayList < Object > ( ) ; getAllDescendants ( treeModel , node , result ) ; result . remove ( node ) ; return result ;
public class MacOutputStream {
    /**
     * Updates the Mac (if the function is on) using the specified subarray, and
     * in any case writes the subarray to the output stream. That is, if the
     * MAC function is on (see {@link #setOn(boolean) on}), this method calls
     * <code>update</code> on the Mac associated with this stream, passing it the
     * subarray specifications. This method writes the subarray bytes to the
     * output stream first, blocking until the bytes are actually written; the
     * MAC is therefore only updated after a successful write — an I/O failure
     * leaves the MAC state untouched.
     *
     * @param aBuf
     *        the array containing the subarray to be used for updating and
     *        writing to the output stream.
     * @param nOfs
     *        the offset into {@code aBuf} of the first byte to be updated and
     *        written.
     * @param nLen
     *        the number of bytes of data to be updated and written from
     *        {@code aBuf}, starting at offset {@code nOfs}.
     * @exception IOException
     *            if an I/O error occurs.
     * @see Mac#update(byte[], int, int)
     */
    @Override
    public void write (@Nonnull final byte [] aBuf, @Nonnegative final int nOfs, @Nonnegative final int nLen) throws IOException {
        // Write downstream first; only bytes that were actually written are
        // folded into the MAC (same ordering as java.security.DigestOutputStream).
        out.write (aBuf, nOfs, nLen);
        if (m_bOn) {
            m_aMac.update (aBuf, nOfs, nLen);
        }
    }
}
public class UtilIO { /** * Returns an absolute path to the file that is relative to the example directory * @ param path File path relative to root directory * @ return Absolute path to file */ public static URL pathExampleURL ( String path ) { } }
try { File fpath = new File ( path ) ; if ( fpath . isAbsolute ( ) ) return fpath . toURI ( ) . toURL ( ) ; // Assume we are running inside of the project come String pathToBase = getPathToBase ( ) ; if ( pathToBase != null ) { File pathExample = new File ( pathToBase , "data/example/" ) ; if ( pathExample . exists ( ) ) { return new File ( pathExample . getPath ( ) , path ) . getAbsoluteFile ( ) . toURL ( ) ; } } // System . out . println ( " - - - - - " ) ; // maybe we are running inside an app and all data is stored inside as a resource // System . out . println ( " Attempting to load resource " + path ) ; URL url = UtilIO . class . getClassLoader ( ) . getResource ( path ) ; if ( url == null ) { System . err . println ( ) ; System . err . println ( "Can't find data/example directory! There are three likely causes for this problem." ) ; System . err . println ( ) ; System . err . println ( "1) You checked out the source code from git and did not pull the data submodule too." ) ; System . err . println ( "2) You are trying to run an example from outside the BoofCV directory tree." ) ; System . err . println ( "3) You are trying to pass in your own image." ) ; System . err . println ( ) ; System . err . println ( "Solutions:" ) ; System . err . println ( "1) Follow instructions in the boofcv/readme.md file to grab the data directory." ) ; System . err . println ( "2) Launch the example from inside BoofCV's directory tree!" ) ; System . err . println ( "3) Don't use this function and just pass in the path directly" ) ; System . exit ( 1 ) ; } return url ; } catch ( MalformedURLException e ) { throw new RuntimeException ( e ) ; }
public class KeyVaultClientCustomImpl {
    /**
     * Creates a new certificate version. If this is the first version, the
     * certificate resource is created.
     *
     * @param createCertificateRequest
     *        the grouped properties for creating a certificate request;
     *        must not be null
     * @param serviceCallback
     *        the async ServiceCallback to handle successful and failed
     *        responses.
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<CertificateOperation> createCertificateAsync (CreateCertificateRequest createCertificateRequest, final ServiceCallback<CertificateOperation> serviceCallback) {
        // Convenience overload: unpacks the grouped request object and delegates
        // to the fully parameterized createCertificateAsync overload.
        // NOTE(review): dereferences createCertificateRequest without a null check
        // (NullPointerException if null) — confirm callers always validate.
        return createCertificateAsync (createCertificateRequest.vaultBaseUrl (), createCertificateRequest.certificateName (), createCertificateRequest.certificatePolicy (), createCertificateRequest.certificateAttributes (), createCertificateRequest.tags (), serviceCallback);
    }
}