signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class RestApiClient { /** * Gets the user .
* @ param username
* the username
* @ return the user */
public UserEntity getUser ( String username ) { } } | UserEntity userEntity = restClient . get ( "users/" + username , UserEntity . class , new HashMap < String , String > ( ) ) ; return userEntity ; |
public class HttpHelper { /** * Perform an HTTP POST request to the URL .
* @ param content string containing the content to be posted
* @ return string containing the response from the server */
public byte [ ] postBytes ( byte [ ] content ) throws IOException { } } | if ( ! connection . isOpen ( ) ) connection . open ( ) ; connection . prepare ( "POST" ) ; OutputStream os = connection . getOutputStream ( ) ; os . write ( content ) ; response = connection . readInput ( ) ; os . close ( ) ; return getResponseBytes ( ) ; |
public class CharsetIssues { /** * used by external tools , lists the method signature checked for , for unreplaceable encoding methods
* @ return a map of these methods */
@ PublicAPI ( "Used by fb-contrib-eclipse-quickfixes to determine type of fix to apply" ) public static Map < String , Integer > getUnreplaceableCharsetEncodings ( ) { } } | Map < String , Integer > encodings = new HashMap < > ( ( int ) ( UNREPLACEABLE_ENCODING_METHODS . size ( ) * 1.6 ) ) ; for ( Map . Entry < FQMethod , Integer > entry : UNREPLACEABLE_ENCODING_METHODS . entrySet ( ) ) { encodings . put ( entry . getKey ( ) . toString ( ) , entry . getValue ( ) ) ; } return encodings ; |
public class Iteration { /** * Begin an { @ link Iteration } over the named selection of the given type . Also sets the name and type of the variable for this iteration ' s
* " current element " . The type serves for automatic type check . */
public static IterationBuilderOver over ( Class < ? extends WindupVertexFrame > sourceType , String source ) { } } | Iteration iterationImpl = new Iteration ( new TypedNamedFramesSelector ( sourceType , source ) ) ; iterationImpl . setPayloadManager ( new TypedNamedIterationPayloadManager ( sourceType , singleVariableIterationName ( source ) ) ) ; return iterationImpl ; |
public class SymmetricDifferenceMatcher { /** * Remove all mappings to target
* @ param target */
public void unmap ( T target ) { } } | Set < I > items = reverseMap . get ( target ) ; items . forEach ( ( i ) -> mapSet . removeItem ( i , target ) ) ; reverseMap . remove ( target ) ; |
public class LabelledTriple { /** * Get the object of the triple as a string .
* @ return the string form of the object */
public String getObject ( ) { } } | if ( triple . getObject ( ) instanceof Literal ) { return ( ( Literal ) triple . getObject ( ) ) . getLexicalForm ( ) ; } else if ( triple . getObject ( ) instanceof IRI ) { return ( ( IRI ) triple . getObject ( ) ) . getIRIString ( ) ; } return triple . getObject ( ) . ntriplesString ( ) ; |
public class DebugUtil { /** * Builds a prefix message to be used in front of < i > debug < / i > messages for identification purposes .
* The message format is :
* < pre >
* * * * * external debug : [ timestamp ] [ class name ] :
* < / pre >
* @ param pObject the { @ code java . lang . Object } to be debugged . If the object ia a { @ code java . lang . String } object , it is assumed that it is the class name given directly .
* @ return a prefix for a debug message . */
public static String getPrefixDebugMessage ( final Object pObject ) { } } | StringBuilder buffer = new StringBuilder ( ) ; buffer . append ( DEBUG ) ; buffer . append ( getTimestamp ( ) ) ; buffer . append ( " " ) ; if ( pObject == null ) { buffer . append ( "[unknown class]" ) ; } else { if ( pObject instanceof String ) { buffer . append ( ( String ) pObject ) ; } else { buffer . append ( getClassName ( pObject ) ) ; } } buffer . append ( ": " ) ; return buffer . toString ( ) ; |
public class SchemasInner {
    /**
     * Gets the content callback url for an integration account schema.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param schemaName The integration account schema name.
     * @param listContentCallbackUrl the GetCallbackUrlParameters value
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the WorkflowTriggerCallbackUrlInner object
     */
    public Observable<ServiceResponse<WorkflowTriggerCallbackUrlInner>> listContentCallbackUrlWithServiceResponseAsync(String resourceGroupName, String integrationAccountName, String schemaName, GetCallbackUrlParameters listContentCallbackUrl) {
        // Fail fast on every missing required parameter before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (integrationAccountName == null) {
            throw new IllegalArgumentException("Parameter integrationAccountName is required and cannot be null.");
        }
        if (schemaName == null) {
            throw new IllegalArgumentException("Parameter schemaName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        if (listContentCallbackUrl == null) {
            throw new IllegalArgumentException("Parameter listContentCallbackUrl is required and cannot be null.");
        }
        // Structural validation of the request body.
        Validator.validate(listContentCallbackUrl);
        // Issue the service call and map the raw HTTP response into a typed ServiceResponse.
        return service.listContentCallbackUrl(this.client.subscriptionId(), resourceGroupName, integrationAccountName, schemaName, this.client.apiVersion(), listContentCallbackUrl, this.client.acceptLanguage(), this.client.userAgent()).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<WorkflowTriggerCallbackUrlInner>>>() {
            @Override
            public Observable<ServiceResponse<WorkflowTriggerCallbackUrlInner>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<WorkflowTriggerCallbackUrlInner> clientResponse = listContentCallbackUrlDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    // Surface deserialization/service errors through the Observable.
                    return Observable.error(t);
                }
            }
        });
    }
}
public class AbstractRestClient {
    /**
     * Builds a PoolingHttpClientConnectionManager that trusts certificates loaded from the
     * specified resources with the specified trust strategy.
     * If you want the REST client to trust some specific server certificates, you can override
     * {@link #buildConnectionManager()} and use this method to build a custom connection manager.
     *
     * @param trustStrategy the trust strategy, can be null if the default one should be used;
     *                      to always trust self-signed server certificates, use
     *                      <code>TrustSelfSignedStrategy</code>
     * @param hostnameVerifier the verifier of hostnames, can be null if the default one should
     *                         be used; to skip hostname verification, use
     *                         <code>NoopHostnameVerifier</code>
     * @param certResources resources that contain certificates in binary or base64 DER/.crt format
     * @return a PoolingHttpClientConnectionManager trusting the given material
     */
    protected PoolingHttpClientConnectionManager buildConnectionManager(TrustStrategy trustStrategy, HostnameVerifier hostnameVerifier, String... certResources) {
        try {
            // No cert resources -> null trust store, i.e. fall back to the JRE defaults.
            KeyStore trustStore = certResources == null || certResources.length == 0 ? null : buildKeyStoreFromResources(certResources);
            SSLContext sslContext = SSLContexts.custom().loadTrustMaterial(trustStore, trustStrategy).build();
            SSLConnectionSocketFactory sslsf = hostnameVerifier == null ? new SSLConnectionSocketFactory(sslContext) : new SSLConnectionSocketFactory(sslContext, hostnameVerifier);
            // Register plain HTTP alongside the customized HTTPS socket factory.
            Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create().register("https", sslsf).register("http", PlainConnectionSocketFactory.getSocketFactory()).build();
            return new PoolingHttpClientConnectionManager(socketFactoryRegistry);
        } catch (Exception e) {
            // Re-throw as unchecked; callers cannot reasonably recover from SSL setup failure.
            throw Throwables.propagate(e);
        }
    }
}
public class XsdAsmUtils {
    /**
     * Generates the required methods for adding a given attribute and creates the
     * respective class, if needed.
     *
     * @param createdAttributes Information about attributes that were already created.
     * @param classWriter The {@link ClassWriter} to write the methods.
     * @param elementAttribute The attribute element.
     * @param returnType The method return type.
     * @param className The name of the class which will contain the method to add the attribute.
     * @param apiName The name of the generated fluent interface.
     */
    static void generateMethodsAndCreateAttribute(Map<String, List<XsdAttribute>> createdAttributes, ClassWriter classWriter, XsdAttribute elementAttribute, String returnType, String className, String apiName) {
        // Emit the add-attribute methods into the class currently being generated...
        XsdAsmAttributes.generateMethodsForAttribute(classWriter, elementAttribute, returnType, className, apiName);
        // ...then create the attribute's own class unless it was already created.
        createAttribute(createdAttributes, elementAttribute);
    }
}
public class BoxRequestEvent {
    /**
     * Serializes this object to the given stream.
     *
     * @serialData The capacity (int), followed by elements (each an {@code Object}) in the proper order, followed by a null
     * @param s the stream to write to
     * @throws java.io.IOException if the underlying stream fails
     */
    private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
        // Write out capacity and any hidden stuff via default field serialization.
        s.defaultWriteObject();
    }
}
public class AbstractSQLQuery {
    /**
     * Gets the results as a JDBC ResultSet.
     * <p>
     * The returned ResultSet is wrapped so that closing it also closes the
     * underlying PreparedStatement, resets this query and ends the listener
     * context — callers must close it.
     *
     * @return results as ResultSet
     */
    public ResultSet getResults() {
        final SQLListenerContextImpl context = startContext(connection(), queryMixin.getMetadata());
        String queryString = null;
        List<Object> constants = ImmutableList.of();
        try {
            // Render the SQL, notifying listeners around each lifecycle phase.
            listeners.preRender(context);
            SQLSerializer serializer = serialize(false);
            queryString = serializer.toString();
            logQuery(queryString, serializer.getConstants());
            context.addSQL(getSQL(serializer));
            listeners.rendered(context);
            listeners.notifyQuery(queryMixin.getMetadata());
            constants = serializer.getConstants();
            // Prepare and bind parameters.
            listeners.prePrepare(context);
            final PreparedStatement stmt = getPreparedStatement(queryString);
            setParameters(stmt, constants, serializer.getConstantPaths(), getMetadata().getParams());
            context.addPreparedStatement(stmt);
            listeners.prepared(context);
            // Execute; the statement stays open until the caller closes the ResultSet.
            listeners.preExecute(context);
            final ResultSet rs = stmt.executeQuery();
            listeners.executed(context);
            // Adapter ties statement/context cleanup to ResultSet.close().
            return new ResultSetAdapter(rs) {
                @Override
                public void close() throws SQLException {
                    try {
                        super.close();
                    } finally {
                        stmt.close();
                        reset();
                        endContext(context);
                    }
                }
            };
        } catch (SQLException e) {
            // On failure clean up immediately and rethrow as a translated exception.
            onException(context, e);
            reset();
            endContext(context);
            throw configuration.translate(queryString, constants, e);
        }
    }
}
public class MainMenuBar { /** * This method initializes menuFileProperties
* @ return javax . swing . JMenuItem */
private ZapMenuItem getMenuFileProperties ( ) { } } | if ( menuFileProperties == null ) { menuFileProperties = new ZapMenuItem ( "menu.file.properties" , View . getSingleton ( ) . getMenuShortcutKeyStroke ( KeyEvent . VK_P , KeyEvent . ALT_DOWN_MASK , false ) ) ; menuFileProperties . setText ( Constant . messages . getString ( "menu.file.properties" ) ) ; // ZAP : i18n
menuFileProperties . addActionListener ( new java . awt . event . ActionListener ( ) { @ Override public void actionPerformed ( java . awt . event . ActionEvent e ) { getMenuFileControl ( ) . properties ( ) ; } } ) ; } return menuFileProperties ; |
public class TreeUtils { /** * Performs the following transformation on the given MutableBinaryTreeNode :
* < pre >
* o1 o2
* A o2 = = = = > o1 C
* B C A B
* < / pre >
* @ param node the node to transform
* @ return the new root after the transformation , which is either the right sub node of the original root
* or the original root , if the right sub node is null */
public static < N extends MutableBinaryTreeNode < N > > N toLeftAssociativity ( N node ) { } } | checkArgNotNull ( node , "node" ) ; N right = node . right ( ) ; if ( right == null ) return node ; node . setRight ( right . left ( ) ) ; right . setLeft ( node ) ; return right ; |
public class TasksBase { /** * Adds a tag to a task . Returns an empty data block .
* @ param task The task to add a tag to .
* @ return Request object */
public ItemRequest < Task > addTag ( String task ) { } } | String path = String . format ( "/tasks/%s/addTag" , task ) ; return new ItemRequest < Task > ( this , Task . class , path , "POST" ) ; |
public class ConverterManager { /** * Removes a converter from the set of converters . If the converter was
* not in the set , no changes are made .
* @ param converter the converter to remove , null ignored
* @ return replaced converter , or null */
public DurationConverter removeDurationConverter ( DurationConverter converter ) throws SecurityException { } } | checkAlterDurationConverters ( ) ; if ( converter == null ) { return null ; } DurationConverter [ ] removed = new DurationConverter [ 1 ] ; iDurationConverters = iDurationConverters . remove ( converter , removed ) ; return removed [ 0 ] ; |
public class DiscreteDistributions { /** * Returns the cumulative probability of k of a specific number of tries n and probability p
* @ param k
* @ param p
* @ param n
* @ return */
public static double binomialCdf ( int k , double p , int n ) { } } | if ( k < 0 || p < 0 || n < 1 ) { throw new IllegalArgumentException ( "All the parameters must be positive and n larger than 1." ) ; } k = Math . min ( k , n ) ; double probabilitySum = approxBinomialCdf ( k , p , n ) ; return probabilitySum ; |
public class StorePackageImpl {
    /**
     * Returns the EClass for ObjectIDMPluginConfiguration, resolved lazily from
     * the globally registered EPackage.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getObjectIDMPluginConfiguration() {
        if (objectIDMPluginConfigurationEClass == null) {
            // Lazily resolve from the registry; classifier index 13 must match the model.
            objectIDMPluginConfigurationEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI).getEClassifiers().get(13);
        }
        return objectIDMPluginConfigurationEClass;
    }
}
public class LDAPController {
    /**
     * Gets the form for the creation of an LDAP mapping.
     * Requires the {@code AccountGroupManagement} global function.
     */
    @RequestMapping(value = "ldap-mapping/create", method = RequestMethod.GET)
    public Form getMappingCreationForm() {
        // Authorization first; throws if the caller lacks the global function.
        securityService.checkGlobalFunction(AccountGroupManagement.class);
        return AccountGroupMapping.form(accountService.getAccountGroups());
    }
}
public class V3ProxyValidateController {
    /**
     * Handles the CAS v3 proxy-validate endpoint by delegating to the shared
     * request-handling logic in the superclass.
     *
     * @param request the request
     * @param response the response
     * @return the model and view
     * @throws Exception the exception
     */
    @GetMapping(path = CasProtocolConstants.ENDPOINT_PROXY_VALIDATE_V3)
    @Override
    protected ModelAndView handle(final HttpServletRequest request, final HttpServletResponse response) throws Exception {
        return super.handleRequestInternal(request, response);
    }
}
public class License { /** * Sign the license .
* The license is signed the following way :
* < ol >
* < li > Add the digest algorithm string to the license as a feature . The feature name is { @ code signatureDigest }
* ( name is defined in the constant { @ link # DIGEST _ KEY } in this class ) .
* < / li >
* < li > The license is converted to binary format < / li >
* < li > A digest is created from the binary license using the message digest algorithm named by the { @ code digest }
* parameter < / li >
* < li > The digest is encrypted using the key ( which also has the information about the algorithm ) . < / li >
* < li > The encrypted digest is added to the license as a new { @ code BINARY } feature as signature . < / li >
* < / ol >
* @ param key the private key to be used to create the signature
* @ param digest the name of the digest algorithm
* @ throws NoSuchAlgorithmException this exception comes from the underlying encryption library
* @ throws NoSuchPaddingException this exception comes from the underlying encryption library
* @ throws InvalidKeyException this exception comes from the underlying encryption library
* @ throws BadPaddingException this exception comes from the underlying encryption library
* @ throws IllegalBlockSizeException this exception comes from the underlying encryption library */
public void sign ( PrivateKey key , String digest ) throws NoSuchAlgorithmException , NoSuchPaddingException , InvalidKeyException , BadPaddingException , IllegalBlockSizeException { } } | add ( Feature . Create . stringFeature ( DIGEST_KEY , digest ) ) ; final var digester = MessageDigest . getInstance ( digest ) ; final var ser = unsigned ( ) ; final var digestValue = digester . digest ( ser ) ; final var cipher = Cipher . getInstance ( key . getAlgorithm ( ) ) ; cipher . init ( Cipher . ENCRYPT_MODE , key ) ; final var signature = cipher . doFinal ( digestValue ) ; add ( signature ) ; |
public class PersonNameIndexRenderer {
    /**
     * {@inheritDoc}
     * <p>
     * Renders the person's index entry as an HTML anchor containing the name,
     * the birth/death year range and the person's ID — or returns a privacy
     * placeholder ("Confidential" / "Living") when the person must be hidden,
     * and "" for an unset person.
     */
    @Override
    public final String getIndexName() {
        final Person person = personRenderer.getGedObject();
        if (!person.isSet()) {
            return "";
        }
        // Privacy handling: confidential and hidden-living persons get placeholders.
        if (personRenderer.isConfidential()) {
            return "Confidential";
        }
        if (personRenderer.isHiddenLiving()) {
            return "Living";
        }
        final GedRenderer<? extends GedObject> renderer = personRenderer.createGedRenderer(person.getName());
        final String nameHtml = renderer.getIndexName();
        // Collect the birth and death years via visitors for the date-range suffix.
        final GetDateVisitor birthVisitor = new GetDateVisitor("Birth");
        person.accept(birthVisitor);
        final String birthYear = birthVisitor.getYear();
        final GetDateVisitor deathVisitor = new GetDateVisitor("Death");
        person.accept(deathVisitor);
        final String deathYear = deathVisitor.getYear();
        return "<a href=\"person?db=" + person.getDbName() + "&id=" + person.getString() + "\" class=\"name\">" + nameHtml + dateRangeString(birthYear, deathYear) + " (" + person.getString() + ")</a>";
    }
}
public class ConversationMetaDataImpl { /** * begin F206161.5 */
public InetAddress getRemoteAddress ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getRemoteAddress" ) ; InetAddress result = null ; TCPConnectionContext tcpContext = null ; ConnectionLink connLinkRef = baseLink . getDeviceLink ( ) ; if ( connLinkRef != null ) { tcpContext = ( TCPConnectionContext ) connLinkRef . getChannelAccessor ( ) ; if ( tcpContext != null ) result = tcpContext . getRemoteAddress ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getRemoteAddress" , result ) ; return result ; |
public class WebSocketHandler { /** * Handles websocket transport errors
* @ param webSocketSession websocket session where the error appeared
* @ param exception exception that occured
* @ throws Exception transport error exception */
@ Override public void handleTransportError ( final WebSocketSession webSocketSession , final Throwable exception ) throws Exception { } } | if ( exception != null ) { logger . error ( "[handleTransportError]" , exception ) ; } |
public class DFSClient { /** * Create a new dfs file with the specified block replication
* with write - progress reporting and return an output stream for writing
* into the file .
* @ param src stream name
* @ param permission The permission of the directory being created .
* If permission = = null , use { @ link FsPermission # getDefault ( ) } .
* @ param overwrite do not check for file existence if true
* @ param createParent create missing parent directory if true
* @ param replication block replication
* @ return output stream
* @ throws IOException
* @ see ClientProtocol # create ( String , FsPermission , String , boolean , short , long ) */
public OutputStream create ( String src , FsPermission permission , boolean overwrite , boolean createParent , short replication , long blockSize , Progressable progress , int buffersize ) throws IOException { } } | return create ( src , permission , overwrite , createParent , replication , blockSize , progress , buffersize , conf . getInt ( "io.bytes.per.checksum" , 512 ) ) ; |
public class BarcodePostnet { /** * Creates the bars for Postnet .
* @ param text the code to be created without checksum
* @ return the bars */
public static byte [ ] getBarsPostnet ( String text ) { } } | int total = 0 ; for ( int k = text . length ( ) - 1 ; k >= 0 ; -- k ) { int n = text . charAt ( k ) - '0' ; total += n ; } text += ( char ) ( ( ( 10 - ( total % 10 ) ) % 10 ) + '0' ) ; byte bars [ ] = new byte [ text . length ( ) * 5 + 2 ] ; bars [ 0 ] = 1 ; bars [ bars . length - 1 ] = 1 ; for ( int k = 0 ; k < text . length ( ) ; ++ k ) { int c = text . charAt ( k ) - '0' ; System . arraycopy ( BARS [ c ] , 0 , bars , k * 5 + 1 , 5 ) ; } return bars ; |
public class Dispatcher { /** * Schedules status update of given event for all registered targets . */
@ MainThread private void scheduleStatusUpdates ( Event event , EventStatus status ) { } } | for ( EventTarget target : targets ) { for ( EventMethod method : target . methods ) { if ( event . getKey ( ) . equals ( method . eventKey ) && method . type == EventMethod . Type . STATUS ) { Utils . log ( event . getKey ( ) , method , "Scheduling status update" ) ; executionQueue . add ( Task . create ( this , target , method , event , status ) ) ; } } } |
public class GraphGeneratorUtils { /** * Generates { @ link Vertex vertices } present in the given set of { @ link Edge } s .
* @ param edges source { @ link DataSet } of { @ link Edge } s
* @ param parallelism operator parallelism
* @ param < K > label type
* @ param < EV > edge value type
* @ return { @ link DataSet } of discovered { @ link Vertex vertices }
* @ see Graph # fromDataSet ( DataSet , DataSet , ExecutionEnvironment ) */
public static < K , EV > DataSet < Vertex < K , NullValue > > vertexSet ( DataSet < Edge < K , EV > > edges , int parallelism ) { } } | DataSet < Vertex < K , NullValue > > vertexSet = edges . flatMap ( new EmitSrcAndTarget < > ( ) ) . setParallelism ( parallelism ) . name ( "Emit source and target labels" ) ; return vertexSet . distinct ( ) . setCombineHint ( CombineHint . HASH ) . setParallelism ( parallelism ) . name ( "Emit vertex labels" ) ; |
public class ManagerUtil { /** * Strips the internal action id from the given action id .
* @ param actionId the action id prefixed by the internal action id as
* received from Asterisk .
* @ return the original action id , that is the action id as it was before
* the internal action id was added .
* @ see # addInternalActionId ( String , String ) */
public static String stripInternalActionId ( String actionId ) { } } | int delimiterIndex ; delimiterIndex = actionId . indexOf ( INTERNAL_ACTION_ID_DELIMITER ) ; if ( delimiterIndex > 0 ) { if ( actionId . length ( ) > delimiterIndex + 1 ) { return actionId . substring ( delimiterIndex + 1 ) ; } return null ; } return null ; |
public class DataUtil {
    /**
     * Writes {@code value} into {@code buffer} starting at {@code offset} in
     * little-endian (Intel) byte order: least significant byte first.
     *
     * @param buffer destination array; must have at least 8 bytes free from offset
     * @param offset index where the least significant byte is written
     * @param value the 64-bit value to serialize
     */
    public static void writeLongLittleEndian(byte[] buffer, int offset, long value) {
        // Byte i holds bits [8*i, 8*i+8); the cast to byte keeps the low 8 bits.
        for (int i = 0; i < 8; i++) {
            buffer[offset + i] = (byte) (value >>> (8 * i));
        }
    }
}
public class DeploymentsInner {
    /**
     * Exports the template used for the specified deployment.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param deploymentName The name of the deployment from which to get the template.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the DeploymentExportResultInner object
     */
    public Observable<ServiceResponse<DeploymentExportResultInner>> exportTemplateWithServiceResponseAsync(String resourceGroupName, String deploymentName) {
        // Fail fast on any missing required parameter before issuing the request.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (deploymentName == null) {
            throw new IllegalArgumentException("Parameter deploymentName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the service call and map the raw HTTP response into a typed ServiceResponse.
        return service.exportTemplate(resourceGroupName, deploymentName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DeploymentExportResultInner>>>() {
            @Override
            public Observable<ServiceResponse<DeploymentExportResultInner>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<DeploymentExportResultInner> clientResponse = exportTemplateDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    // Surface deserialization/service errors through the Observable.
                    return Observable.error(t);
                }
            }
        });
    }
}
public class PasswordUtil {
    /**
     * Encodes the provided string by using the specified encoding algorithm and properties.
     *
     * @param decoded_string the string to be encoded
     * @param crypto_algorithm the algorithm to be used for encoding; the supported values
     *        are xor, aes, or hash
     * @param properties the properties for the encryption
     * @return the encoded string, formatted as {algorithm[:alias]}payload; null if there
     *         is any failure during encoding, or an invalid or null decoded_string
     */
    public static String encode_password(String decoded_string, String crypto_algorithm, Map<String, String> properties) {
        /*
         * Encoding process:
         * -- check for empty algorithm tag
         * -- convert input String to byte[] (UTF-8 conversion)
         * -- encipher the byte[]
         * -- convert byte[] to String using base64 encoding
         */
        StringBuilder buffer = new StringBuilder();
        buffer.append(CRYPTO_ALGORITHM_STARTED);
        if (crypto_algorithm.length() == 0) {
            // Crypto algorithm is empty: don't encode the password, emit it verbatim.
            buffer.append(CRYPTO_ALGORITHM_STOPPED).append(decoded_string);
        } else {
            // Encode the password with the specified crypto algorithm.
            String encoded_string = null;
            EncryptedInfo info = null;
            if (decoded_string.length() > 0) {
                // Convert the decoded password string to byte[].
                byte[] decrypted_bytes = convert_to_bytes(decoded_string);
                if (decrypted_bytes.length > 0) {
                    // Encrypt the password bytes with the specified crypto algorithm.
                    byte[] encrypted_bytes = null;
                    boolean done = false;
                    while (!done) {
                        try {
                            info = PasswordCipherUtil.encipher_internal(decrypted_bytes, crypto_algorithm, properties);
                            if (info != null) {
                                encrypted_bytes = info.getEncryptedBytes();
                            }
                            done = true;
                        } catch (InvalidPasswordCipherException e) {
                            // Cipher failure is unrecoverable here; log and signal failure with null.
                            logger.logp(Level.SEVERE, PasswordUtil.class.getName(), "encode_password", "PASSWORDUTIL_CYPHER_EXCEPTION", e);
                            return null;
                        } catch (UnsupportedCryptoAlgorithmException e) {
                            logger.logp(Level.SEVERE, PasswordUtil.class.getName(), "encode_password", "PASSWORDUTIL_UNKNOWN_ALGORITHM_EXCEPTION", e);
                            return null;
                        }
                    }
                    if ((encrypted_bytes != null) && (encrypted_bytes.length > 0)) {
                        // Convert the encrypted bytes to a viewable (base64) string.
                        encoded_string = convert_viewable_to_string(encrypted_bytes);
                        if (encoded_string == null) {
                            // base64 encoding failed
                            return null;
                        }
                    }
                }
            }
            buffer.append(crypto_algorithm);
            // Append the key alias, if the cipher reported one: "{algorithm:alias}".
            String alias = (null == info) ? null : info.getKeyAlias();
            if (alias != null && 0 < alias.length()) {
                buffer.append(':').append(alias);
            }
            buffer.append(CRYPTO_ALGORITHM_STOPPED);
            if ((encoded_string != null) && (encoded_string.length() > 0)) {
                // Append the encoded payload after the closing tag.
                buffer.append(encoded_string);
            }
        }
        return buffer.toString();
    }
}
public class Out {
    /**
     * syck_out_mark: associates the given node with its emitter and, when an
     * object id is pending on the emitter, registers the node in the emitter's
     * data map (for anchor/alias resolution).
     *
     * @param emitter the Ruby emitter object wrapping a native {@code Emitter}
     * @param node the node being emitted; gains an {@code @emitter} instance variable
     */
    public static void outMark(IRubyObject emitter, IRubyObject node) {
        Emitter emitterPtr = (Emitter) emitter.dataGetStructChecked();
        YEmitter.Extra bonus = (YEmitter.Extra) emitterPtr.bonus;
        // Let the node reach back to its emitter from Ruby code.
        ((RubyObject) node).fastSetInstanceVariable("@emitter", emitter);
        if (!bonus.oid.isNil()) {
            // Remember the node under the pending object id.
            ((RubyHash) bonus.data).fastASet(bonus.oid, node);
        }
    }
}
public class DE { /** * anlegen eines de beim parsen funktioniert analog zum
* anlegen eines de bei der message - synthese */
private void parseValue ( StringBuffer res , HashMap < String , String > predefs , char preDelim , HashMap < String , String > valids ) { } } | int len = res . length ( ) ; if ( preDelim != ( char ) 0 && res . charAt ( 0 ) != preDelim ) { if ( len == 0 ) { throw new ParseErrorException ( HBCIUtils . getLocMsg ( "EXCMSG_ENDOFSTRG" , getPath ( ) ) ) ; } // log . ( " error string : " + res . toString ( ) , log . _ ERR ) ;
// log . ( " current : " + getPath ( ) + " : " + type + " ( " + minsize + " , " + maxsize + " ) = " + value , log . _ ERR ) ;
// log . ( " predelimiter mismatch ( required : " + getPreDelim ( ) + " found : " + temp . charAt ( 0 ) + " ) " , log . _ ERR ) ;
throw new PredelimErrorException ( getPath ( ) , Character . toString ( preDelim ) , Character . toString ( res . charAt ( 0 ) ) ) ; } this . value = SyntaxDEFactory . createSyntaxDE ( getType ( ) , getPath ( ) , res , minsize , maxsize ) ; String valueString = value . toString ( 0 ) ; String predefined = predefs . get ( getPath ( ) ) ; if ( predefined != null ) { if ( ! valueString . equals ( predefined ) ) { throw new ParseErrorException ( HBCIUtils . getLocMsg ( "EXCMSG_PREDEFERR" , new Object [ ] { getPath ( ) , predefined , value } ) ) ; } } boolean atLeastOne = false ; boolean ok = false ; if ( valids != null ) { String header = getPath ( ) + ".value" ; for ( String key : valids . keySet ( ) ) { if ( key . startsWith ( header ) && key . indexOf ( "." , header . length ( ) ) == - 1 ) { atLeastOne = true ; String validValue = valids . get ( key ) ; if ( valueString . equals ( validValue ) ) { ok = true ; break ; } } } } if ( atLeastOne && ! ok ) { throw new NoValidValueException ( getPath ( ) , valueString ) ; } |
public class AbstractGitFlowMojo { /** * Executes git for - each - ref with < code > refname : short < / code > format .
* @ param branchName
* Branch name to find .
* @ param firstMatch
* Return first match .
* @ return Branch names which matches < code > refs / heads / { branchName } * < / code > .
* @ throws MojoFailureException
* @ throws CommandLineException */
protected String gitFindBranches ( final String branchName , final boolean firstMatch ) throws MojoFailureException , CommandLineException { } } | String wildcard = "*" ; if ( branchName . endsWith ( "/" ) ) { wildcard = "**" ; } String branches ; if ( firstMatch ) { branches = executeGitCommandReturn ( "for-each-ref" , "--count=1" , "--format=\"%(refname:short)\"" , "refs/heads/" + branchName + wildcard ) ; } else { branches = executeGitCommandReturn ( "for-each-ref" , "--format=\"%(refname:short)\"" , "refs/heads/" + branchName + wildcard ) ; } // on * nix systems return values from git for - each - ref are wrapped in
// quotes
// https : / / github . com / aleksandr - m / gitflow - maven - plugin / issues / 3
branches = removeQuotes ( branches ) ; return branches ; |
public class OneStepDistributedRowLock { /** * Read all the lock columns . Will also ready data columns if withDataColumns ( true ) was called
* @ param readDataColumns
* @ return
* @ throws Exception */
private Map < C , Long > readLockColumns ( boolean readDataColumns ) throws Exception { } } | Map < C , Long > result = Maps . newLinkedHashMap ( ) ; // Read all the columns
if ( readDataColumns ) { columns = new OrderedColumnMap < C > ( ) ; ColumnList < C > lockResult = keyspace . prepareQuery ( columnFamily ) . setConsistencyLevel ( consistencyLevel ) . getKey ( key ) . execute ( ) . getResult ( ) ; for ( Column < C > c : lockResult ) { if ( columnStrategy . isLockColumn ( c . getName ( ) ) ) result . put ( c . getName ( ) , readTimeoutValue ( c ) ) ; else columns . add ( c ) ; } } // Read only the lock columns
else { ColumnList < C > lockResult = keyspace . prepareQuery ( columnFamily ) . setConsistencyLevel ( consistencyLevel ) . getKey ( key ) . withColumnRange ( columnStrategy . getLockColumnRange ( ) ) . execute ( ) . getResult ( ) ; for ( Column < C > c : lockResult ) { result . put ( c . getName ( ) , readTimeoutValue ( c ) ) ; } } return result ; |
public class JPAExpressions { /** * Create a max ( col ) expression
* @ param left collection
* @ return max ( col ) */
public static < A extends Comparable < ? super A > > ComparableExpression < A > max ( CollectionExpression < ? , A > left ) { } } | return Expressions . comparableOperation ( ( Class ) left . getParameter ( 0 ) , Ops . QuantOps . MAX_IN_COL , ( Expression < ? > ) left ) ; |
public class MissingFail { /** * Returns a string describing the exception type caught by the given try tree ' s catch
* statement ( s ) , defaulting to { @ code " Exception " } if more than one exception type is caught . */
private static String exceptionToString ( TryTree tree , VisitorState state ) { } } | if ( tree . getCatches ( ) . size ( ) != 1 ) { return "Exception" ; } Tree exceptionType = tree . getCatches ( ) . iterator ( ) . next ( ) . getParameter ( ) . getType ( ) ; Type type = ASTHelpers . getType ( exceptionType ) ; if ( type != null && type . isUnion ( ) ) { return "Exception" ; } return state . getSourceForNode ( exceptionType ) ; |
public class FileCachingJobServiceClassLoaderStrategy {
    /**
     * Gets the <code>File</code> in which to store the given class definition.
     *
     * @param name The fully qualified name of the class.
     * @param digest The MD5 digest of the class definition.
     * @param createDirectory A value indicating whether the directory
     *        containing the file should be created if it does not yet exist.
     * @return The <code>File</code> to use for storing the cached class
     *         definition.
     */
    private File getCacheEntryFile(String name, byte[] digest, boolean createDirectory) {
        // One sub-directory per class, mirroring the package structure on disk.
        File entryDirectory = new File(directory, name.replace('.', '/'));
        if (createDirectory && !entryDirectory.isDirectory()) {
            // NOTE(review): mkdirs() result is ignored; a failure surfaces later
            // when the entry file is accessed — confirm this is intentional.
            entryDirectory.mkdirs();
        }
        // The file name is the hex form of the digest, so distinct definitions
        // of the same class get distinct cache entries.
        return new File(entryDirectory, StringUtil.toHex(digest));
    }
}
public class NoticeReference { /** * Describe < code > toASN1Object < / code > method here .
* @ return a < code > DERObject < / code > value */
public DERObject toASN1Object ( ) { } } | ASN1EncodableVector av = new ASN1EncodableVector ( ) ; av . add ( organization ) ; av . add ( noticeNumbers ) ; return new DERSequence ( av ) ; |
public class Futures { /** * Create a composite { @ link CompletableFuture } is composed from the given { @ code futures } .
* @ param futures must not be { @ literal null } .
* @ return the composed { @ link CompletableFuture } .
* @ since 5.1.1 */
@ SuppressWarnings ( { } } | "unchecked" , "rawtypes" } ) public static CompletableFuture < Void > allOf ( Collection < ? extends CompletableFuture < ? > > futures ) { LettuceAssert . notNull ( futures , "Futures must not be null" ) ; return CompletableFuture . allOf ( futures . toArray ( new CompletableFuture [ 0 ] ) ) ; |
public class HybridizationFingerprinter { /** * Gets the bond Symbol attribute of the Fingerprinter class .
* @ return The bondSymbol value */
protected String getBondSymbol ( IBond bond ) { } } | String bondSymbol = "" ; if ( bond . getOrder ( ) == IBond . Order . SINGLE ) { if ( isSP2Bond ( bond ) ) { bondSymbol = ":" ; } else { bondSymbol = "-" ; } } else if ( bond . getOrder ( ) == IBond . Order . DOUBLE ) { if ( isSP2Bond ( bond ) ) { bondSymbol = ":" ; } else { bondSymbol = "=" ; } } else if ( bond . getOrder ( ) == IBond . Order . TRIPLE ) { bondSymbol = "#" ; } else if ( bond . getOrder ( ) == IBond . Order . QUADRUPLE ) { bondSymbol = "*" ; } return bondSymbol ; |
public class HBaseParserScreen { /** * Print the top nav menu .
* @ exception DBException File exception . */
public void printHtmlLogo ( PrintWriter out , ResourceBundle reg ) throws DBException { } } | String strXml = this . getProperty ( "xml" ) ; // Html page
if ( ( strXml != null ) && ( strXml . length ( ) > 0 ) ) ( ( BaseParserScreen ) this . getScreenField ( ) ) . setURL ( ( ( BaseParserScreen ) this . getScreenField ( ) ) . addURLParam ( Constants . BLANK , "xml" , strXml ) ) ; super . printHtmlLogo ( out , reg ) ; |
public class CleaneLingSolver { /** * Dumps redundant clauses with eliminated variables . */
private void dumpEliminatedRedundant ( ) { } } | for ( final CLClause c : this . clauses ) { if ( ! c . redundant ( ) || c . satisfied ( ) || c . dumped ( ) ) { continue ; } if ( containsEliminated ( c ) ) { dumpClause ( c ) ; } } |
public class SansOrm { /** * Use this one to use simple embedded { @ link TransactionManager } implementation for tx handling .
* @ param dataSource the { @ link DataSource } to use by the default
* @ return dataSource that will be used for queries */
public static DataSource initializeTxSimple ( DataSource dataSource ) { } } | TxTransactionManager txManager = new TxTransactionManager ( dataSource ) ; return initializeTxCustom ( txManager . getTxDataSource ( ) , txManager , txManager ) ; |
public class TaskAnnouncement {
    /**
     * Builds an announcement from a full Task object.
     * nullable for backward compatibility
     */
    public static TaskAnnouncement create(Task task, TaskStatus status, TaskLocation location) {
        // Delegate to the field-wise factory, pulling identifiers off the task.
        return create(task.getId(), task.getType(), task.getTaskResource(), status, location, task.getDataSource());
    }
}
public class OidcAuthorizationRequestSupport { /** * Gets oidc prompt from authorization request .
* @ param url the url
* @ return the oidc prompt from authorization request */
@ SneakyThrows public static Set < String > getOidcPromptFromAuthorizationRequest ( final @ NonNull String url ) { } } | return new URIBuilder ( url ) . getQueryParams ( ) . stream ( ) . filter ( p -> OidcConstants . PROMPT . equals ( p . getName ( ) ) ) . map ( param -> param . getValue ( ) . split ( " " ) ) . flatMap ( Arrays :: stream ) . collect ( Collectors . toSet ( ) ) ; |
public class PluginWrapper { /** * Gets the URL that shows more information about this plugin .
* @ return
* null if this information is unavailable .
* @ since 1.283 */
@ Exported public String getUrl ( ) { } } | // first look for the manifest entry . This is new in maven - hpi - plugin 1.30
String url = manifest . getMainAttributes ( ) . getValue ( "Url" ) ; if ( url != null ) return url ; // fallback to update center metadata
UpdateSite . Plugin ui = getInfo ( ) ; if ( ui != null ) return ui . wiki ; return null ; |
public class SSOTokenCredentialProvider { /** * { @ inheritDoc } */
@ Override public void setCredential ( Subject subject ) throws CredentialException { } } | Set < WSPrincipal > principals = subject . getPrincipals ( WSPrincipal . class ) ; if ( principals . isEmpty ( ) ) { return ; } if ( principals . size ( ) != 1 ) { throw new CredentialException ( "Too many WSPrincipals in the subject" ) ; } WSPrincipal principal = principals . iterator ( ) . next ( ) ; CredentialsService cs = credentialsServiceRef . getService ( ) ; String unauthenticatedUserid = cs . getUnauthenticatedUserid ( ) ; if ( principal . getName ( ) != null && unauthenticatedUserid != null && principal . getName ( ) . equals ( unauthenticatedUserid ) ) { return ; } setSsoTokenCredential ( subject , principal . getAccessId ( ) ) ; |
public class ResourceManager { /** * Retrieve resource with specified basename .
* @ param baseName the basename
* @ param classLoader the classLoader to load resources from
* @ param locale the locale of the resources requested .
* @ return the Resources */
public static Resources getBaseResources ( String baseName , Locale locale , ClassLoader classLoader ) { } } | synchronized ( ResourceManager . class ) { Resources resources = getCachedResource ( baseName + "_" + locale . hashCode ( ) ) ; if ( null == resources ) { resources = new Resources ( baseName , locale , classLoader ) ; putCachedResource ( baseName + "_" + locale . hashCode ( ) , resources ) ; } return resources ; } |
public class TimeZoneNamesImpl { /** * Returns a set of names for the given time zone ID . This method loads
* the set of names into the internal map and trie for future references .
* @ param tzID the canonical time zone ID
* @ return An instance of ZNames that includes a set of time zone display names . */
private synchronized ZNames loadTimeZoneNames ( String tzID ) { } } | ZNames tznames = _tzNamesMap . get ( tzID ) ; if ( tznames == null ) { ZNamesLoader loader = new ZNamesLoader ( ) ; loader . loadTimeZone ( _zoneStrings , tzID ) ; tznames = ZNames . createTimeZoneAndPutInCache ( _tzNamesMap , loader . getNames ( ) , tzID ) ; } return tznames ; |
public class AuditableVisitor { /** * Visits Auditable objects in an object graph / hierarchy setting auditable information ( created / modified
* by / date - time / process ) .
* @ param visitable the Visitable object visited by this Visitor .
* @ see org . cp . elements . lang . Auditable */
@ Override @ SuppressWarnings ( "unchecked" ) public void visit ( Visitable visitable ) { } } | if ( visitable instanceof Auditable ) { Auditable < USER , PROCESS , ? > auditable = ( Auditable < USER , PROCESS , ? > ) visitable ; if ( auditable . isNew ( ) || isCreatedUnset ( auditable ) ) { auditable . setCreatedBy ( getUser ( ) ) ; auditable . setCreatedOn ( getDateTime ( ) ) ; auditable . setCreatedWith ( getProcess ( ) ) ; } if ( auditable . isModified ( ) ) { auditable . setModifiedBy ( getUser ( ) ) ; auditable . setModifiedOn ( getDateTime ( ) ) ; auditable . setModifiedWith ( getProcess ( ) ) ; } } |
public class BuildWrapper {
    /**
     * Provides an opportunity for a {@link BuildWrapper} to decorate the {@link BuildListener}
     * logger to be used by the build.
     * This hook is called very early on in the build (even before
     * {@link #setUp(AbstractBuild, Launcher, BuildListener)} is invoked.)
     * The default implementation is no-op, which just returns the {@code logger} parameter as-is.
     * <p>({@link ArgumentListBuilder#add(String, boolean)} is a simpler way to suppress a single password.)
     *
     * @param build The build in progress for which this {@link BuildWrapper} is called. Never null.
     * @param logger The default logger. Never null. This method is expected to wrap this logger.
     *        This makes sure that when multiple {@link BuildWrapper}s attempt to decorate the
     *        same logger it will sort of work.
     * @return Must not be null. If a fatal error happens, throw an exception.
     * @throws RunnerAbortedException If a fatal error is detected but the implementation handled
     *         it gracefully, throw this exception to suppress stack trace.
     * @since 1.374
     * @see ConsoleLogFilter
     */
    public OutputStream decorateLogger(AbstractBuild build, OutputStream logger)
            throws IOException, InterruptedException, RunnerAbortedException {
        // Default implementation: no decoration; subclasses override to wrap the stream.
        return logger;
    }
}
public class SystemPropertyUtil { /** * Returns the value of the Java system property with the specified
* { @ code key } , while falling back to the specified default value if
* the property access fails .
* @ return the property value .
* { @ code def } if there ' s no such property or if an access to the
* specified property is not allowed . */
public static int getInt ( String key , int def ) { } } | String value = get ( key ) ; if ( value == null ) { return def ; } value = value . trim ( ) ; try { return Integer . parseInt ( value ) ; } catch ( Exception e ) { // Ignore
} logger . warn ( "Unable to parse the integer system property '{}':{} - using the default value: {}" , key , value , def ) ; return def ; |
public class ManagedServiceFactoryTracker { /** * Processes registered ManagedServiceFactory and updates each with their own
* configuration properties .
* @ param reference
* - ServiceReference for MangedServiceFactory */
@ Override public ManagedServiceFactory addingService ( ServiceReference < ManagedServiceFactory > reference ) { } } | String [ ] factoryPids = getServicePid ( reference ) ; if ( factoryPids == null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "handleRegistration(): Invalid service.pid type: " + reference ) ; } return null ; } ManagedServiceFactory msf = context . getService ( reference ) ; if ( msf == null ) return null ; synchronized ( caFactory . getConfigurationStore ( ) ) { for ( String factoryPid : factoryPids ) { add ( reference , factoryPid , msf ) ; } } return msf ; |
public class VariableNumMap { /** * Returns { @ code true } if the values in { @ code assignment } are
* possible values for the variables in { @ code this } .
* { @ code assignment } must contain a subset of the variables in
* { @ code this } .
* @ param assignment
* @ return */
public boolean isValidAssignment ( Assignment assignment ) { } } | Preconditions . checkArgument ( containsAll ( assignment . getVariableNumsArray ( ) ) ) ; for ( int varNum : assignment . getVariableNumsArray ( ) ) { int index = getVariableIndex ( varNum ) ; if ( ! vars [ index ] . canTakeValue ( assignment . getValue ( varNum ) ) ) { return false ; } } return true ; |
public class Result { /** * Accept Lists and mark as empty or not
* @ param value
* @ return */
public static < R > Result < List < R > > ok ( List < R > value ) { } } | return new Result < List < R > > ( value , OK , SUCCESS , null ) . emptyList ( value . size ( ) == 0 ) ; |
public class PasswordPolicyDto { /** * transformers */
public static PasswordPolicyDto fromPasswordPolicy ( PasswordPolicy policy ) { } } | PasswordPolicyDto policyDto = new PasswordPolicyDto ( ) ; for ( PasswordPolicyRule rule : policy . getRules ( ) ) { policyDto . rules . add ( new PasswordPolicyRuleDto ( rule ) ) ; } return policyDto ; |
public class PackageInfo { /** * Get the { @ link ClassInfo } objects for all classes that are members of this package or a sub - package .
* @ return the the { @ link ClassInfo } objects for all classes that are members of this package or a sub - package . */
public ClassInfoList getClassInfoRecursive ( ) { } } | final Set < ClassInfo > reachableClassInfo = new HashSet < > ( ) ; obtainClassInfoRecursive ( reachableClassInfo ) ; return new ClassInfoList ( reachableClassInfo , /* sortByName = */
true ) ; |
public class RewriteAsRangeCriteria { /** * Find all references in the supplied list that match those supplied and set them to null .
* @ param comparisons the collection in which null references are to be placed
* @ param comparisonsToNull the comparisons that are to be found and nulled in the collection */
protected void nullReference ( List < Comparison > comparisons , Iterable < Comparison > comparisonsToNull ) { } } | for ( Comparison comparisonToNull : comparisonsToNull ) { nullReference ( comparisons , comparisonToNull ) ; } |
public class ObjectMetadata { /** * Gets a map of the raw metadata / headers for the associated object .
* @ return A map of the raw metadata / headers for the associated object . */
public Map < String , Object > getRawMetadata ( ) { } } | Map < String , Object > copy = new TreeMap < String , Object > ( String . CASE_INSENSITIVE_ORDER ) ; copy . putAll ( metadata ) ; return Collections . unmodifiableMap ( copy ) ; |
public class Annotations {
    /**
     * Get the config-property-type for an annotation.
     *
     * @param annotation The annotation
     * @param type An optional declared type
     * @param classLoader The class loader to use
     * @return The fully qualified classname
     * @exception ClassNotFoundException Thrown if a class cannot be found
     * @exception ValidateException Thrown if a ConfigProperty type isn't correct
     */
    @SuppressWarnings("unchecked")
    private String getConfigPropertyType(Annotation annotation, Class<?> type, ClassLoader classLoader)
            throws ClassNotFoundException, ValidateException {
        if (annotation.isOnField()) {
            // Field-level annotation: walk up the class hierarchy until the field is found.
            Class clz = Class.forName(annotation.getClassName(), true, classLoader);
            while (!Object.class.equals(clz)) {
                try {
                    Field field = SecurityActions.getDeclaredField(clz, annotation.getMemberName());
                    // Accept when no declared type was given, or the declared type matches.
                    if (type == null || type.equals(Object.class) || type.equals(field.getType())) {
                        return field.getType().getName();
                    } else {
                        throw new ValidateException(bundle.wrongAnnotationType(annotation));
                    }
                } catch (NoSuchFieldException nsfe) {
                    // Not declared here: try the superclass.
                    clz = clz.getSuperclass();
                }
            }
        } else if (annotation.isOnMethod()) {
            Class clz = Class.forName(annotation.getClassName(), true, classLoader);
            // Resolve the declared parameter types, if any, for the method lookup.
            Class[] parameters = null;
            if (annotation.getParameterTypes() != null) {
                parameters = new Class[annotation.getParameterTypes().size()];
                for (int i = 0; i < annotation.getParameterTypes().size(); i++) {
                    String parameter = annotation.getParameterTypes().get(i);
                    parameters[i] = Class.forName(parameter, true, classLoader);
                }
            }
            // Method-level annotation: walk up the class hierarchy until the method is found.
            while (!Object.class.equals(clz)) {
                try {
                    Method method = SecurityActions.getDeclaredMethod(clz, annotation.getMemberName(), parameters);
                    if (void.class.equals(method.getReturnType())) {
                        // Setter style: the config property type is the first parameter type.
                        // NOTE(review): if the method returns void and has no parameters, this
                        // iteration neither returns nor advances clz — possible infinite loop;
                        // confirm such methods cannot occur here.
                        if (parameters != null && parameters.length > 0) {
                            if (type == null || type.equals(Object.class) || type.equals(parameters[0])) {
                                return parameters[0].getName();
                            } else {
                                throw new ValidateException(bundle.wrongAnnotationType(annotation));
                            }
                        }
                    } else {
                        // Getter style: the config property type is the return type.
                        if (type == null || type.equals(Object.class) || type.equals(method.getReturnType())) {
                            return method.getReturnType().getName();
                        } else {
                            throw new ValidateException(bundle.wrongAnnotationType(annotation));
                        }
                    }
                } catch (NoSuchMethodException nsme) {
                    clz = clz.getSuperclass();
                }
            }
        }
        // Neither a field nor a resolvable method annotation.
        throw new IllegalArgumentException(bundle.unknownAnnotation(annotation));
    }
}
public class MonitorWebController { /** * Start monitoring now .
* @ return text describing the result . */
protected String startMonitoring ( ) { } } | String result ; if ( ! this . started . getAndSet ( true ) ) { synchronized ( this . brokerPollerMap ) { for ( ActiveMQBrokerPoller onePoller : this . brokerPollerMap . values ( ) ) { onePoller . start ( ) ; } } result = "started" ; } else { result = "already running" ; } return result ; |
public class ResourceUtils {
    /**
     * Return a stream of resources from a response.
     *
     * @param response the response
     * @param <R> the resource type
     * @param <U> the response type
     * @return a stream of resources from the response
     */
    public static <R extends Resource<?>, U extends PaginatedResponse<R>> Flux<R> getResources(U response) {
        // Adapt the page's resource list to a reactive Flux.
        return Flux.fromIterable(response.getResources());
    }
}
public class BpmPlatformXmlParser {
    /**
     * Create and configure the {@link BpmPlatformXmlParse} object.
     */
    public BpmPlatformXmlParse createParse() {
        BpmPlatformXmlParse parse = new BpmPlatformXmlParse(this);
        // Attach the XSD so the parse is schema-validated.
        parse.setSchemaResource(ReflectUtil.getResourceUrlAsString(BPM_PLATFORM_XSD));
        return parse;
    }
}
public class HttpBuilder {
    /**
     * Executes an asynchronous DELETE request on the configured URI (an asynchronous alias to
     * the `delete(Closure)` method), with additional configuration provided by the
     * configuration closure.
     *
     * [source,groovy]
     * def http = HttpBuilder.configure {
     *     request.uri = 'http://localhost:10101'
     * }
     * def result = http.deleteAsync() {
     *     request.uri.path = '/something'
     * }
     *
     * The configuration `closure` allows additional configuration for this request based on the
     * {@link HttpConfig} interface.
     *
     * @param closure the additional configuration closure (delegated to {@link HttpConfig})
     * @return the {@link CompletableFuture} containing the future result data
     */
    public CompletableFuture<Object> deleteAsync(@DelegatesTo(HttpConfig.class) final Closure closure) {
        // Run the synchronous delete on this builder's executor.
        return CompletableFuture.supplyAsync(() -> delete(closure), getExecutor());
    }
}
public class AnimatedDrawable2 {
    /** Start the animation. */
    @Override
    public void start() {
        // Nothing to animate when already running, no backend, or a static (<= 1 frame) image.
        if (mIsRunning || mAnimationBackend == null || mAnimationBackend.getFrameCount() <= 1) {
            return;
        }
        mIsRunning = true;
        // Reset the frame-scheduling bookkeeping to "just started".
        mStartTimeMs = now();
        mExpectedRenderTimeMs = mStartTimeMs;
        mLastFrameAnimationTimeMs = -1;
        mLastDrawnFrameNumber = -1;
        invalidateSelf();
        mAnimationListener.onAnimationStart(this);
    }
}
public class LinkState { /** * Method getRemoteMEUuid
* < p > Returns the Uuid of the ME on the remote end of this Link */
public SIBUuid8 getRemoteMEUuid ( SIBUuid12 linkUuid ) throws SIResourceException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getRemoteMEUuid" , linkUuid ) ; LinkSelection s = null ; SIBUuid8 remoteMEUuid = null ; s = _localisationManager . getTRMFacade ( ) . chooseLink ( linkUuid ) ; // If the link is not available in WLM return null as we don ' t know where to send this
if ( s == null ) { remoteMEUuid = null ; } else { remoteMEUuid = s . getInboundMeUuid ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getRemoteMEUuid" , remoteMEUuid ) ; return remoteMEUuid ; |
public class LauncherConfig {
    /**
     * Import the launcher data from node.
     *
     * @param node The node reference (must not be <code>null</code>).
     * @return The launcher data.
     * @throws LionEngineException If unable to read node.
     */
    public static LauncherConfig imports(Xml node) {
        Check.notNull(node);
        // Import every launchable child declaration.
        final Collection<Xml> children = node.getChildren(LaunchableConfig.NODE_LAUNCHABLE);
        final Collection<LaunchableConfig> launchables = new ArrayList<>(children.size());
        for (final Xml launchable : children) {
            launchables.add(LaunchableConfig.imports(launchable));
        }
        // NOTE(review): "level" is read from ATT_RATE (with default 0) — this looks
        // like it should read a dedicated level attribute; confirm upstream.
        final int level = node.readInteger(0, ATT_RATE);
        final int rate = node.readInteger(ATT_RATE);
        return new LauncherConfig(level, rate, launchables);
    }
}
public class ElasticSearch { /** * Format the query url template according to the parameters .
* The format will be / { index } / { type } / { id } ? { query } if query is not empty ( or null ) otherwise the format will be / { index } / { type } / { id }
* @ param index
* @ param type
* @ param id
* @ param query
* @ return */
private String formatQueryUrl ( String index , String type , String id , Object query ) { } } | String queryUrl = String . format ( fullQueryTemplate , index == null ? "_all" : index , type == null ? "_all" : type , id == null ? "" : id , toQueryParams ( query ) ) ; return queryUrl . endsWith ( "?" ) ? queryUrl . substring ( 0 , queryUrl . length ( ) - 1 ) : queryUrl ; |
public class ClassReflectionIndex { /** * Get a method declared on this object .
* @ param returnType the method return type
* @ param name the name of the method
* @ param paramTypes the parameter types of the method
* @ return the method , or { @ code null } if no method of that description exists */
public Method getMethod ( Class < ? > returnType , String name , Class < ? > ... paramTypes ) { } } | final Map < ParamList , Map < Class < ? > , Method > > nameMap = methods . get ( name ) ; if ( nameMap == null ) { return null ; } final Map < Class < ? > , Method > paramsMap = nameMap . get ( createParamList ( paramTypes ) ) ; if ( paramsMap == null ) { return null ; } return paramsMap . get ( returnType ) ; |
public class StorageComponent { /** * Returns child StorageComponent by given key .
* @ param key
* the key of children StorageComponent .
* @ return a StorageComponent object . */
public StorageComponent < KeyType , ValueType > getStorageComponent ( final KeyType key ) { } } | KeyToStorageComponent < KeyType , ValueType > storage = getkeyToStorage ( ) ; StorageComponent < KeyType , ValueType > storageComponent = storage . get ( key ) ; if ( storageComponent == null ) { storageComponent = new StorageComponent < KeyType , ValueType > ( ) ; storage . put ( key , storageComponent ) ; } return storageComponent ; |
public class CPOptionCategoryUtil {
    /**
     * Returns all the cp option categories where uuid = &#63; and companyId = &#63;.
     *
     * @param uuid the uuid
     * @param companyId the company ID
     * @return the matching cp option categories
     */
    public static List<CPOptionCategory> findByUuid_C(String uuid, long companyId) {
        // Thin static facade over the persistence bean.
        return getPersistence().findByUuid_C(uuid, companyId);
    }
}
public class MessageFieldUtil { /** * Returns a field value getter by index name for proto repeated enum field . */
public static String javaRepeatedEnumValueGetterByIndexName ( Field field ) { } } | if ( field . isRepeated ( ) ) { return GETTER_PREFIX + Formatter . toPascalCase ( field . getName ( ) ) + VALUE ; } throw new IllegalArgumentException ( field . toString ( ) ) ; |
public class Search {
    /**
     * Obtain the {@link SearchManager} object for a cache.
     */
    public static SearchManager getSearchManager(Cache<?, ?> cache) {
        // Fail fast on a null cache or one without an advanced view.
        if (cache == null || cache.getAdvancedCache() == null) {
            throw new IllegalArgumentException("cache parameter shall not be null");
        }
        AdvancedCache<?, ?> advancedCache = cache.getAdvancedCache();
        // Verify the caller may perform search operations before exposing them.
        ensureAccessPermissions(advancedCache);
        return new SearchManagerImpl(advancedCache);
    }
}
public class Type1Font {
    /**
     * Reads the font metrics.
     *
     * @param rf the AFM file
     * @throws DocumentException the AFM file is invalid
     * @throws IOException the AFM file could not be read
     */
    public void process(RandomAccessFileOrArray rf) throws DocumentException, IOException {
        String line;
        boolean isMetrics = false;
        // --- Header section: global font attributes up to StartCharMetrics ---
        while ((line = rf.readLine()) != null) {
            StringTokenizer tok = new StringTokenizer(line, " ,\n\r\t\f");
            if (!tok.hasMoreTokens())
                continue;
            String ident = tok.nextToken();
            // "\u00ff" never occurs in the input, so nextToken("\u00ff") grabs the
            // rest of the line; substring(1) drops the leading separator space.
            if (ident.equals("FontName"))
                FontName = tok.nextToken("\u00ff").substring(1);
            else if (ident.equals("FullName"))
                FullName = tok.nextToken("\u00ff").substring(1);
            else if (ident.equals("FamilyName"))
                FamilyName = tok.nextToken("\u00ff").substring(1);
            else if (ident.equals("Weight"))
                Weight = tok.nextToken("\u00ff").substring(1);
            else if (ident.equals("ItalicAngle"))
                ItalicAngle = Float.parseFloat(tok.nextToken());
            else if (ident.equals("IsFixedPitch"))
                IsFixedPitch = tok.nextToken().equals("true");
            else if (ident.equals("CharacterSet"))
                CharacterSet = tok.nextToken("\u00ff").substring(1);
            else if (ident.equals("FontBBox")) {
                llx = (int) Float.parseFloat(tok.nextToken());
                lly = (int) Float.parseFloat(tok.nextToken());
                urx = (int) Float.parseFloat(tok.nextToken());
                ury = (int) Float.parseFloat(tok.nextToken());
            } else if (ident.equals("UnderlinePosition"))
                UnderlinePosition = (int) Float.parseFloat(tok.nextToken());
            else if (ident.equals("UnderlineThickness"))
                UnderlineThickness = (int) Float.parseFloat(tok.nextToken());
            else if (ident.equals("EncodingScheme"))
                EncodingScheme = tok.nextToken("\u00ff").substring(1);
            else if (ident.equals("CapHeight"))
                CapHeight = (int) Float.parseFloat(tok.nextToken());
            else if (ident.equals("XHeight"))
                XHeight = (int) Float.parseFloat(tok.nextToken());
            else if (ident.equals("Ascender"))
                Ascender = (int) Float.parseFloat(tok.nextToken());
            else if (ident.equals("Descender"))
                Descender = (int) Float.parseFloat(tok.nextToken());
            else if (ident.equals("StdHW"))
                StdHW = (int) Float.parseFloat(tok.nextToken());
            else if (ident.equals("StdVW"))
                StdVW = (int) Float.parseFloat(tok.nextToken());
            else if (ident.equals("StartCharMetrics")) {
                isMetrics = true;
                break;
            }
        }
        if (!isMetrics)
            throw new DocumentException("Missing StartCharMetrics in " + fileName);
        // --- Character metrics: one "C ...; WX ...; N ...; B ...;" record per line ---
        while ((line = rf.readLine()) != null) {
            StringTokenizer tok = new StringTokenizer(line);
            if (!tok.hasMoreTokens())
                continue;
            String ident = tok.nextToken();
            if (ident.equals("EndCharMetrics")) {
                isMetrics = false;
                break;
            }
            Integer C = Integer.valueOf(-1);
            Integer WX = Integer.valueOf(250);
            String N = "";
            int B[] = null;
            // Re-tokenize the full line on ';' to walk each sub-record.
            tok = new StringTokenizer(line, ";");
            while (tok.hasMoreTokens()) {
                StringTokenizer tokc = new StringTokenizer(tok.nextToken());
                if (!tokc.hasMoreTokens())
                    continue;
                ident = tokc.nextToken();
                if (ident.equals("C"))
                    C = Integer.valueOf(tokc.nextToken());
                else if (ident.equals("WX"))
                    WX = Integer.valueOf((int) Float.parseFloat(tokc.nextToken()));
                else if (ident.equals("N"))
                    N = tokc.nextToken();
                else if (ident.equals("B")) {
                    B = new int[]{Integer.parseInt(tokc.nextToken()),
                            Integer.parseInt(tokc.nextToken()),
                            Integer.parseInt(tokc.nextToken()),
                            Integer.parseInt(tokc.nextToken())};
                }
            }
            Object metrics[] = new Object[]{C, WX, N, B};
            // Index by character code (when >= 0) and always by glyph name.
            if (C.intValue() >= 0)
                CharMetrics.put(C, metrics);
            CharMetrics.put(N, metrics);
        }
        if (isMetrics)
            throw new DocumentException("Missing EndCharMetrics in " + fileName);
        // Alias nonbreakingspace to space when the AFM does not define it.
        if (!CharMetrics.containsKey("nonbreakingspace")) {
            Object[] space = (Object[]) CharMetrics.get("space");
            if (space != null)
                CharMetrics.put("nonbreakingspace", space);
        }
        // --- Skip ahead to the kerning section (if any) ---
        while ((line = rf.readLine()) != null) {
            StringTokenizer tok = new StringTokenizer(line);
            if (!tok.hasMoreTokens())
                continue;
            String ident = tok.nextToken();
            if (ident.equals("EndFontMetrics"))
                return;
            if (ident.equals("StartKernPairs")) {
                isMetrics = true;
                break;
            }
        }
        if (!isMetrics)
            throw new DocumentException("Missing EndFontMetrics in " + fileName);
        // --- Kern pairs: "KPX first second width" records ---
        while ((line = rf.readLine()) != null) {
            StringTokenizer tok = new StringTokenizer(line);
            if (!tok.hasMoreTokens())
                continue;
            String ident = tok.nextToken();
            if (ident.equals("KPX")) {
                String first = tok.nextToken();
                String second = tok.nextToken();
                Integer width = Integer.valueOf((int) Float.parseFloat(tok.nextToken()));
                Object relates[] = (Object[]) KernPairs.get(first);
                // Pairs are stored as a flat [second, width, second, width, ...] array.
                if (relates == null)
                    KernPairs.put(first, new Object[]{second, width});
                else {
                    int n = relates.length;
                    Object relates2[] = new Object[n + 2];
                    System.arraycopy(relates, 0, relates2, 0, n);
                    relates2[n] = second;
                    relates2[n + 1] = width;
                    KernPairs.put(first, relates2);
                }
            } else if (ident.equals("EndKernPairs")) {
                isMetrics = false;
                break;
            }
        }
        if (isMetrics)
            throw new DocumentException("Missing EndKernPairs in " + fileName);
        rf.close();
    }
}
public class ResultExporter {
    /**
     * Renders one band of the report (page header/footer, group header/footer, or detail)
     * by exporting each visible cell via {@code exportCell}.
     *
     * Band selection: gc == null && staticBand != null -> the static band (page header/footer
     * detected by name); gc == null && staticBand == null -> the detail band; gc != null ->
     * the group's footer band when hasFunction, otherwise its header band.
     *
     * @param gc          group cache for group bands, or null for static/detail bands
     * @param staticBand  explicit band to print when gc is null; null means "detail band"
     * @param hasFunction for group bands: true selects the footer (function) band
     * @param usePrevious forwarded to getBandElementValue — presumably selects the previous
     *                    row's values for group footers; TODO confirm against that helper
     * @throws QueryException when a subreport parameter column cannot be resolved
     */
    private void printBand(GroupCache gc, Band staticBand, boolean hasFunction, boolean usePrevious) throws QueryException {
        Band band;
        List<FunctionCache> fCache = null;
        isDetail = false;
        boolean isPageHeaderFooter = false;
        if (gc == null) {
            if (staticBand != null) {
                band = staticBand;
                // page header/footer bands are identified purely by their reserved names
                if (ReportLayout.PAGE_HEADER_BAND_NAME.equals(band.getName()) || ReportLayout.PAGE_FOOTER_BAND_NAME.equals(band.getName())) {
                    isPageHeaderFooter = true;
                }
            } else {
                isDetail = true;
                band = getReportLayout().getDetailBand();
            }
        } else {
            fCache = gc.getFuncCache();
            if (hasFunction) {
                band = gc.getFgBand();
            } else {
                band = gc.getHgBand();
            }
        }
        currentBandName = band.getName();
        int rows = band.getRowCount();
        int cols = band.getColumnCount();
        Set<CellElement> ignored = getIgnoredCells(band);
        int lastRow = -1;
        for (int i = 0; i < rows; i++) {
            // hide when expression
            // a hidden cell is considered to be rendered with null value (but value is taken
            // into account in functions)
            // if all cells from a row are hidden, we consider the entire row hidden (hideAll)
            // and no cell from that row is rendered
            boolean[] hide = new boolean[cols];
            boolean hideAll = false;
            int count = 0;
            boolean rowWithHideExpression = false;
            boolean hideExpressionForEmptyData = false;
            // first pass: detect whether any cell in this row carries a hide-when expression
            for (int j = 0; j < cols; j++) {
                BandElement bandElement = band.getElementAt(i, j);
                if ((bandElement != null) && (bandElement.getHideWhenExpression() != null)) {
                    rowWithHideExpression = true;
                    if (hideExpressionForEmptyData(bandElement.getHideWhenExpression())) {
                        hideExpressionForEmptyData = true;
                    }
                }
            }
            if (rowWithHideExpression) {
                // second pass: evaluate each cell's hide-when expression
                for (int j = 0; j < cols; j++) {
                    BandElement bandElement = band.getElementAt(i, j);
                    if ((bandElement != null) && (bandElement.getHideWhenExpression() != null)) {
                        String expression = bandElement.getHideWhenExpression();
                        Boolean result = (Boolean) evaluateExpression("", expression, currentBandName, null, gc);
                        hide[j] = result;
                    } else {
                        // bandElement can be null in older version of reports (previous to 4.1)
                        hide[j] = (bandElement == null) || "".equals(bandElement.getText());
                    }
                    if (hide[j]) {
                        count++;
                    }
                }
                if (count == cols) {
                    hideAll = true;
                }
            }
            // when printing rows for empty data, skip rows that have no empty-data hide expression
            if (printRowsForEmptyData && !hideExpressionForEmptyData) {
                continue;
            }
            for (int j = 0; j < cols; j++) {
                if (findIgnoredCellElement(ignored, i, j)) {
                    // System.out.println("*** header ignored i=" + i + " j=" + j);
                    continue;
                }
                // newRow is computed relative to cells that are rendered through exportCell
                // ignored cells are not taken into account
                if (i > lastRow) {
                    newRow = true;
                } else {
                    newRow = false;
                }
                BandElement bandElement = band.getElementAt(i, j);
                if (bandElement != null) {
                    newRowCount = Math.max(newRowCount, bandElement.getRowSpan());
                }
                if (newRow) {
                    int gridRow = getReportLayout().getGridRow(band.getName(), i);
                    RowElement re = getRowElement(getReportLayout(), gridRow);
                    // if new page is put for the first row in the layout, we should not create a new page
                    if (re.isStartOnNewPage() && !start) {
                        if (!startNewPage) {
                            // if header on every page, must not use header rows inside pageRow count
                            if (getReportLayout().isHeaderOnEveryPage()) {
                                if (pageRow - headerRow > 0) {
                                    createNewPage();
                                }
                            } else {
                                createNewPage();
                            }
                        } else {
                            // we create new page excepting only for first group or in case there are no header rows
                            if ((getReportLayout().getHeaderBand().getRowCount() == 0) || !(ReportLayout.GROUP_HEADER_BAND_NAME_PREFIX + "1").equals(band.getName())) {
                                createNewPage();
                            }
                            startNewPage = false;
                        }
                    }
                }
                // subreports with parameters can be used only inside detail band
                // we must update values for subreport parameters
                // parameter name used in subreport must be the column alias from parent report!
                // !!! we may have a subreport with parameters inside another subreport, so we always try to add parameters from subreports
                if (isDetail && ((bandElement instanceof ReportBandElement) || (bandElement instanceof ChartBandElement))) {
                    Map<String, QueryParameter> params = bean.getParametersBean().getSubreportParams();
                    if (params.size() == 0) {
                        // first time we have to look for subreports and add parameters of subreports that are not yet found in master report
                        List<Report> subreports = ReportUtil.getDetailSubreports(bean.getReportLayout());
                        for (Report subreport : subreports) {
                            bean.getParametersBean().addNotFoundSubreportParameters(subreport.getParameters());
                        }
                        // similar for charts
                        List<Chart> charts = ReportUtil.getDetailCharts(bean.getReportLayout());
                        for (Chart chart : charts) {
                            bean.getParametersBean().addNotFoundSubreportParameters(chart.getReport().getParameters());
                        }
                    }
                    for (QueryParameter qp : params.values()) {
                        try {
                            Object pValue = getResult().nextValue(qp.getName());
                            bean.getParametersBean().setParameterValue(qp.getName(), pValue);
                        } catch (QueryException ex) {
                            // if parameter is in third level (report -> subreport -> subreport:param) it won't be found in first level report
                            if (!bean.isSubreport()) {
                                throw new QueryException("Invalid column for parameter: " + qp.getName(), ex);
                            }
                        }
                    }
                }
                Object value = getBandElementValue(fCache, gc, staticBand, hasFunction, usePrevious, bandElement);
                // hide when expression: hidden cells render as null unless the whole row is hidden
                if (!hideAll && hide[j]) {
                    value = null;
                }
                int rowSpan = 1, colSpan = 1;
                if (bandElement != null) {
                    rowSpan = bandElement.getRowSpan();
                    colSpan = bandElement.getColSpan();
                }
                int gridRow = getReportLayout().getGridRow(band.getName(), i);
                boolean isImage = bandElement instanceof ImageBandElement;
                if (!hideAll) {
                    exportCell(band.getName(), bandElement, value, gridRow, i, j, cols, rowSpan, colSpan, isImage);
                }
                lastRow = i;
                // after exportCell where we may use newRow and newRowCount variables
                // we need to reset newRowCount
                if (newRow) {
                    newRowCount = 1;
                }
            }
            // page header and page footer do not count for row computation
            if (!isPageHeaderFooter) {
                exporterRow++;
                if (!hideAll) {
                    if (ReportLayout.HEADER_BAND_NAME.equals(band.getName())) {
                        headerRow++;
                    }
                    pageRow++;
                }
                start = false;
            }
        }
    }
}
public class JBBPBitInputStream { /** * Read number of long items from the input stream .
* @ param items number of items to be read from the input stream , if less than
* zero then all stream till the end will be read
* @ param byteOrder the order of bytes to be used to decode values
* @ return read items as a long array
* @ throws IOException it will be thrown for any transport problem during the
* operation
* @ see JBBPByteOrder # BIG _ ENDIAN
* @ see JBBPByteOrder # LITTLE _ ENDIAN */
public long [ ] readLongArray ( final int items , final JBBPByteOrder byteOrder ) throws IOException { } } | int pos = 0 ; if ( items < 0 ) { long [ ] buffer = new long [ INITIAL_ARRAY_BUFFER_SIZE ] ; // till end
while ( hasAvailableData ( ) ) { final long next = readLong ( byteOrder ) ; if ( buffer . length == pos ) { final long [ ] newbuffer = new long [ buffer . length << 1 ] ; System . arraycopy ( buffer , 0 , newbuffer , 0 , buffer . length ) ; buffer = newbuffer ; } buffer [ pos ++ ] = next ; } if ( buffer . length == pos ) { return buffer ; } final long [ ] result = new long [ pos ] ; System . arraycopy ( buffer , 0 , result , 0 , pos ) ; return result ; } else { // number
final long [ ] buffer = new long [ items ] ; for ( int i = 0 ; i < items ; i ++ ) { buffer [ i ] = readLong ( byteOrder ) ; } return buffer ; } |
public class QueryParserKraken { /** * Parses the show . */
private ShowQueryBuilder parseShow ( ShowQueryBuilder query ) { } } | Token token = scanToken ( ) ; if ( token == Token . TABLE ) { } else if ( token == Token . IDENTIFIER && _lexeme . equalsIgnoreCase ( "tableinfo" ) ) { query . method ( "tableinfo" ) ; } else { throw error ( "Expected TABLE at {0}" , token ) ; } token = scanToken ( ) ; if ( token != Token . IDENTIFIER ) { throw error ( "Expected IDENTIFIER at {0}" , token ) ; } String pod = _lexeme ; String name ; if ( peekToken ( ) == Token . DOT ) { scanToken ( ) ; if ( ( token = scanToken ( ) ) != Token . IDENTIFIER ) { throw error ( "Expected IDENTIFIER at {0}" , token ) ; } name = _lexeme ; } else { name = pod ; pod = getPodName ( ) ; } query . setTableName ( pod + '.' + name ) ; return query ; |
public class Radar {
    /**
     * <editor-fold defaultstate="collapsed" desc="Visualization">
     * Paints the radar: background image, in-range blips with their names,
     * the rotating beam image/shape, and a disabled overlay when the component
     * is disabled.
     */
    @Override
    protected void paintComponent(Graphics g) {
        if (!isInitialized()) {
            return;
        }
        // Work on a copy so rendering hints/transforms don't leak to other components.
        final Graphics2D G2 = (Graphics2D) g.create();
        G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
        G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
        G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
        // Translate the coordinate system related to insets
        G2.translate(getFramelessOffset().getX(), getFramelessOffset().getY());
        final AffineTransform OLD_TRANSFORM = G2.getTransform();
        // Draw combined background image
        G2.drawImage(bImage, 0, 0, null);
        // Draw blips (only those within the configured range)
        G2.setColor(BLIP_TEXT_COLOR);
        G2.setFont(BLIP_FONT);
        for (Poi poi : blips.values()) {
            if (poi.distanceTo(MY_LOCATION) < this.range) {
                // NOTE(review): getWidth() is used for BOTH the X and Y offsets below;
                // for non-square blip images the vertical centering looks wrong —
                // confirm whether getHeight() was intended for the Y terms.
                G2.drawImage(poi.getPoiImage(), (int) (CENTER.getX() - poi.getPoiImage().getWidth() / 2.0 + (poi.getLocationXY().getX() - CENTER_XY.getX()) / pixelScaleX), (int) (CENTER.getY() - poi.getPoiImage().getWidth() / 2.0 + (poi.getLocationXY().getY() - CENTER_XY.getY()) / pixelScaleY), null);
                G2.drawString(poi.getName(), (int) (CENTER.getX() - poi.getPoiImage().getWidth() + (poi.getLocationXY().getX() - CENTER_XY.getX()) / pixelScaleX), (int) (CENTER.getY() - poi.getPoiImage().getWidth() + (poi.getLocationXY().getY() - CENTER_XY.getY()) / pixelScaleY));
            }
        }
        // Draw the beam: rotated image first, then the translucent beam shape
        G2.rotate(rotationAngle, CENTER.getX(), CENTER.getY());
        G2.drawImage(beamImage, 0, 0, null);
        G2.setTransform(OLD_TRANSFORM);
        G2.rotate(Math.toRadians(-135) + rotationAngle, CENTER.getX(), CENTER.getY());
        G2.setColor(BEAM_COLOR);
        G2.draw(BEAM);
        G2.setTransform(OLD_TRANSFORM);
        if (!isEnabled()) {
            G2.drawImage(disabledImage, 0, 0, null);
        }
        G2.dispose();
    }
}
public class Observable {
    /**
     * Returns an Observable that reverses the effect of {@link #materialize materialize} by transforming the
     * {@link Notification} objects emitted by the source ObservableSource into the items or notifications they
     * represent.
     * <p>
     * <img width="640" height="335" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/dematerialize.png" alt="">
     * <p>
     * When the upstream signals an {@link Notification#createOnError(Throwable) onError} or
     * {@link Notification#createOnComplete() onComplete} item, the returned Observable disposes of the flow
     * and terminates with that type of terminal event:
     * <pre><code>
     * Observable.just(createOnNext(1), createOnComplete(), createOnNext(2))
     * .doOnDispose(() -&gt; System.out.println("Disposed!"));
     * .test()
     * .assertResult(1);
     * </code></pre>
     * If the upstream signals {@code onError} or {@code onComplete} directly, the flow is terminated
     * with the same event.
     * <pre><code>
     * Observable.just(createOnNext(1), createOnNext(2))
     * .test()
     * .assertResult(1, 2);
     * </code></pre>
     * If this behavior is not desired, the completion can be suppressed by applying
     * {@link #concatWith(ObservableSource)} with a {@link #never()} source.
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code dematerialize} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param <T2> the output value type
     * @return an Observable that emits the items and notifications embedded in the {@link Notification} objects
     *         emitted by the source ObservableSource
     * @see <a href="http://reactivex.io/documentation/operators/materialize-dematerialize.html">ReactiveX operators documentation: Dematerialize</a>
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final <T2> Observable<T2> dematerialize() {
        // Unchecked cast: the operator contract requires this source to emit Notification<T2>.
        @SuppressWarnings("unchecked")
        Observable<Notification<T2>> m = (Observable<Notification<T2>>) this;
        return RxJavaPlugins.onAssembly(new ObservableDematerialize<T2>(m));
    }
}
public class OptionsRenderer { /** * Renders options report .
* @ param config rendering config
* @ return rendered report */
@ Override public String renderReport ( final OptionsConfig config ) { } } | final StringBuilder res = new StringBuilder ( ) ; render ( config , res ) ; return res . toString ( ) ; |
public class ExeHelpAction { /** * { @ inheritDoc } Prints the usage statement . The format is :
* < pre >
* Usage : { tasks | . . . } [ arguments ]
* < / pre > */
public String getScriptUsage ( ) { } } | StringBuffer scriptUsage = new StringBuffer ( NL ) ; scriptUsage . append ( getHelpPart ( "usage" , COMMAND ) ) ; scriptUsage . append ( " {" ) ; ExeAction [ ] tasks = ExeAction . values ( ) ; for ( int i = 0 ; i < tasks . length ; i ++ ) { ExeAction task = tasks [ i ] ; scriptUsage . append ( task . toString ( ) ) ; if ( i != ( tasks . length - 1 ) ) { scriptUsage . append ( "|" ) ; } } scriptUsage . append ( "} [" ) ; scriptUsage . append ( getHelpPart ( "global.options.lower" ) ) ; scriptUsage . append ( "]" ) ; scriptUsage . append ( NL ) ; return scriptUsage . toString ( ) ; |
public class SectionCursorAdapter { /** * This will map a position in the list adapter ( which includes mSections ) to a position in
* the cursor ( which does not contain mSections ) .
* @ param listPosition the position of the current item in the list with mSections included
* @ return the correct position to use with the cursor */
public int getCursorPositionWithoutSections ( int listPosition ) { } } | if ( mSections . size ( ) == 0 ) { return listPosition ; } else if ( ! isSection ( listPosition ) ) { int sectionIndex = getIndexWithinSections ( listPosition ) ; if ( isListPositionBeforeFirstSection ( listPosition , sectionIndex ) ) { return listPosition ; } else { return listPosition - ( sectionIndex + 1 ) ; } } else { return NO_CURSOR_POSITION ; } |
public class StructureSequenceMatcher { /** * Generates a ProteinSequence corresponding to the sequence of struct ,
* and maintains a mapping from the sequence back to the original groups .
* Chains are appended to one another . ' X ' is used for heteroatoms .
* @ param struct Input structure
* @ param groupIndexPosition An empty map , which will be populated with
* ( residue index in returned ProteinSequence ) - > ( Group within struct )
* @ return A ProteinSequence with the full sequence of struct . Chains are
* concatenated in the same order as the input structures
* @ see { @ link SeqRes2AtomAligner # getFullAtomSequence ( List , Map ) } , which
* does the heavy lifting . */
public static ProteinSequence getProteinSequenceForStructure ( Structure struct , Map < Integer , Group > groupIndexPosition ) { } } | if ( groupIndexPosition != null ) { groupIndexPosition . clear ( ) ; } StringBuilder seqStr = new StringBuilder ( ) ; for ( Chain chain : struct . getChains ( ) ) { List < Group > groups = chain . getAtomGroups ( ) ; Map < Integer , Integer > chainIndexPosition = new HashMap < Integer , Integer > ( ) ; int prevLen = seqStr . length ( ) ; // get the sequence for this chain
String chainSeq = SeqRes2AtomAligner . getFullAtomSequence ( groups , chainIndexPosition , false ) ; seqStr . append ( chainSeq ) ; // fix up the position to include previous chains , and map the value back to a Group
for ( Integer seqIndex : chainIndexPosition . keySet ( ) ) { Integer groupIndex = chainIndexPosition . get ( seqIndex ) ; groupIndexPosition . put ( prevLen + seqIndex , groups . get ( groupIndex ) ) ; } } ProteinSequence s = null ; try { s = new ProteinSequence ( seqStr . toString ( ) ) ; } catch ( CompoundNotFoundException e ) { // I believe this can ' t happen , please correct this if I ' m wrong - JD 2014-10-24
// we can log an error if it does , it would mean there ' s a bad bug somewhere
logger . error ( "Could not create protein sequence, unknown compounds in string: {}" , e . getMessage ( ) ) ; } return s ; |
public class PersistentExecutorMBeanImpl {
    /**
     * (non-Javadoc)
     * Delegates the transfer to the wrapped persistent executor, routing any failure
     * through the common MBean error handler so it is logged consistently.
     *
     * @see com.ibm.websphere.concurrent.persistent.mbean.PersistentExecutorMBean#transfer(java.lang.Long, long)
     */
    @Override
    public int transfer(Long maxTaskId, long oldPartitionId) throws Exception {
        try {
            return _pe.transfer(maxTaskId, oldPartitionId);
        } catch (Exception e) {
            // buildAndLogException wraps and logs before rethrowing — the cause is preserved.
            throw buildAndLogException(e);
        }
    }
}
public class SimulatorJobTracker { /** * The cleanupJob method maintains the queue cleanQueue . When a job is finalized ,
* it is added to the cleanupQueue . Jobs are removed from the cleanupQueue
* so that its size is maintained to be less than that specified by
* JOBS _ IN _ MUMAK _ MEMORY .
* @ param job : The JobInProgress object that was just finalized and is
* going to be added to the cleanupQueue . */
private void cleanupJob ( JobInProgress job ) { } } | cleanupQueue . add ( job . getJobID ( ) ) ; while ( cleanupQueue . size ( ) > JOBS_IN_MUMAK_MEMORY ) { JobID removedJob = cleanupQueue . poll ( ) ; // retireJob ( removedJob , " " ) ;
} |
public class JavaLexer {
    /**
     * $ANTLR start "T__56"
     * Generated ANTLR lexer rule: matches the single character '>'.
     * NOTE(review): generated code — change the grammar (Java.g), not this file.
     */
    public final void mT__56() throws RecognitionException {
        try {
            int _type = T__56;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:37:7: ( '>' )
            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:37:9: '>'
            {
                match('>');
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class ClientNode { /** * Gets node index and its peer count
* @ return ClientsResult object with info */
public ClientsResult calculateAllNodesResult ( ) { } } | final List < String > childNodePaths = getChildren ( electionRootPath , false ) ; _logger . info ( "Total peers = {} " , childNodePaths . size ( ) ) ; Collections . sort ( childNodePaths ) ; int index = childNodePaths . indexOf ( clientNodePath . substring ( clientNodePath . lastIndexOf ( '/' ) + 1 ) ) ; return new ClientsResult ( index , childNodePaths . size ( ) ) ; |
public class DocumentStore { /** * Deletes the DocumentStore instance .
* @ throws DocumentStoreNotDeletedException if the DocumentStore doesn ' t exist on disk or
* if there was an error deleting the DocumentStore directory . */
public void delete ( ) throws DocumentStoreNotDeletedException { } } | try { this . close ( ) ; } catch ( Exception e ) { // caught exception could be something benign like DocumentStore already closed , so just log
logger . log ( Level . WARNING , "Caught exception whilst closing DocumentStore in delete()" , e ) ; } if ( ! location . exists ( ) ) { String msg = String . format ( "DocumentStore %s doesn't exist on disk" , location ) ; logger . warning ( msg ) ; throw new DocumentStoreNotDeletedException ( msg ) ; } else { try { FileUtils . deleteDirectory ( location ) ; } catch ( IOException ioe ) { String msg = String . format ( "DocumentStore %s not deleted" , location ) ; logger . log ( Level . WARNING , msg , ioe ) ; throw new DocumentStoreNotDeletedException ( msg , ioe ) ; } eventBus . post ( new DocumentStoreDeleted ( databaseName ) ) ; } |
public class ClassHelper { /** * Load a class .
* @ param name
* The class name .
* @ param < T >
* The expected class type .
* @ return The class . */
public static < T > Class < T > getType ( String name ) { } } | Class < T > type ; try { type = ( Class < T > ) Class . forName ( name ) ; } catch ( ClassNotFoundException e ) { throw new XOException ( "Cannot find class with name '" + name + "'" , e ) ; } return type ; |
public class ClassScanner { /** * Finds a class from its name */
public Class < ? > findClass ( String className ) throws ClassNotFoundException { } } | for ( String skip : SKIP_CLASSES ) { if ( skip . equals ( className ) ) { return null ; } } for ( ClassLoader classLoader : getClassLoaders ( ) ) { try { return classLoader . loadClass ( className ) ; } catch ( ClassNotFoundException e ) { // ignore
} } return Class . forName ( className ) ; |
public class EncryptionContext {
    /**
     * Fluent API to add encryption context.
     *
     * @param key   context key to add
     * @param value value to associate with the key (replaces any existing entry)
     * @return this instance, for method chaining
     */
    public EncryptionContext withContext(String key, String value) {
        this.context.put(key, value);
        return this;
    }
}
public class HttpRosetteAPI {
    /**
     * Reads an InputStream to exhaustion and returns its contents as a byte array.
     * The stream is not closed by this method.
     *
     * @param is InputStream to drain
     * @return all remaining bytes of the stream
     * @throws IOException if reading from the stream fails
     */
    private static byte[] getBytes(InputStream is) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] buf = new byte[4096];
        int read;
        // Idiomatic read loop (the original used while(true) with an in-loop EOF check);
        // the redundant flush() was dropped — it is a no-op on ByteArrayOutputStream.
        while ((read = is.read(buf)) != -1) {
            out.write(buf, 0, read);
        }
        return out.toByteArray();
    }
}
public class ClientCertificateSigner {
    /**
     * The main method for this application. It expects the following arguments:
     * <ol>
     * <li>The name of the target environment (e.g. Sandbox, PreProd, Production, etc.).</li>
     * <li>The name of the client.</li>
     * <li>The path to the directory that contains the private certificate authorities and passwords.</li>
     * </ol>
     * Reads the CA keystore and password, signs the client's CSR with a ~30-year
     * lifetime, verifies the result against the CA key, and writes client + CA
     * certificates to a PEM file. Exits 1 on failure, 0 on success.
     *
     * @param args the arguments that were passed into this program
     */
    static public void main(String[] args) {
        String environment = args[0];
        String clientCertificatePrefix = args[1] + "-" + environment;
        String caKeyStorePrefix = args[2] + File.separator + environment + "-CA";
        // try-with-resources: all four streams are closed in reverse order, even on failure
        try (FileReader pwReader = new FileReader(caKeyStorePrefix + ".pw");
             FileInputStream caInput = new FileInputStream(caKeyStorePrefix + ".p12");
             PemReader csrReader = new PemReader(new FileReader(clientCertificatePrefix + ".csr"));
             PemWriter pemWriter = new PemWriter(new FileWriter(clientCertificatePrefix + ".pem"))) {
            logger.info("Loading the private certificate authority keys...");
            // password length is derived from the string form of a 16-byte Tag —
            // presumably the .pw file was written with exactly that length; TODO confirm
            int size = new Tag(16).toString().length();
            char[] caPassword = new char[size];
            pwReader.read(caPassword);
            RsaCertificateManager manager = new RsaCertificateManager();
            KeyStore caKeyStore = manager.retrieveKeyStore(caInput, caPassword);
            PrivateKey caPrivateKey = manager.retrievePrivateKey(caKeyStore, CA_ALIAS, caPassword);
            X509Certificate caCertificate = manager.retrieveCertificate(caKeyStore, CA_ALIAS);
            logger.info("Reading in the certificate signing request...");
            byte[] requestBytes = csrReader.readPemObject().getContent();
            PKCS10CertificationRequest csr = new PKCS10CertificationRequest(requestBytes);
            logger.info("Generating and signing a new client certificate...");
            // certificate lifetime in milliseconds: ~30 years
            long lifetime = 30L /* years */
                    * 365L /* days */
                    * 24L /* hours */
                    * 60L /* minutes */
                    * 60L /* seconds */
                    * 1000L /* milliseconds */;
            BigInteger serialNumber = new BigInteger(RandomUtils.generateRandomBytes(16));
            X509Certificate clientCertificate = manager.signCertificateRequest(caPrivateKey, caCertificate, csr, serialNumber, lifetime);
            // sanity check: the new certificate must verify against the CA's public key
            clientCertificate.verify(caCertificate.getPublicKey());
            logger.info("Writing out the certificates to a file...");
            pemWriter.writeObject(new PemObject("CERTIFICATE", clientCertificate.getEncoded()));
            pemWriter.writeObject(new PemObject("CERTIFICATE", caCertificate.getEncoded()));
        } catch (CertificateException | NoSuchAlgorithmException | InvalidKeyException | NoSuchProviderException | SignatureException | IOException e) {
            logger.error("An error occurred while attempting to generate the client certificate:", e);
            System.exit(1);
        }
        System.exit(0);
    }
}
public class CryptoUtil {
    /**
     * Generates an AES key; supported key sizes are 128, 192 and 256 bits.
     * (Translated from the original Chinese comment.)
     *
     * @param keysize key length in bits (128, 192 or 256)
     * @return the raw encoded key bytes
     */
    public static byte[] generateAesKey(int keysize) {
        try {
            KeyGenerator keyGenerator = KeyGenerator.getInstance(AES_ALG);
            keyGenerator.init(keysize);
            SecretKey secretKey = keyGenerator.generateKey();
            return secretKey.getEncoded();
        } catch (GeneralSecurityException e) {
            // Convert checked crypto failures into the project's unchecked exception.
            throw ExceptionUtil.unchecked(e);
        }
    }
}
public class DiscCache {
    /**
     * Sets the cache directory name and mode, then re-validates the cache directory.
     * (Summary translated from the original Chinese comment.)
     *
     * @param dirName directory name; when null, DIR_NAME_DEFAULT is used instead
     * @param mode    mode value stored in mMode — presumably a file-creation/visibility
     *                mode used by checkCacheDir(); TODO confirm
     */
    public void setCacheDir(String dirName, int mode) {
        if (sDebug) {
            LogUtils.v(TAG, "setCacheDir() dirName=" + dirName + " mode=" + mode);
        }
        if (dirName == null) {
            mCacheDirName = DIR_NAME_DEFAULT;
        } else {
            mCacheDirName = dirName;
        }
        mMode = mode;
        // Re-check (and, per the 'true' flag, presumably create) the directory with the new settings.
        checkCacheDir(true);
    }
}
import java.util.Iterator;
import java.util.HashMap;
import java.util.Map;

public class FilterDictionary {
    /**
     * Filters a dictionary based on its values, excluding entries whose values are
     * below the specified threshold (entries with value >= threshold are kept).
     *
     * Examples:
     *   filterDictionary({'Cierra Vega': 175, 'Alden Cantrell': 180, 'Kierra Gentry': 165, 'Pierre Cox': 190}, 170)
     *     -> {'Cierra Vega': 175, 'Alden Cantrell': 180, 'Pierre Cox': 190}
     *   filterDictionary({...}, 180) -> {'Alden Cantrell': 180, 'Pierre Cox': 190}
     *   filterDictionary({...}, 190) -> {'Pierre Cox': 190}
     *
     * @param input_dict the dictionary to be filtered
     * @param threshold  the lowest acceptable value
     * @return a new dictionary containing only entries whose values meet the threshold
     */
    public static HashMap<Object, Integer> filterDictionary(HashMap<Object, Integer> input_dict, int threshold) {
        // Enhanced for over entrySet replaces the original explicit Iterator loop.
        HashMap<Object, Integer> filtered_dict = new HashMap<>();
        for (Map.Entry<Object, Integer> entry : input_dict.entrySet()) {
            if (entry.getValue() >= threshold) {
                filtered_dict.put(entry.getKey(), entry.getValue());
            }
        }
        return filtered_dict;
    }
}
public class CPRuleAssetCategoryRelLocalServiceBaseImpl {
    /**
     * Deletes the cp rule asset category rel with the primary key from the database.
     * Also notifies the appropriate model listeners.
     * NOTE(review): generated Service Builder code — regenerate rather than hand-edit.
     *
     * @param CPRuleAssetCategoryRelId the primary key of the cp rule asset category rel
     * @return the cp rule asset category rel that was removed
     * @throws PortalException if a cp rule asset category rel with the primary key could not be found
     */
    @Indexable(type = IndexableType.DELETE)
    @Override
    public CPRuleAssetCategoryRel deleteCPRuleAssetCategoryRel(long CPRuleAssetCategoryRelId) throws PortalException {
        return cpRuleAssetCategoryRelPersistence.remove(CPRuleAssetCategoryRelId);
    }
}
public class PortletPlaceholderEventSource {
    /**
     * Implement to generate CharacterEvents based on a {@link StartElement} match.
     * The base implementation is intentionally unsupported; subclasses that handle
     * StartElement matches must override this method.
     *
     * @throws UnsupportedOperationException always, unless overridden
     */
    protected void generateCharacterEvents(IPortletWindowId portletWindowId, StartElement event, Collection<CharacterEvent> eventBuffer) throws XMLStreamException {
        throw new UnsupportedOperationException();
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.