signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ProfileManager { /** * Start managing user profile data . Call this method in onResume method of your profile Activity .
* @ param subscriber Subscriber for callbacks relevant for UI updates . */
public void resume ( @ NonNull ProfileManagerSubscriber subscriber ) { } } | profileModifier . resume ( subscriber ) ; profileChecker . resume ( subscriber ) ; |
public class FormatNumber { /** * Create the internal Formatter instance and perform the formatting .
* @ throws JspException if a JSP exception has occurred */
public void doTag ( ) throws JspException { } } | JspTag parentTag = SimpleTagSupport . findAncestorWithClass ( this , IFormattable . class ) ; // if there are errors we need to either add these to the parent AbstractBastTag or report an error .
if ( hasErrors ( ) ) { if ( parentTag instanceof IFormattable ) { IFormattable parent = ( IFormattable ) parentTag ; parent . formatterHasError ( ) ; } reportErrors ( ) ; return ; } // if there are no errors then add this to the parent as a formatter .
if ( parentTag instanceof IFormattable ) { NumberFormatter formatter = new NumberFormatter ( ) ; formatter . setPattern ( _pattern ) ; formatter . setType ( _type ) ; formatter . setLocale ( getLocale ( ) ) ; IFormattable parent = ( IFormattable ) parentTag ; parent . addFormatter ( formatter ) ; } else { String s = Bundle . getString ( "Tags_FormattableParentRequired" ) ; registerTagError ( s , null ) ; reportErrors ( ) ; } |
public class RRDToolWriter { /** * Generate a RRA line for rrdtool */
private String getRraStr ( ArcDef def ) { } } | return "RRA:" + def . getConsolFun ( ) + ":" + def . getXff ( ) + ":" + def . getSteps ( ) + ":" + def . getRows ( ) ; |
public class NormalizationTransliterator { /** * Implements { @ link Transliterator # handleTransliterate } . */
@ Override protected void handleTransliterate ( Replaceable text , Position offsets , boolean isIncremental ) { } } | // start and limit of the input range
int start = offsets . start ; int limit = offsets . limit ; if ( start >= limit ) { return ; } /* * Normalize as short chunks at a time as possible even in
* bulk mode , so that styled text is minimally disrupted .
* In incremental mode , a chunk that ends with offsets . limit
* must not be normalized .
* If it was known that the input text is not styled , then
* a bulk mode normalization could be used .
* ( For details , see the comment in the C + + version . ) */
StringBuilder segment = new StringBuilder ( ) ; StringBuilder normalized = new StringBuilder ( ) ; int c = text . char32At ( start ) ; do { int prev = start ; // Skip at least one character so we make progress .
// c holds the character at start .
segment . setLength ( 0 ) ; do { segment . appendCodePoint ( c ) ; start += Character . charCount ( c ) ; } while ( start < limit && ! norm2 . hasBoundaryBefore ( c = text . char32At ( start ) ) ) ; if ( start == limit && isIncremental && ! norm2 . hasBoundaryAfter ( c ) ) { // stop in incremental mode when we reach the input limit
// in case there are additional characters that could change the
// normalization result
start = prev ; break ; } norm2 . normalize ( segment , normalized ) ; if ( ! Normalizer2Impl . UTF16Plus . equal ( segment , normalized ) ) { // replace the input chunk with its normalized form
text . replace ( prev , start , normalized . toString ( ) ) ; // update all necessary indexes accordingly
int delta = normalized . length ( ) - ( start - prev ) ; start += delta ; limit += delta ; } } while ( start < limit ) ; offsets . start = start ; offsets . contextLimit += limit - offsets . limit ; offsets . limit = limit ; |
public class AbstractMetric { /** * / * [ deutsch ]
* < p > Vergleicht Zeiteinheiten absteigend nach ihrer L & auml ; nge . < / p >
* @ param u1 first time unit
* @ param u2 second time unit
* @ return negative , zero or positive if u1 is greater , equal to
* or smaller than u2 */
@ Override public int compare ( U u1 , U u2 ) { } } | return Double . compare ( u2 . getLength ( ) , u1 . getLength ( ) ) ; // descending |
public class DataWriter { /** * To be called after all threads are done calling { @ link # writePoint ( jsat . classifiers . DataPoint , double ) } . */
public synchronized void finish ( ) throws IOException { } } | synchronized ( out ) { for ( ByteArrayOutputStream baos : all_buffers ) { baos . writeTo ( out ) ; baos . reset ( ) ; } out . flush ( ) ; } |
public class CombineWriter { /** * { @ inheritDoc } */
@ Override public void write ( Record record ) throws IOException , InterruptedException { } } | if ( record . isKeyEmpty ( ) ) { record . setKey ( defaultRecord . getKey ( ) ) ; } else { if ( record . getKey ( ) . isGroupingEmpty ( ) && ! record . getKey ( ) . isSortEmpty ( ) ) { record . getKey ( ) . setGrouping ( defaultRecord . getKey ( ) . getGrouping ( ) ) ; } } if ( record . isValueEmpty ( ) ) { record . setValue ( defaultRecord . getValue ( ) ) ; } flushCount ++ ; if ( flushCount > cache ) { flush ( ) ; flushCount = 0 ; } List < Value > l = buffer . get ( record . getKey ( ) ) ; if ( l == null ) { l = new ArrayList < Value > ( ) ; } l . add ( record . getValue ( ) ) ; buffer . put ( record . getKey ( ) , l ) ; |
public class BinaryKey { /** * Determine if the supplied hexadecimal string is potentially a binary key by checking the format of the string .
* @ param hexadecimalStr the hexadecimal string ; may be null
* @ return true if the supplied string is a properly formatted hexadecimal representation of a binary key , or false otherwise */
public static boolean isProperlyFormattedKey ( String hexadecimalStr ) { } } | if ( hexadecimalStr == null ) return false ; // Length is expected to be the same as the digest . . .
final int length = hexadecimalStr . length ( ) ; if ( length != ALGORITHM . getHexadecimalStringLength ( ) ) return false ; // The characters all must be hexadecimal digits . . .
return StringUtil . isHexString ( hexadecimalStr ) ; |
public class RequestHandler { /** * Checks , for a sub - set of { @ link CouchbaseRequest } , if the current environment has
* the necessary feature activated . If not , throws an { @ link ServiceNotAvailableException } .
* @ param request the request to check .
* @ throws ServiceNotAvailableException if the request type needs a particular feature which isn ' t activated . */
protected void checkFeaturesForRequest ( final CouchbaseRequest request , final BucketConfig config ) { } } | if ( request instanceof BinaryRequest && ! config . serviceEnabled ( ServiceType . BINARY ) ) { throw new ServiceNotAvailableException ( "The KeyValue service is not enabled or no node in the cluster " + "supports it." ) ; } else if ( request instanceof ViewRequest && ! config . serviceEnabled ( ServiceType . VIEW ) ) { throw new ServiceNotAvailableException ( "The View service is not enabled or no node in the cluster " + "supports it." ) ; } else if ( request instanceof QueryRequest && ! config . serviceEnabled ( ServiceType . QUERY ) ) { throw new ServiceNotAvailableException ( "The Query service is not enabled or no node in the " + "cluster supports it." ) ; } else if ( request instanceof SearchRequest && ! config . serviceEnabled ( ServiceType . SEARCH ) ) { throw new ServiceNotAvailableException ( "The Search service is not enabled or no node in the " + "cluster supports it." ) ; } else if ( request instanceof AnalyticsRequest && ! config . serviceEnabled ( ServiceType . ANALYTICS ) ) { throw new ServiceNotAvailableException ( "The Analytics service is not enabled or no node in the " + "cluster supports it." ) ; } |
public class MapFileTileSetIDBroker { /** * Copies the ID from the old tileset to the new tileset which is
* useful when a tileset is renamed . This is called by the { @ link
* RenameTileSet } utility . */
protected boolean renameTileSet ( String oldName , String newName ) { } } | Integer tsid = _map . get ( oldName ) ; if ( tsid != null ) { _map . put ( newName , tsid ) ; // fudge our stored tileset ID so that we flush ourselves when
// the rename tool requests that we commit the changes
_storedTileSetID -- ; return true ; } else { return false ; } |
public class RouteFiltersInner { /** * Creates or updates a route filter in a specified resource group .
* @ param resourceGroupName The name of the resource group .
* @ param routeFilterName The name of the route filter .
* @ param routeFilterParameters Parameters supplied to the create or update route filter operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the RouteFilterInner object if successful . */
public RouteFilterInner beginCreateOrUpdate ( String resourceGroupName , String routeFilterName , RouteFilterInner routeFilterParameters ) { } } | return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , routeFilterName , routeFilterParameters ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class InjectorShell { /** * The Logger is a special case because it knows the injection point of the injected member . It ' s
* the only binding that does this . */
private static void bindLogger ( InjectorImpl injector ) { } } | Key < Logger > key = Key . get ( Logger . class ) ; LoggerFactory loggerFactory = new LoggerFactory ( ) ; injector . state . putBinding ( key , new ProviderInstanceBindingImpl < Logger > ( injector , key , SourceProvider . UNKNOWN_SOURCE , loggerFactory , Scoping . UNSCOPED , loggerFactory , ImmutableSet . < InjectionPoint > of ( ) ) ) ; try { Key < org . slf4j . Logger > slf4jKey = Key . get ( org . slf4j . Logger . class ) ; SLF4JLoggerFactory slf4jLoggerFactory = new SLF4JLoggerFactory ( injector ) ; injector . state . putBinding ( slf4jKey , new ProviderInstanceBindingImpl < org . slf4j . Logger > ( injector , slf4jKey , SourceProvider . UNKNOWN_SOURCE , slf4jLoggerFactory , Scoping . UNSCOPED , slf4jLoggerFactory , ImmutableSet . < InjectionPoint > of ( ) ) ) ; } catch ( Throwable e ) { } |
public class TlvUtil { /** * Method used get Tag value as String
* @ param tag
* tag type
* @ param value
* tag value
* @ return */
private static String getTagValueAsString ( final ITag tag , final byte [ ] value ) { } } | StringBuilder buf = new StringBuilder ( ) ; switch ( tag . getTagValueType ( ) ) { case TEXT : buf . append ( "=" ) ; buf . append ( new String ( value ) ) ; break ; case NUMERIC : buf . append ( "NUMERIC" ) ; break ; case BINARY : buf . append ( "BINARY" ) ; break ; case MIXED : buf . append ( "=" ) ; buf . append ( getSafePrintChars ( value ) ) ; break ; case DOL : buf . append ( "" ) ; break ; default : break ; } return buf . toString ( ) ; |
public class ICUService { /** * < p > Return a snapshot of the visible IDs for this service . This
* set will not change as Factories are added or removed , but the
* supported ids will , so there is no guarantee that all and only
* the ids in the returned set are visible and supported by the
* service in subsequent calls . < / p >
* < p > matchID is passed to createKey to create a key . If the
* key is not null , it is used to filter out ids that don ' t have
* the key as a fallback . */
public Set < String > getVisibleIDs ( String matchID ) { } } | Set < String > result = getVisibleIDMap ( ) . keySet ( ) ; Key fallbackKey = createKey ( matchID ) ; if ( fallbackKey != null ) { Set < String > temp = new HashSet < String > ( result . size ( ) ) ; for ( String id : result ) { if ( fallbackKey . isFallbackOf ( id ) ) { temp . add ( id ) ; } } result = temp ; } return result ; |
public class CPDefinitionInventoryLocalServiceBaseImpl { /** * Returns the cp definition inventory matching the UUID and group .
* @ param uuid the cp definition inventory ' s UUID
* @ param groupId the primary key of the group
* @ return the matching cp definition inventory
* @ throws PortalException if a matching cp definition inventory could not be found */
@ Override public CPDefinitionInventory getCPDefinitionInventoryByUuidAndGroupId ( String uuid , long groupId ) throws PortalException { } } | return cpDefinitionInventoryPersistence . findByUUID_G ( uuid , groupId ) ; |
public class LinedData { /** * ( non - Javadoc )
* @ see br . com . digilabs . jqplot . data . ChartData # toJsonString ( ) */
public String toJsonString ( ) { } } | JSONArray jsonArray = new JSONArray ( ) ; jsonArray . put ( data ) ; return jsonArray . toString ( ) ; |
public class ApitraryDaoSupport { /** * resolveApitraryEntity .
* @ param entity
* a T object .
* @ param < T >
* a T object .
* @ return a { @ link java . lang . String } object . */
protected < T > String resolveApitraryEntity ( T entity ) { } } | String entityName = ClassUtil . getClassAnnotationValue ( entity . getClass ( ) , Entity . class , "value" , String . class ) ; if ( entityName == null || entityName . isEmpty ( ) ) { entityName = StringUtil . toVerb ( entity . getClass ( ) . getSimpleName ( ) ) ; } return entityName ; |
public class AutoScalingGroup { /** * One or more load balancers associated with the group .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setLoadBalancerNames ( java . util . Collection ) } or { @ link # withLoadBalancerNames ( java . util . Collection ) } if
* you want to override the existing values .
* @ param loadBalancerNames
* One or more load balancers associated with the group .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AutoScalingGroup withLoadBalancerNames ( String ... loadBalancerNames ) { } } | if ( this . loadBalancerNames == null ) { setLoadBalancerNames ( new com . amazonaws . internal . SdkInternalList < String > ( loadBalancerNames . length ) ) ; } for ( String ele : loadBalancerNames ) { this . loadBalancerNames . add ( ele ) ; } return this ; |
public class DomUtils { /** * Serializes a DOM . The OutputStream handed over to this method is not closed inside this method . */
static void serialize ( final Document doc , final OutputStream os , final String encoding ) throws TransformerFactoryConfigurationError , TransformerException , IOException { } } | if ( doc == null ) throw new IllegalArgumentException ( "No document provided." ) ; if ( os == null ) throw new IllegalArgumentException ( "No output stream provided" ) ; if ( encoding == null || encoding . isEmpty ( ) ) throw new IllegalArgumentException ( "No encoding provided." ) ; final TransformerFactory transformerFactory = TransformerFactory . newInstance ( ) ; transformerFactory . setAttribute ( "indent-number" , Integer . valueOf ( 2 ) ) ; final Transformer t = transformerFactory . newTransformer ( ) ; t . setOutputProperty ( OutputKeys . OMIT_XML_DECLARATION , "no" ) ; t . setOutputProperty ( OutputKeys . METHOD , "xml" ) ; t . setOutputProperty ( OutputKeys . INDENT , "yes" ) ; t . setOutputProperty ( OutputKeys . ENCODING , encoding ) ; final OutputStreamWriter osw = new OutputStreamWriter ( os , encoding ) ; t . transform ( new DOMSource ( doc ) , new StreamResult ( osw ) ) ; osw . flush ( ) ; |
public class ImageScaling { /** * Same as { @ link # squareScaling ( BufferedImage ) } but takes the imageFileName instead of a
* { @ code BufferedImage } .
* @ param imageFilename
* @ return
* @ throws IOException */
public BufferedImage squareScaling ( String imageFilename ) throws IOException { } } | // read the image
BufferedImage image ; try { // first try reading with the default class
image = ImageIO . read ( new File ( imageFilename ) ) ; } catch ( IllegalArgumentException e ) { // if it fails retry with the corrected class
// This exception is probably because of a grayscale jpeg image .
System . out . println ( "Exception: " + e . getMessage ( ) + " | Image: " + imageFilename ) ; image = ImageIOGreyScale . read ( new File ( imageFilename ) ) ; } return squareScaling ( image ) ; |
public class HighwayHash { /** * Computes the hash value after all bytes were processed . Invalidates the
* state .
* NOTE : The 256 - bit HighwayHash algorithm is not yet frozen and subject to change .
* @ return array of size 4 containing 256 - bit hash */
public long [ ] finalize256 ( ) { } } | permuteAndUpdate ( ) ; permuteAndUpdate ( ) ; permuteAndUpdate ( ) ; permuteAndUpdate ( ) ; permuteAndUpdate ( ) ; permuteAndUpdate ( ) ; permuteAndUpdate ( ) ; permuteAndUpdate ( ) ; permuteAndUpdate ( ) ; permuteAndUpdate ( ) ; done = true ; long [ ] hash = new long [ 4 ] ; modularReduction ( v1 [ 1 ] + mul1 [ 1 ] , v1 [ 0 ] + mul1 [ 0 ] , v0 [ 1 ] + mul0 [ 1 ] , v0 [ 0 ] + mul0 [ 0 ] , hash , 0 ) ; modularReduction ( v1 [ 3 ] + mul1 [ 3 ] , v1 [ 2 ] + mul1 [ 2 ] , v0 [ 3 ] + mul0 [ 3 ] , v0 [ 2 ] + mul0 [ 2 ] , hash , 2 ) ; return hash ; |
public class BNFHeadersImpl { /** * Method to overlay the new header value onto the older value in the parse
* buffers .
* @ param elem */
private void overlayValue ( HeaderElement elem ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Overlaying existing header: " + elem . getName ( ) ) ; } int next_index = this . lastCRLFBufferIndex ; int next_pos = this . lastCRLFPosition ; if ( null != elem . nextSequence && ! elem . nextSequence . wasAdded ( ) ) { next_index = elem . nextSequence . getLastCRLFBufferIndex ( ) ; next_pos = elem . nextSequence . getLastCRLFPosition ( ) ; } WsByteBuffer buffer = this . parseBuffers [ elem . getLastCRLFBufferIndex ( ) ] ; buffer . position ( elem . getLastCRLFPosition ( ) + ( elem . isLastCRLFaCR ( ) ? 2 : 1 ) ) ; if ( next_index == elem . getLastCRLFBufferIndex ( ) ) { // all in one buffer
buffer . put ( elem . getKey ( ) . getMarshalledByteArray ( foundCompactHeader ( ) ) ) ; buffer . put ( elem . asRawBytes ( ) , elem . getOffset ( ) , elem . getValueLength ( ) ) ; } else { // header straddles buffers
int index = elem . getLastCRLFBufferIndex ( ) ; index = overlayBytes ( elem . getKey ( ) . getMarshalledByteArray ( foundCompactHeader ( ) ) , 0 , - 1 , index ) ; index = overlayBytes ( elem . asRawBytes ( ) , elem . getOffset ( ) , elem . getValueLength ( ) , index ) ; buffer = this . parseBuffers [ index ] ; } // pad trailing whitespace if we need it
int start = buffer . position ( ) ; if ( start < next_pos ) { scribbleWhiteSpace ( buffer , start , next_pos ) ; } |
public class QualitygatesService { /** * This is part of the internal API .
* This is a POST request .
* @ see < a href = " https : / / next . sonarqube . com / sonarqube / web _ api / api / qualitygates / update _ condition " > Further information about this action online ( including a response example ) < / a >
* @ since 4.3 */
public void updateCondition ( UpdateConditionRequest request ) { } } | call ( new PostRequest ( path ( "update_condition" ) ) . setParam ( "error" , request . getError ( ) ) . setParam ( "id" , request . getId ( ) ) . setParam ( "metric" , request . getMetric ( ) ) . setParam ( "op" , request . getOp ( ) ) . setParam ( "organization" , request . getOrganization ( ) ) , UpdateConditionResponse . parser ( ) ) ; |
public class hqlParser { /** * hql . g : 649:1 : collectionExpr : ( ELEMENTS ^ | INDICES ^ ) OPEN ! path CLOSE ! ; */
public final hqlParser . collectionExpr_return collectionExpr ( ) throws RecognitionException { } } | hqlParser . collectionExpr_return retval = new hqlParser . collectionExpr_return ( ) ; retval . start = input . LT ( 1 ) ; CommonTree root_0 = null ; Token ELEMENTS274 = null ; Token INDICES275 = null ; Token OPEN276 = null ; Token CLOSE278 = null ; ParserRuleReturnScope path277 = null ; CommonTree ELEMENTS274_tree = null ; CommonTree INDICES275_tree = null ; CommonTree OPEN276_tree = null ; CommonTree CLOSE278_tree = null ; try { // hql . g : 650:2 : ( ( ELEMENTS ^ | INDICES ^ ) OPEN ! path CLOSE ! )
// hql . g : 650:4 : ( ELEMENTS ^ | INDICES ^ ) OPEN ! path CLOSE !
{ root_0 = ( CommonTree ) adaptor . nil ( ) ; // hql . g : 650:4 : ( ELEMENTS ^ | INDICES ^ )
int alt99 = 2 ; int LA99_0 = input . LA ( 1 ) ; if ( ( LA99_0 == ELEMENTS ) ) { alt99 = 1 ; } else if ( ( LA99_0 == INDICES ) ) { alt99 = 2 ; } else { NoViableAltException nvae = new NoViableAltException ( "" , 99 , 0 , input ) ; throw nvae ; } switch ( alt99 ) { case 1 : // hql . g : 650:5 : ELEMENTS ^
{ ELEMENTS274 = ( Token ) match ( input , ELEMENTS , FOLLOW_ELEMENTS_in_collectionExpr3253 ) ; ELEMENTS274_tree = ( CommonTree ) adaptor . create ( ELEMENTS274 ) ; root_0 = ( CommonTree ) adaptor . becomeRoot ( ELEMENTS274_tree , root_0 ) ; } break ; case 2 : // hql . g : 650:17 : INDICES ^
{ INDICES275 = ( Token ) match ( input , INDICES , FOLLOW_INDICES_in_collectionExpr3258 ) ; INDICES275_tree = ( CommonTree ) adaptor . create ( INDICES275 ) ; root_0 = ( CommonTree ) adaptor . becomeRoot ( INDICES275_tree , root_0 ) ; } break ; } OPEN276 = ( Token ) match ( input , OPEN , FOLLOW_OPEN_in_collectionExpr3262 ) ; pushFollow ( FOLLOW_path_in_collectionExpr3265 ) ; path277 = path ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , path277 . getTree ( ) ) ; CLOSE278 = ( Token ) match ( input , CLOSE , FOLLOW_CLOSE_in_collectionExpr3267 ) ; } retval . stop = input . LT ( - 1 ) ; retval . tree = ( CommonTree ) adaptor . rulePostProcessing ( root_0 ) ; adaptor . setTokenBoundaries ( retval . tree , retval . start , retval . stop ) ; } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; retval . tree = ( CommonTree ) adaptor . errorNode ( input , retval . start , input . LT ( - 1 ) , re ) ; } finally { // do for sure before leaving
} return retval ; |
public class JsonUtil { /** * Creates the resource parsed from JSON .
* Uses the ' jcr : primaryType ' property from the propertySet to determine
* the right primary type for the new node .
* Uses the ' jcr : mixinTypes ' property to set up the mixin types of the node
* before the other properties are set ( it ' s important that all main type settings
* are done before the properties are set to avoid constraint violations ) .
* @ param resolver
* @ param path
* @ param propertiesSet
* @ param factory
* @ param mapping
* @ return
* @ throws RepositoryException */
public static Resource createResource ( ResourceResolver resolver , String path , Map < String , JsonProperty > propertiesSet , ValueFactory factory , MappingRules mapping ) throws RepositoryException { } } | // determine the new nodes primary type from the properties
JsonProperty primaryType = propertiesSet . get ( PropertyUtil . PROP_PRIMARY_TYPE ) ; Resource resource = ResourceUtil . getOrCreateResource ( resolver , path , primaryType != null ? ( String ) primaryType . value : null ) ; if ( resource != null ) { JsonProperty mixinTypes = propertiesSet . get ( PropertyUtil . PROP_MIXIN_TYPES ) ; if ( mixinTypes != null ) { // import mixin types property first ( ! )
if ( ! importJsonProperty ( factory , resource , mixinTypes , mapping ) ) { propertiesSet . remove ( PropertyUtil . PROP_MIXIN_TYPES ) ; } } for ( Map . Entry < String , JsonProperty > entry : propertiesSet . entrySet ( ) ) { JsonProperty property = entry . getValue ( ) ; // import all the other properties - not the primary and mixin types
if ( ! PropertyUtil . PROP_PRIMARY_TYPE . equals ( property . name ) && ! PropertyUtil . PROP_MIXIN_TYPES . equals ( property . name ) ) { if ( ! importJsonProperty ( factory , resource , property , mapping ) ) { entry . setValue ( null ) ; } } } } return resource ; |
public class PropertyUtils { /** * Similar to { @ link PropertyUtils # getPropertyClass ( Class , String ) } but returns the property class for the tail of the given
* property path .
* @ param clazz bean to be accessed
* @ param propertyPath bean ' s fieldName
* @ return property class */
public static Class < ? > getPropertyClass ( Class < ? > clazz , List < String > propertyPath ) { } } | Class < ? > current = clazz ; for ( String propertyName : propertyPath ) { current = getPropertyClass ( current , propertyName ) ; } return current ; |
public class Closeables { /** * Creates a closeable iterator from the given iterator that does nothing when close is called .
* @ param iterator The iterator to wrap to allow it to become a closeable iterator
* @ param < E > The type of the iterators
* @ return An iterator that does nothing when closed */
public static < E > CloseableIterator < E > iterator ( Iterator < ? extends E > iterator ) { } } | if ( iterator instanceof CloseableIterator ) { return ( CloseableIterator < E > ) iterator ; } return new IteratorAsCloseableIterator < > ( iterator ) ; |
public class ExtensionsInner { /** * Creates an HDInsight cluster extension .
* @ param resourceGroupName The name of the resource group .
* @ param clusterName The name of the cluster .
* @ param extensionName The name of the cluster extension .
* @ param parameters The cluster extensions create request .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < Void > createAsync ( String resourceGroupName , String clusterName , String extensionName , ExtensionInner parameters ) { } } | return createWithServiceResponseAsync ( resourceGroupName , clusterName , extensionName , parameters ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
public class DataFlow { /** * Run the { @ code transfer } dataflow analysis over the method , lambda or initializer which is the
* leaf of the { @ code path } .
* < p > For caching , we make the following assumptions : - if two paths to methods are { @ code equal } ,
* their control flow graph is the same . - if two transfer functions are { @ code equal } , and are
* run over the same control flow graph , the analysis result is the same . - for all contexts , the
* analysis result is the same . */
private < A extends AbstractValue < A > , S extends Store < S > , T extends TransferFunction < A , S > > Result < A , S , T > dataflow ( TreePath path , Context context , T transfer ) { } } | final ProcessingEnvironment env = JavacProcessingEnvironment . instance ( context ) ; final ControlFlowGraph cfg = cfgCache . getUnchecked ( CfgParams . create ( path , env ) ) ; final AnalysisParams aparams = AnalysisParams . create ( transfer , cfg , env ) ; @ SuppressWarnings ( "unchecked" ) final Analysis < A , S , T > analysis = ( Analysis < A , S , T > ) analysisCache . getUnchecked ( aparams ) ; return new Result < A , S , T > ( ) { @ Override public Analysis < A , S , T > getAnalysis ( ) { return analysis ; } @ Override public ControlFlowGraph getControlFlowGraph ( ) { return cfg ; } } ; |
public class BatchTinCanAPIProvider { /** * Send a batch of LRS statements . MUST BE SCHEDULED ! Failure to properly configure this class
* will result in memory leaks . */
public void sendBatch ( ) { } } | LrsStatement statement = null ; List < LrsStatement > list = new ArrayList < LrsStatement > ( ) ; while ( ( statement = statementQueue . poll ( ) ) != null ) { list . add ( statement ) ; } if ( ! list . isEmpty ( ) ) { postStatementList ( list ) ; } |
public class SVGParser { /** * Parse a list of Length / Coords */
private static List < Length > parseLengthList ( String val ) throws SVGParseException { } } | if ( val . length ( ) == 0 ) throw new SVGParseException ( "Invalid length list (empty string)" ) ; List < Length > coords = new ArrayList < > ( 1 ) ; TextScanner scan = new TextScanner ( val ) ; scan . skipWhitespace ( ) ; while ( ! scan . empty ( ) ) { float scalar = scan . nextFloat ( ) ; if ( Float . isNaN ( scalar ) ) throw new SVGParseException ( "Invalid length list value: " + scan . ahead ( ) ) ; Unit unit = scan . nextUnit ( ) ; if ( unit == null ) unit = Unit . px ; coords . add ( new Length ( scalar , unit ) ) ; scan . skipCommaWhitespace ( ) ; } return coords ; |
public class ArtifactInfoMap { /** * Add a { @ link ArtifactInfo } to the Map
* @ param bundleInfo */
public void add ( ArtifactInfo < BundleInfoContext > bundleInfo ) { } } | List < ArtifactInfo < BundleInfoContext > > list = artifacts . get ( bundleInfo . getId ( ) ) ; if ( list == null ) { list = new ArrayList < > ( ) ; artifacts . put ( bundleInfo . getId ( ) , list ) ; } list . add ( bundleInfo ) ; Collections . sort ( list , Collections . reverseOrder ( ) ) ; |
public class AbsAAFLur { /** * This is where you build AAF CLient Code . Answer the question " Is principal " bait " in the " pond " */
public boolean fish ( String bait , Permission pond ) { } } | if ( isDebug ( bait ) ) { boolean rv = false ; StringBuilder sb = new StringBuilder ( "Log for " ) ; sb . append ( bait ) ; if ( supports ( bait ) ) { User < PERM > user = getUser ( bait ) ; if ( user == null ) { sb . append ( "\n\tUser is not in Cache" ) ; } else { if ( user . noPerms ( ) ) sb . append ( "\n\tUser has no Perms" ) ; if ( user . permExpired ( ) ) { sb . append ( "\n\tUser's perm expired [" ) ; sb . append ( new Date ( user . permExpires ( ) ) ) ; sb . append ( ']' ) ; } else { sb . append ( "\n\tUser's perm expires [" ) ; sb . append ( new Date ( user . permExpires ( ) ) ) ; sb . append ( ']' ) ; } } if ( user == null || ( user . noPerms ( ) && user . permExpired ( ) ) ) { user = loadUser ( bait ) ; sb . append ( "\n\tloadUser called" ) ; } if ( user == null ) { sb . append ( "\n\tUser was not Loaded" ) ; } else if ( user . contains ( pond ) ) { sb . append ( "\n\tUser contains " ) ; sb . append ( pond . getKey ( ) ) ; rv = true ; } else { sb . append ( "\n\tUser does not contain " ) ; sb . append ( pond . getKey ( ) ) ; List < Permission > perms = new ArrayList < Permission > ( ) ; user . copyPermsTo ( perms ) ; for ( Permission p : perms ) { sb . append ( "\n\t\t" ) ; sb . append ( p . getKey ( ) ) ; } } } else { sb . append ( "AAF Lur does not support [" ) ; sb . append ( bait ) ; sb . append ( "]" ) ; } aaf . access . log ( Level . INFO , sb ) ; return rv ; } else { if ( supports ( bait ) ) { User < PERM > user = getUser ( bait ) ; if ( user == null || ( user . noPerms ( ) && user . permExpired ( ) ) ) { user = loadUser ( bait ) ; } return user == null ? false : user . contains ( pond ) ; } return false ; } |
public class GrpcServerBuilder { /** * Add a service to this server .
* @ param serviceDefinition the service definition of new service
* @ return an updated instance of this { @ link GrpcServerBuilder } */
public GrpcServerBuilder addService ( GrpcService serviceDefinition ) { } } | ServerServiceDefinition service = serviceDefinition . getServiceDefinition ( ) ; if ( SecurityUtils . isAuthenticationEnabled ( mConfiguration ) && serviceDefinition . isAuthenticated ( ) ) { // Intercept the service with authenticated user injector .
service = ServerInterceptors . intercept ( service , new AuthenticatedUserInjector ( mAuthenticationServer ) ) ; } mNettyServerBuilder = mNettyServerBuilder . addService ( service ) ; return this ; |
public class Fat { /** * Allocate a series of clusters for a new file .
* @ param nrClusters when number of clusters to allocate
* @ return long
* @ throws IOException if there are no free clusters */
public long [ ] allocNew ( int nrClusters ) throws IOException { } } | final long rc [ ] = new long [ nrClusters ] ; rc [ 0 ] = allocNew ( ) ; for ( int i = 1 ; i < nrClusters ; i ++ ) { rc [ i ] = allocAppend ( rc [ i - 1 ] ) ; } return rc ; |
public class Ix { /** * Calls the given action just before when the consumer calls next ( ) of this Ix . iterator ( ) .
* The result ' s iterator ( ) forwards calls to remove ( ) to this Iterator .
* @ param action the action to call for each item
* @ return the new Ix instance
* @ throws NullPointerException if action is null
* @ since 1.0 */
public final Ix < T > doOnNext ( IxConsumer < ? super T > action ) { } } | return new IxDoOn < T > ( this , nullCheck ( action , "action is null" ) , IxEmptyAction . instance0 ( ) ) ; |
public class CleverTapAPI { /** * Validation */
private ValidationResult popValidationResult ( ) { } } | // really a shift
ValidationResult vr = null ; synchronized ( pendingValidationResultsLock ) { try { if ( ! pendingValidationResults . isEmpty ( ) ) { vr = pendingValidationResults . remove ( 0 ) ; } } catch ( Exception e ) { // no - op
} } return vr ; |
public class ClassNodeUtils {
    /**
     * Determine if an explicit (non-generated) constructor is in the class.
     *
     * @param xform if non null, add an error if an explicit constructor is found
     * @param cNode the type of the containing class
     * @return true if an explicit (non-generated) constructor was found
     */
    public static boolean hasExplicitConstructor(AbstractASTTransformation xform, ClassNode cNode) {
        List<ConstructorNode> declaredConstructors = cNode.getDeclaredConstructors();
        for (ConstructorNode constructorNode : declaredConstructors) {
            // allow constructors added by other transforms if flagged as Generated
            if (hasAnnotation(constructorNode, GENERATED_TYPE)) {
                continue;
            }
            // Found an explicit constructor; report it on the transform when one was given.
            if (xform != null) {
                xform.addError("Error during " + xform.getAnnotationName()
                        + " processing. Explicit constructors not allowed for class: "
                        + cNode.getNameWithoutPackage(), constructorNode);
            }
            // Returns on the FIRST explicit constructor; remaining ones are not reported.
            return true;
        }
        return false;
    }
}
public class None {
    /**
     * Mapping the absent value is still the absent value, so the shared singleton is
     * returned. The cast is safe because None carries no value of type T.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <O> Option<O> map(Function<? super T, ? extends O> mapper) {
        return (Option<O>) INSTANCE;
    }
}
public class WindowsRegistry { /** * Delete given key from registry .
* @ param hk the HKEY
* @ param key the key to be deleted
* @ throws RegistryException when something is not right */
public void deleteKey ( HKey hk , String key ) throws RegistryException { } } | int rc = - 1 ; try { rc = ReflectedMethods . deleteKey ( hk . root ( ) , hk . hex ( ) , key ) ; } catch ( Exception e ) { throw new RegistryException ( "Cannot delete key " + key , e ) ; } if ( rc != RC . SUCCESS ) { throw new RegistryException ( "Cannot delete key " + key + ". Return code is " + rc ) ; } |
public class IsANumberValidator {
    /**
     * Validates that {@code model} parses completely as a number in the configured
     * locale (or the default locale when none is set). A problem is added and false
     * returned when parsing stops early or fails.
     *
     * (non-Javadoc)
     * @see com.fs.commons.desktop.validation.Validator#validate(com.fs.commons.desktop.
     * validation.Problems, java.lang.String, java.lang.Object)
     */
    @Override
    public boolean validate(final Problems problems, final String compName, final String model) {
        // NOTE(review): a null model would NPE on model.length() below — presumably the
        // framework never passes null here; confirm against callers.
        final ParsePosition p = new ParsePosition(0);
        try {
            NumberFormat.getNumberInstance(this.locale == null ? Locale.getDefault() : this.locale)
                    .parse(model, p);
            // The whole string must have been consumed and no parse error recorded.
            if (model.length() != p.getIndex() || p.getErrorIndex() != -1) {
                problems.add(ValidationBundle.getMessage(IsANumberValidator.class, "NOT_A_NUMBER", model, compName)); // NOI18N
                return false;
            }
        } catch (final NumberFormatException nfe) {
            // Defensive: NumberFormat.parse(String, ParsePosition) reports failure via the
            // ParsePosition rather than throwing, so this branch is not expected to fire.
            problems.add(ValidationBundle.getMessage(IsANumberValidator.class, "NOT_A_NUMBER", model, compName)); // NOI18N
            return false;
        }
        return true;
    }
}
public class XmlUtils { /** * 转MAP
* @ param element
* @ return
* @ author */
public static Map < String , String > toMap ( Element element ) { } } | Map < String , String > rest = new HashMap < String , String > ( ) ; List < Element > els = element . elements ( ) ; for ( Element el : els ) { rest . put ( el . getName ( ) . toLowerCase ( ) , el . getTextTrim ( ) ) ; } return rest ; |
public class sslservice_sslciphersuite_binding {
    /**
     * Use this API to fetch sslservice_sslciphersuite_binding resources of given name.
     *
     * @param service the NITRO service used to issue the request
     * @param servicename name of the SSL service whose cipher-suite bindings are fetched
     * @return the bindings returned by the appliance
     * @throws Exception if the NITRO request fails
     */
    public static sslservice_sslciphersuite_binding[] get(nitro_service service, String servicename) throws Exception {
        // Build a filter object carrying only the service name, then query by it.
        sslservice_sslciphersuite_binding obj = new sslservice_sslciphersuite_binding();
        obj.set_servicename(servicename);
        sslservice_sslciphersuite_binding response[] = (sslservice_sslciphersuite_binding[]) obj.get_resources(service);
        return response;
    }
}
public class ConfigUtils {
    /**
     * Return string value at <code>path</code> if <code>config</code> has path.
     * If not, return an empty string.
     *
     * @param config in which the path may be present
     * @param path key to look for in the config object
     * @return string value at <code>path</code> if <code>config</code> has path; otherwise ""
     */
    public static String emptyIfNotPresent(Config config, String path) {
        // Delegates to the generic lookup with the empty string as the default.
        return getString(config, path, StringUtils.EMPTY);
    }
}
public class GenerateConfigurationVisitor {
    /**
     * Creates the pipeline, visits all streamable nodes and then creates the
     * ClientBootstrap with the pipeline and remote address.
     *
     * @param connectNode the connect node being visited
     * @param state shared visitor state; its pipeline map and maskers are (re)initialized here
     * @return the accumulated configuration
     */
    @Override
    public Configuration visit(AstConnectNode connectNode, State state) {
        // masking is a no-op by default for each stream
        state.readUnmasker = Masker.IDENTITY_MASKER;
        state.writeMasker = Masker.IDENTITY_MASKER;
        // LinkedHashMap: handler insertion order defines pipeline order.
        state.pipelineAsMap = new LinkedHashMap<>();
        for (AstStreamableNode streamable : connectNode.getStreamables()) {
            streamable.accept(this, state);
        }
        /* Add the completion handler */
        String handlerName = String.format("completion#%d", state.pipelineAsMap.size() + 1);
        CompletionHandler completionHandler = new CompletionHandler();
        completionHandler.setRegionInfo(connectNode.getRegionInfo());
        state.pipelineAsMap.put(handlerName, completionHandler);
        // Optional barrier that must complete before the connect is attempted.
        String awaitName = connectNode.getAwaitName();
        Barrier awaitBarrier = null;
        if (awaitName != null) {
            awaitBarrier = state.lookupBarrier(awaitName);
        }
        final ChannelPipeline pipeline = pipelineFromMap(state.pipelineAsMap);
        /*
         * TODO. This is weird. I will only have one pipeline per connect. But if I don't set a factory When a connect
         * occurs it will create a shallow copy of the pipeline I set. This doesn't work due to the beforeAdd methods in
         * ExecutionHandler. Namely when the pipeline is cloned it uses the same handler objects so the handler future
         * is not null and we fail with an assertion error.
         */
        ChannelPipelineFactory pipelineFactory = new ChannelPipelineFactory() {
            private int numCalled;

            @Override
            public ChannelPipeline getPipeline() {
                // Enforce single use: the same pipeline instance must never be handed out twice.
                if (numCalled++ != 0) {
                    throw new RobotException("getPipeline called more than once");
                }
                return pipeline;
            }
        };
        // Now that connect supports barrier and expression value, connect uri may not be available at this point.
        // To defer the evaluation of connect uri and initialization of ClientBootstrap, LocationResolver and
        // ClientResolver are created with information necessary to create ClientBootstrap when the connect uri
        // is available.
        Supplier<URI> locationResolver = connectNode.getLocation()::getValue;
        OptionsResolver optionsResolver = new OptionsResolver(connectNode.getOptions());
        ClientBootstrapResolver clientResolver = new ClientBootstrapResolver(bootstrapFactory, addressFactory,
                pipelineFactory, locationResolver, optionsResolver, awaitBarrier, connectNode.getRegionInfo());
        // retain pipelines for tear down
        state.configuration.getClientAndServerPipelines().add(pipeline);
        state.configuration.getClientResolvers().add(clientResolver);
        return state.configuration;
    }
}
public class ResourceUtil { /** * retrieves all children of a sling : resourceType */
public static List < Resource > getChildrenByResourceType ( final Resource resource , String resourceType ) { } } | final ArrayList < Resource > children = new ArrayList < > ( ) ; if ( resource != null ) { for ( final Resource child : resource . getChildren ( ) ) { if ( child . isResourceType ( resourceType ) ) { children . add ( child ) ; } } } return children ; |
public class Iterate {
    /**
     * Filters a collection into a PartitionIterable based on a predicate.
     *
     * Example using a Java 8 lambda expression:
     * <pre>
     * PartitionIterable&lt;Person&gt; newYorkersAndNonNewYorkers =
     *     Iterate.<b>partition</b>(people, person -&gt; person.getAddress().getState().getName().equals("New York"));
     * </pre>
     *
     * @param iterable the source elements; must not be null
     * @param predicate decides which partition each element belongs to
     * @return the partitioned view of the elements
     * @throws IllegalArgumentException if {@code iterable} is null
     */
    public static <T> PartitionIterable<T> partition(Iterable<T> iterable, Predicate<? super T> predicate) {
        // Dispatch in order of specificity: native RichIterable first, then the
        // optimized ArrayList path, then generic List, then plain Iterable.
        if (iterable instanceof RichIterable<?>) {
            return ((RichIterable<T>) iterable).partition(predicate);
        }
        if (iterable instanceof ArrayList) {
            return ArrayListIterate.partition((ArrayList<T>) iterable, predicate);
        }
        if (iterable instanceof List) {
            return ListIterate.partition((List<T>) iterable, predicate);
        }
        if (iterable != null) {
            return IterableIterate.partition(iterable, predicate);
        }
        throw new IllegalArgumentException("Cannot perform a partition on null");
    }
}
public class Request {
    /**
     * Creates a new Request configured to post a GraphObject to a particular graph path,
     * to either create or update the object at that path.
     *
     * @param session the Session to use, or null; if non-null, the session must be in an opened state
     * @param graphPath the graph path to retrieve, create, or delete
     * @param graphObject the GraphObject to create or update
     * @param callback a callback that will be called when the request is completed to handle
     *        success or error conditions
     * @return a Request that is ready to execute
     */
    public static Request newPostRequest(Session session, String graphPath, GraphObject graphObject, Callback callback) {
        // POST with no extra parameters; the payload is carried by the graph object.
        Request request = new Request(session, graphPath, null, HttpMethod.POST, callback);
        request.setGraphObject(graphObject);
        return request;
    }
}
public class ExampleUtils {
    /**
     * Returns a Pubsub client builder using the specified {@link PubsubOptions}.
     * The credential is chained with a retrying initializer, and root URL, application
     * name and request tracing are taken from the options.
     */
    private static Pubsub.Builder newPubsubClient(PubsubOptions options) {
        return new Pubsub.Builder(Transport.getTransport(), Transport.getJsonFactory(),
                chainHttpRequestInitializer(options.getGcpCredential(),
                        // Do not log 404. It clutters the output and is possibly even required by the caller.
                        new RetryHttpRequestInitializer(ImmutableList.of(404))))
                .setRootUrl(options.getPubsubRootUrl())
                .setApplicationName(options.getAppName())
                .setGoogleClientRequestInitializer(options.getGoogleApiTrace());
    }
}
public class Discovery {
    /**
     * Percentage of queries with an associated event.
     *
     * The percentage of queries using the **natural_language_query** parameter that have a
     * corresponding \"click\" event over a specified time window. This metric requires having
     * integrated event tracking in your application using the **Events** API.
     *
     * @param getMetricsEventRateOptions the {@link GetMetricsEventRateOptions} containing the
     *        options for the call; may be null, in which case no query filters are sent
     * @return a {@link ServiceCall} with a response type of {@link MetricResponse}
     */
    public ServiceCall<MetricResponse> getMetricsEventRate(GetMetricsEventRateOptions getMetricsEventRateOptions) {
        String[] pathSegments = { "v1/metrics/event_rate" };
        RequestBuilder builder = RequestBuilder.get(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments));
        builder.query("version", versionDate);
        // Attach standard SDK analytics headers.
        Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("discovery", "v1", "getMetricsEventRate");
        for (Entry<String, String> header : sdkHeaders.entrySet()) {
            builder.header(header.getKey(), header.getValue());
        }
        builder.header("Accept", "application/json");
        // All options are optional; only set query params that were provided.
        if (getMetricsEventRateOptions != null) {
            if (getMetricsEventRateOptions.startTime() != null) {
                builder.query("start_time", String.valueOf(getMetricsEventRateOptions.startTime()));
            }
            if (getMetricsEventRateOptions.endTime() != null) {
                builder.query("end_time", String.valueOf(getMetricsEventRateOptions.endTime()));
            }
            if (getMetricsEventRateOptions.resultType() != null) {
                builder.query("result_type", getMetricsEventRateOptions.resultType());
            }
        }
        return createServiceCall(builder.build(), ResponseConverterUtils.getObject(MetricResponse.class));
    }
}
public class HierarchicalLowestChildDenyOverridesPolicyAlg {
    /**
     * Applies the combining rule to the set of policies based on the evaluation context.
     * Deny overrides: the first applicable policy that evaluates to DENY short-circuits
     * the combination; otherwise PERMIT wins over INDETERMINATE, which wins over
     * NOT_APPLICABLE.
     *
     * @param context the context from the request
     * @param parameters a (possibly empty) non-null <code>List</code> of <code>CombinerParameter</code>s
     * @param policyElements the policies to combine
     * @return the result of running the combining algorithm
     */
    @Override
    @SuppressWarnings("unchecked")
    public Result combine(EvaluationCtx context, @SuppressWarnings("rawtypes") List parameters,
            @SuppressWarnings("rawtypes") List policyElements) {
        logger.info("Combining using: " + getIdentifier());
        boolean atLeastOneError = false;
        boolean atLeastOnePermit = false;
        Set<Set<?>> denyObligations = new HashSet<Set<?>>();
        Status firstIndeterminateStatus = null;
        Set<AbstractPolicy> matchedPolicies = new HashSet<AbstractPolicy>();
        // First pass: match every policy against the context, collecting those that apply.
        Iterator<?> it = policyElements.iterator();
        while (it.hasNext()) {
            AbstractPolicy policy = ((PolicyCombinerElement) it.next()).getPolicy();
            // make sure that the policy matches the context
            MatchResult match = policy.match(context);
            if (match.getResult() == MatchResult.INDETERMINATE) {
                atLeastOneError = true;
                // keep track of the first error, regardless of cause
                if (firstIndeterminateStatus == null) {
                    firstIndeterminateStatus = match.getStatus();
                }
            } else if (match.getResult() == MatchResult.MATCH) {
                matchedPolicies.add(policy);
            }
        }
        // Second pass: evaluate only the applicable subset (hierarchical narrowing).
        Set<AbstractPolicy> applicablePolicies = getApplicablePolicies(context, matchedPolicies);
        for (AbstractPolicy policy : applicablePolicies) {
            Result result = policy.evaluate(context);
            int effect = result.getDecision();
            if (effect == Result.DECISION_DENY) {
                // Deny overrides everything: return immediately with its obligations.
                denyObligations.addAll(result.getObligations());
                return new Result(Result.DECISION_DENY, context.getResourceId().encode(), denyObligations);
            }
            if (effect == Result.DECISION_PERMIT) {
                atLeastOnePermit = true;
            } else if (effect == Result.DECISION_INDETERMINATE) {
                atLeastOneError = true;
                // keep track of the first error, regardless of cause
                if (firstIndeterminateStatus == null) {
                    firstIndeterminateStatus = result.getStatus();
                }
            }
        }
        // if we got a PERMIT, return it
        if (atLeastOnePermit) {
            return new Result(Result.DECISION_PERMIT, context.getResourceId().encode());
        }
        // if we got an INDETERMINATE, return it
        if (atLeastOneError) {
            return new Result(Result.DECISION_INDETERMINATE, firstIndeterminateStatus,
                    context.getResourceId().encode());
        }
        // if we got here, then nothing applied to us
        return new Result(Result.DECISION_NOT_APPLICABLE, context.getResourceId().encode());
    }
}
public class VoltCompiler {
    /**
     * Internal method for compiling the catalog and writing it to a jar file on disk.
     *
     * @param jarOutputPath the location to put the finished JAR; must not be null
     * @param cannonicalDDLIfAny canonical DDL reader, or null if not used
     * @param previousCatalogIfAny the previous catalog, or null for a fresh compile
     * @param ddlReaderList the list of DDL readers to compile
     * @param jarOutputRet the in-memory jar to populate or null if the caller doesn't provide one
     * @return true if successful
     */
    private boolean compileInternalToFile(final String jarOutputPath, final VoltCompilerReader cannonicalDDLIfAny,
            final Catalog previousCatalogIfAny, final List<VoltCompilerReader> ddlReaderList,
            final InMemoryJarfile jarOutputRet) {
        if (jarOutputPath == null) {
            addErr("The output jar path is null.");
            return false;
        }
        // Delegate the actual compilation; a null result means errors were already recorded.
        InMemoryJarfile jarOutput = compileInternal(cannonicalDDLIfAny, previousCatalogIfAny, ddlReaderList, jarOutputRet);
        if (jarOutput == null) {
            return false;
        }
        try {
            // writeToFile returns a deferred action that must be run to perform the write.
            jarOutput.writeToFile(new File(jarOutputPath)).run();
        } catch (final Exception e) {
            e.printStackTrace();
            addErr("Error writing catalog jar to disk: " + e.getMessage());
            return false;
        }
        return true;
    }
}
public class FileIndexBuilder {
    /**
     * Appends next record to this index builder and returns this.
     * Every {@code step} records, the byte offset of the current record is added to
     * the index before the record's size is accumulated.
     *
     * @param recordSize size of record measured in bytes; must be non-negative
     * @return this
     * @throws IllegalArgumentException if {@code recordSize} is negative
     */
    public FileIndexBuilder appendNextRecord(long recordSize) {
        checkIfDestroyed();
        if (recordSize < 0)
            throw new IllegalArgumentException("Size cannot be negative.");
        // Index the offset BEFORE adding this record's size, so the entry points at the
        // start of the record that begins a new step-sized group.
        if (currentRecord == step) {
            index.add(startingByte + currentByte);
            currentRecord = 0;
        }
        currentByte += recordSize;
        ++currentRecord;
        return this;
    }
}
public class DBFUtils {
    /**
     * Checks that a byte array contains some specific byte.
     *
     * @param array the array to search in; may be null, in which case false is returned
     * @param value the byte to search for
     * @return true if the array contains the specified value
     */
    public static boolean contains(byte[] array, byte value) {
        if (array == null) {
            return false;
        }
        for (int i = 0; i < array.length; i++) {
            if (array[i] == value) {
                return true;
            }
        }
        return false;
    }
}
public class MetricContext {
    /**
     * See {@link com.codahale.metrics.MetricRegistry#getMeters(com.codahale.metrics.MetricFilter)}.
     * This method will return fully-qualified metric names if the {@link MetricContext} is
     * configured to report fully-qualified metric names.
     */
    @Override
    public SortedMap<String, Meter> getMeters(MetricFilter filter) {
        // Pure delegation; the inner context applies the name-qualification policy.
        return this.innerMetricContext.getMeters(filter);
    }
}
public class CmsLogFileOptionProvider { /** * Gets the log file options . < p >
* @ return the log file options */
public static TreeSet < File > getLogFiles ( ) { } } | TreeSet < File > result = new TreeSet < > ( ) ; for ( File file : new File ( CmsLogFileApp . LOG_FOLDER ) . listFiles ( ) ) { result . add ( file ) ; } for ( String dir : getAdditionalLogDirectories ( ) ) { File file = new File ( dir ) ; if ( file . exists ( ) ) { if ( file . isDirectory ( ) ) { for ( File child : file . listFiles ( ) ) { if ( child . canRead ( ) ) { result . add ( child ) ; } else { LOG . error ( "Can not read " + child . getAbsolutePath ( ) ) ; } } } } } return result ; |
public class ApplicationContextHolder {
    /**
     * Callback of the ApplicationContextAware interface; stores the application context
     * in the static holder exactly once (later calls are ignored).
     *
     * @param applicationContext the Spring ApplicationContext to hold
     */
    @Override
    public void setApplicationContext(ApplicationContext applicationContext) {
        // NOTE(review): double-checked locking on `context` is only safe if the field is
        // declared volatile — confirm the field declaration; otherwise other threads may
        // observe a stale null after assignment.
        if (context == null) {
            synchronized (ApplicationContextHolder.class) {
                if (context == null) {
                    ApplicationContextHolder.context = applicationContext;
                }
            }
        }
    }
}
public class SarlCompiler {
    /**
     * Emits the Java code that unboxes a possibly-null wrapper expression to its primitive
     * type, substituting a primitive default ("false" or "0") when the expression is null.
     * Rename when the super function access is fixed (https://github.com/eclipse/xtext-extras/pull/411).
     *
     * @param wrapper the wrapper type of the expression
     * @param primitive the target primitive type
     * @param context the expression being compiled
     * @param appendable the output to write generated Java code to
     * @param expression deferred emitter for the wrapped expression's code
     */
    private void convertNullSafeWrapperToPrimitive(LightweightTypeReference wrapper, LightweightTypeReference primitive,
            XExpression context, ITreeAppendable appendable, Later expression) {
        // BEGIN Specific
        // Null is replaced by the primitive's zero value.
        final String defaultValue = primitive.isType(boolean.class) ? "false" : "0"; //$NON-NLS-1$ //$NON-NLS-2$
        // END Specific
        final XExpression normalized = normalizeBlockExpression(context);
        if (normalized instanceof XAbstractFeatureCall && !(context.eContainer() instanceof XAbstractFeatureCall)) {
            // Avoid javac bug
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=410797
            // TODO make that dependent on the compiler version (javac 1.7 fixed that bug)
            final XAbstractFeatureCall featureCall = (XAbstractFeatureCall) normalized;
            if (featureCall.isStatic()) {
                final JvmIdentifiableElement feature = featureCall.getFeature();
                if (feature instanceof JvmOperation) {
                    if (!((JvmOperation) feature).getTypeParameters().isEmpty()) {
                        // Generic static call: emit a cast instead of a *Value() call.
                        // BEGIN Specific
                        appendable.append("(("); //$NON-NLS-1$
                        expression.exec(appendable);
                        appendable.append(") == null ? "); //$NON-NLS-1$
                        appendable.append(defaultValue);
                        appendable.append(" : "); //$NON-NLS-1$
                        // END Specific
                        appendable.append("("); //$NON-NLS-1$
                        appendable.append(primitive);
                        appendable.append(") "); //$NON-NLS-1$
                        expression.exec(appendable);
                        // BEGIN Specific
                        appendable.append(") "); //$NON-NLS-1$
                        // END Specific
                        return;
                    }
                }
            }
        }
        // General case: emit (expr) == null ? default : [(Wrapper)] (expr).primitiveValue()
        // BEGIN Specific
        appendable.append("(("); //$NON-NLS-1$
        expression.exec(appendable);
        appendable.append(") == null ? "); //$NON-NLS-1$
        appendable.append(defaultValue);
        appendable.append(" : "); //$NON-NLS-1$
        // END Specific
        final boolean mustInsertTypeCast;
        try {
            // Invoked reflectively because the super implementation is not accessible here.
            mustInsertTypeCast = (Boolean) this.reflect.invoke(this, "mustInsertTypeCast", context, wrapper); //$NON-NLS-1$
        } catch (Exception exception) {
            throw new Error(exception);
        }
        if (mustInsertTypeCast) {
            appendable.append("("); //$NON-NLS-1$
            appendable.append(wrapper);
            appendable.append(") "); //$NON-NLS-1$
        }
        // BEGIN Specific
        appendable.append("("); //$NON-NLS-1$
        expression.exec(appendable);
        appendable.append(")"); //$NON-NLS-1$
        // END Specific
        appendable.append("."); //$NON-NLS-1$
        appendable.append(primitive);
        appendable.append("Value())"); //$NON-NLS-1$
    }
}
public class DatabaseFullPrunedBlockStore {
    /**
     * Create a new store for the given {@link NetworkParameters}.
     * Seeds the store with the genesis block header and sets both chain heads to it.
     *
     * @param params The network.
     * @throws BlockStoreException If the store couldn't be created.
     */
    private void createNewStore(NetworkParameters params) throws BlockStoreException {
        try {
            // Set up the genesis block. When we start out fresh, it is by
            // definition the top of the chain.
            StoredBlock storedGenesisHeader = new StoredBlock(params.getGenesisBlock().cloneAsHeader(),
                    params.getGenesisBlock().getWork(), 0);
            // The coinbase in the genesis block is not spendable. This is because of how Bitcoin Core inits
            // its database - the genesis transaction isn't actually in the db so its spent flags can never be updated.
            List<Transaction> genesisTransactions = Lists.newLinkedList();
            StoredUndoableBlock storedGenesis = new StoredUndoableBlock(params.getGenesisBlock().getHash(),
                    genesisTransactions);
            put(storedGenesisHeader, storedGenesis);
            setChainHead(storedGenesisHeader);
            setVerifiedChainHead(storedGenesisHeader);
        } catch (VerificationException e) {
            throw new RuntimeException(e); // Cannot happen.
        }
    }
}
public class HiCS {
    /**
     * Identifies high contrast subspaces in a given full-dimensional database.
     * Starts from all two-dimensional subspaces and apriori-style joins the best
     * candidates into progressively higher-dimensional subspaces, pruning dominated ones.
     *
     * @param relation the relation the HiCS should be evaluated for
     * @param subspaceIndex subspace indexes
     * @param random source of randomness for the contrast computation
     * @return a set of high contrast subspaces
     */
    private Set<HiCSSubspace> calculateSubspaces(Relation<? extends NumberVector> relation,
            ArrayList<ArrayDBIDs> subspaceIndex, Random random) {
        final int dbdim = RelationUtil.dimensionality(relation);
        FiniteProgress dprog = LOG.isVerbose() ? new FiniteProgress("Subspace dimensionality", dbdim, LOG) : null;
        if (dprog != null) {
            dprog.setProcessed(2, LOG);
        }
        TreeSet<HiCSSubspace> subspaceList = new TreeSet<>(HiCSSubspace.SORT_BY_SUBSPACE);
        // Bounded heap keeps only the `cutoff` best candidates per dimensionality.
        TopBoundedHeap<HiCSSubspace> dDimensionalList = new TopBoundedHeap<>(cutoff, HiCSSubspace.SORT_BY_CONTRAST_ASC);
        FiniteProgress prog = LOG.isVerbose()
                ? new FiniteProgress("Generating two-element subsets", (dbdim * (dbdim - 1)) >> 1, LOG) : null;
        // compute two-element sets of subspaces
        for (int i = 0; i < dbdim; i++) {
            for (int j = i + 1; j < dbdim; j++) {
                HiCSSubspace ts = new HiCSSubspace();
                ts.set(i);
                ts.set(j);
                calculateContrast(relation, ts, subspaceIndex, random);
                dDimensionalList.add(ts);
                LOG.incrementProcessed(prog);
            }
        }
        LOG.ensureCompleted(prog);
        IndefiniteProgress qprog = LOG.isVerbose() ? new IndefiniteProgress("Testing subspace candidates", LOG) : null;
        for (int d = 3; !dDimensionalList.isEmpty(); d++) {
            if (dprog != null) {
                dprog.setProcessed(d, LOG);
            }
            // result now contains all d-dimensional sets of subspaces
            ArrayList<HiCSSubspace> candidateList = new ArrayList<>(dDimensionalList.size());
            for (Heap<HiCSSubspace>.UnorderedIter it = dDimensionalList.unorderedIter(); it.valid(); it.advance()) {
                subspaceList.add(it.get());
                candidateList.add(it.get());
            }
            dDimensionalList.clear();
            // candidateList now contains the *m* best d-dimensional sets
            Collections.sort(candidateList, HiCSSubspace.SORT_BY_SUBSPACE);
            // TODO: optimize APRIORI style, by not even computing the bit set or?
            for (int i = 0; i < candidateList.size() - 1; i++) {
                for (int j = i + 1; j < candidateList.size(); j++) {
                    HiCSSubspace set1 = candidateList.get(i), set2 = candidateList.get(j);
                    HiCSSubspace joinedSet = new HiCSSubspace();
                    joinedSet.or(set1);
                    joinedSet.or(set2);
                    // Only keep joins that produce exactly (d+1... i.e. the next) cardinality d.
                    if (joinedSet.cardinality() != d) {
                        continue;
                    }
                    calculateContrast(relation, joinedSet, subspaceIndex, random);
                    dDimensionalList.add(joinedSet);
                    LOG.incrementProcessed(qprog);
                }
            }
            // Prune: drop candidates dominated by a higher-contrast superset.
            for (HiCSSubspace cand : candidateList) {
                for (Heap<HiCSSubspace>.UnorderedIter it = dDimensionalList.unorderedIter(); it.valid(); it.advance()) {
                    if (it.get().contrast > cand.contrast) {
                        subspaceList.remove(cand);
                        break;
                    }
                }
            }
        }
        LOG.setCompleted(qprog);
        if (dprog != null) {
            dprog.setProcessed(dbdim, LOG);
            dprog.ensureCompleted(LOG);
        }
        return subspaceList;
    }
}
public class GoogleHadoopFileSystemBase {
    /**
     * Makes the given path and all non-existent parents directories.
     * Has the semantics of Unix 'mkdir -p'.
     *
     * @param hadoopPath Given path.
     * @param permission Permissions to set on the given directory (ignored by the GCS backend).
     * @return true on success, false otherwise.
     * @throws IOException if an error occurs.
     */
    @Override
    public boolean mkdirs(Path hadoopPath, FsPermission permission) throws IOException {
        long startTime = System.nanoTime();
        Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
        checkOpen();
        logger.atFine().log("GHFS.mkdirs: %s, perm: %s", hadoopPath, permission);
        URI gcsPath = getGcsPath(hadoopPath);
        try {
            getGcsFs().mkdirs(gcsPath);
        } catch (java.nio.file.FileAlreadyExistsException faee) {
            // Need to convert to the Hadoop flavor of FileAlreadyExistsException.
            throw (FileAlreadyExistsException) new FileAlreadyExistsException(faee.getMessage()).initCause(faee);
        }
        // Record timing/count metrics before returning.
        long duration = System.nanoTime() - startTime;
        increment(Counter.MKDIRS);
        increment(Counter.MKDIRS_TIME, duration);
        return true;
    }
}
public class Matrix4 {
    /**
     * Sets this to a scale matrix using the components of the given vector.
     *
     * @param scale the per-axis scale factors
     * @return a reference to this matrix, for chaining.
     */
    public Matrix4 setToScale(IVector3 scale) {
        // Delegate to the component-wise overload.
        return setToScale(scale.x(), scale.y(), scale.z());
    }
}
public class CATSyncAsynchReader {
    /**
     * Sends the message in one transmission back down to our peer. If the messageSlices parameter is
     * not null then the message has already been encoded and does not need to be done again. This
     * may be in the case where the message was destined to be sent in chunks but is so small that it
     * does not seem worth it.
     *
     * @param jsMessage The entire message to send.
     * @param messageSlices The already encoded message slices, or null to encode here.
     * @throws UnsupportedEncodingException
     * @throws MessageCopyFailedException
     * @throws IncorrectMessageTypeException
     * @throws MessageEncodeFailedException
     */
    private void sendEntireMessage(JsMessage jsMessage, List<DataSlice> messageSlices)
            throws UnsupportedEncodingException, MessageCopyFailedException, IncorrectMessageTypeException,
            MessageEncodeFailedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "sendEntireMessage", new Object[] { jsMessage, messageSlices });
        int msgLen = 0;
        try {
            CommsServerByteBuffer buffer = poolManager.allocate();
            ConversationState convState = (ConversationState) conversation.getAttachment();
            buffer.putShort(convState.getConnectionObjectId());
            // Session id only needed when not using connection-level receive.
            if (!mainConsumer.getUsingConnectionReceive()) {
                buffer.putShort(mainConsumer.getConsumerSessionId());
            }
            // Put the message into the buffer in whatever way is suitable
            if (messageSlices == null) {
                msgLen = buffer.putMessage(jsMessage, convState.getCommsConnection(), conversation);
            } else {
                // NOTE: "putMessgeWithoutEncode" is the actual (misspelled) API method name.
                msgLen = buffer.putMessgeWithoutEncode(messageSlices);
            }
            // Decide on the segment
            int seg = JFapChannelConstants.SEG_RECEIVE_SESS_MSG_R;
            if (mainConsumer.getUsingConnectionReceive()) {
                seg = JFapChannelConstants.SEG_RECEIVE_CONN_MSG_R;
            }
            // Map the JMS priority onto the JFAP transport priority.
            int jfapPriority = JFapChannelConstants.getJFAPPriority(jsMessage.getPriority());
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "Sending with JFAP priority of " + jfapPriority);
            conversation.send(buffer, seg, requestNumber, jfapPriority, false, ThrottlingPolicy.BLOCK_THREAD, null);
            mainConsumer.messagesSent++;
        } catch (SIException e) {
            // No FFDC code needed
            // Only FFDC if we haven't received a meTerminated event.
            if (!((ConversationState) mainConsumer.getConversation().getAttachment()).hasMETerminated()) {
                FFDCFilter.processException(e, CLASS_NAME + ".sendEntireMessage",
                        CommsConstants.CATSYNCASYNCHREADER_SEND_MSG_01, this);
            }
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(tc, e.getMessage(), e);
            SibTr.error(tc, "COMMUNICATION_ERROR_SICO2015", e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "sendEntireMessage");
    }
}
public class CmafGroupSettingsMarshaller {
    /**
     * Marshall the given parameter object: writes each CMAF group setting field to the
     * protocol marshaller under its corresponding binding.
     *
     * @param cmafGroupSettings the settings to marshall; must not be null
     * @param protocolMarshaller the destination marshaller
     * @throws SdkClientException if the argument is null or any field fails to marshall
     */
    public void marshall(CmafGroupSettings cmafGroupSettings, ProtocolMarshaller protocolMarshaller) {
        if (cmafGroupSettings == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per field; bindings carry the wire names.
            protocolMarshaller.marshall(cmafGroupSettings.getBaseUrl(), BASEURL_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getClientCache(), CLIENTCACHE_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getCodecSpecification(), CODECSPECIFICATION_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getDestination(), DESTINATION_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getDestinationSettings(), DESTINATIONSETTINGS_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getEncryption(), ENCRYPTION_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getFragmentLength(), FRAGMENTLENGTH_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getManifestCompression(), MANIFESTCOMPRESSION_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getManifestDurationFormat(), MANIFESTDURATIONFORMAT_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getMinBufferTime(), MINBUFFERTIME_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getMinFinalSegmentLength(), MINFINALSEGMENTLENGTH_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getSegmentControl(), SEGMENTCONTROL_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getSegmentLength(), SEGMENTLENGTH_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getStreamInfResolution(), STREAMINFRESOLUTION_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getWriteDashManifest(), WRITEDASHMANIFEST_BINDING);
            protocolMarshaller.marshall(cmafGroupSettings.getWriteHlsManifest(), WRITEHLSMANIFEST_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class BitcoinTransactionFlinkInputFormat { /** * Saves the current state of the stream
* @ return current position in stream
* ( non - Javadoc )
* @ see org . apache . flink . api . common . io . CheckpointableInputFormat # getCurrentState ( ) */
@ Override public Tuple2 < Long , Long > getCurrentState ( ) throws IOException { } } | return new Tuple2 < > ( this . stream . getPos ( ) , this . currentTransactionCounterInBlock ) ; |
public class BaseLevel1 { /** * computes a vector - vector dot product .
* @ param n number of accessed element
* @ param alpha
* @ param X an INDArray
* @ param Y an INDArray
* @ return the vector - vector dot product of X and Y */
@ Override public double dot ( long n , double alpha , INDArray X , INDArray Y ) { } } | if ( Nd4j . getExecutioner ( ) . getProfilingMode ( ) == OpExecutioner . ProfilingMode . ALL ) OpProfiler . getInstance ( ) . processBlasCall ( false , X , Y ) ; if ( X . isSparse ( ) && ! Y . isSparse ( ) ) { return Nd4j . getSparseBlasWrapper ( ) . level1 ( ) . dot ( n , alpha , X , Y ) ; } else if ( ! X . isSparse ( ) && Y . isSparse ( ) ) { return Nd4j . getSparseBlasWrapper ( ) . level1 ( ) . dot ( n , alpha , Y , X ) ; } else if ( X . isSparse ( ) && Y . isSparse ( ) ) { // TODO - MKL doesn ' t contain such routines
return 0 ; } if ( X . data ( ) . dataType ( ) == DataType . DOUBLE ) { DefaultOpExecutioner . validateDataType ( DataType . DOUBLE , X , Y ) ; return ddot ( n , X , BlasBufferUtil . getBlasStride ( X ) , Y , BlasBufferUtil . getBlasStride ( Y ) ) ; } else if ( X . data ( ) . dataType ( ) == DataType . FLOAT ) { DefaultOpExecutioner . validateDataType ( DataType . FLOAT , X , Y ) ; return sdot ( n , X , BlasBufferUtil . getBlasStride ( X ) , Y , BlasBufferUtil . getBlasStride ( Y ) ) ; } else { DefaultOpExecutioner . validateDataType ( DataType . HALF , X , Y ) ; return hdot ( n , X , BlasBufferUtil . getBlasStride ( X ) , Y , BlasBufferUtil . getBlasStride ( Y ) ) ; } |
public class RESTBaseEntityV1ProxyHandler { /** * Checks the return value and creates a proxy for the content if required .
* @ param retValue The value to be returned .
* @ return The return value with proxied content . */
private Object checkAndProxyReturnValue ( Object retValue ) { } } | if ( retValue != null && retValue instanceof RESTBaseEntityCollectionV1 ) { // The parent will either be a user defined parent , or the entity itself .
final RESTBaseEntityV1 < ? > parent = this . parent == null ? getProxyEntity ( ) : this . parent ; return RESTCollectionProxyFactory . create ( getProviderFactory ( ) , ( RESTBaseEntityCollectionV1 ) retValue , isRevision , parent ) ; } else { return retValue ; } |
public class PropertyNameResolver { /** * Return the index value from the property expression or - 1.
* @ param expression The property expression
* @ return The index value or - 1 if the property is not indexed
* @ throws IllegalArgumentException If the indexed property is illegally
* formed or has an invalid ( non - numeric ) value . */
public int getIndex ( String expression ) { } } | if ( expression == null || expression . length ( ) == 0 ) { return - 1 ; } for ( int i = 0 ; i < expression . length ( ) ; i ++ ) { char c = expression . charAt ( i ) ; if ( c == Nested || c == MappedStart ) { return - 1 ; } else if ( c == IndexedStart ) { int end = expression . indexOf ( IndexedEnd , i ) ; if ( end < 0 ) { throw new IllegalArgumentException ( "Missing End Delimiter" ) ; } String value = expression . substring ( i + 1 , end ) ; if ( value . length ( ) == 0 ) { throw new IllegalArgumentException ( "No Index Value" ) ; } int index = 0 ; try { index = Integer . parseInt ( value , 10 ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Invalid index value '" + value + "'" ) ; } return index ; } } return - 1 ; |
public class DatatypeConverter { /** * Print currency .
* @ param value currency value
* @ return currency value */
public static final BigDecimal printCurrency ( Number value ) { } } | return ( value == null || value . doubleValue ( ) == 0 ? null : new BigDecimal ( value . doubleValue ( ) * 100 ) ) ; |
public class AWSSupportClient { /** * Requests a refresh of the Trusted Advisor check that has the specified check ID . Check IDs can be obtained by
* calling < a > DescribeTrustedAdvisorChecks < / a > .
* < note >
* Some checks are refreshed automatically , and they cannot be refreshed by using this operation . Use of the
* < code > RefreshTrustedAdvisorCheck < / code > operation for these checks causes an < code > InvalidParameterValue < / code >
* error .
* < / note >
* The response contains a < a > TrustedAdvisorCheckRefreshStatus < / a > object , which contains these fields :
* < ul >
* < li >
* < b > status . < / b > The refresh status of the check : " none " , " enqueued " , " processing " , " success " , or " abandoned " .
* < / li >
* < li >
* < b > millisUntilNextRefreshable . < / b > The amount of time , in milliseconds , until the check is eligible for refresh .
* < / li >
* < li >
* < b > checkId . < / b > The unique identifier for the check .
* < / li >
* < / ul >
* @ param refreshTrustedAdvisorCheckRequest
* @ return Result of the RefreshTrustedAdvisorCheck operation returned by the service .
* @ throws InternalServerErrorException
* An internal server error occurred .
* @ sample AWSSupport . RefreshTrustedAdvisorCheck
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / support - 2013-04-15 / RefreshTrustedAdvisorCheck "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public RefreshTrustedAdvisorCheckResult refreshTrustedAdvisorCheck ( RefreshTrustedAdvisorCheckRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeRefreshTrustedAdvisorCheck ( request ) ; |
public class LiteralImpl { /** * Encode this Literal into an ObjectOutput ( used only by Selector . encode ) . */
public void encodeSelf ( ObjectOutput buf ) throws IOException { } } | buf . writeByte ( ( byte ) type ) ; switch ( type ) { case STRING : buf . writeUTF ( ( String ) value ) ; return ; case LONG : buf . writeLong ( ( ( Long ) value ) . longValue ( ) ) ; // was NumericValue
return ; case DOUBLE : buf . writeDouble ( ( ( Double ) value ) . doubleValue ( ) ) ; // was NumericValue
return ; case INT : buf . writeInt ( ( ( Integer ) value ) . intValue ( ) ) ; // was NumericValue
return ; case FLOAT : buf . writeFloat ( ( ( Float ) value ) . floatValue ( ) ) ; // was NumericValue
return ; case BOOLEAN : buf . writeBoolean ( ( ( Boolean ) value ) . booleanValue ( ) ) ; // was BooleanValue
return ; case OBJECT : buf . writeObject ( value ) ; default : throw new IllegalStateException ( ) ; } |
public class ServiceInstanceQuery { /** * Get a metadata value match regex pattern QueryCriterion .
* Add a QueryCriterion to check ServiceInstance has metadata value
* match the target regex pattern .
* @ param key
* the metadata key .
* @ param pattern
* the target regex pattern that metadata should match .
* @ return
* the ServiceInstanceQuery . */
public ServiceInstanceQuery getPatternQueryCriterion ( String key , String pattern ) { } } | QueryCriterion c = new PatternQueryCriterion ( key , pattern ) ; addQueryCriterion ( c ) ; return this ; |
public class CreationalContextImpl { /** * Destroys dependent instance
* @ param instance
* @ return true if the instance was destroyed , false otherwise */
public boolean destroyDependentInstance ( T instance ) { } } | synchronized ( dependentInstances ) { for ( Iterator < ContextualInstance < ? > > iterator = dependentInstances . iterator ( ) ; iterator . hasNext ( ) ; ) { ContextualInstance < ? > contextualInstance = iterator . next ( ) ; if ( contextualInstance . getInstance ( ) == instance ) { iterator . remove ( ) ; destroy ( contextualInstance ) ; return true ; } } } return false ; |
public class SqlValidatorImpl { /** * Return the intended modality of a SELECT or set - op . */
private SqlModality deduceModality ( SqlNode query ) { } } | if ( query instanceof SqlSelect ) { SqlSelect select = ( SqlSelect ) query ; return select . getModifierNode ( SqlSelectKeyword . STREAM ) != null ? SqlModality . STREAM : SqlModality . RELATION ; } else if ( query . getKind ( ) == SqlKind . VALUES ) { return SqlModality . RELATION ; } else { assert query . isA ( SqlKind . SET_QUERY ) ; final SqlCall call = ( SqlCall ) query ; return deduceModality ( call . getOperandList ( ) . get ( 0 ) ) ; } |
public class AjaxController { /** * Handles request for listing accounts */
@ RequestMapping ( value = "/listGedikStocks" , method = RequestMethod . GET ) public @ ResponseBody ResponseGedikStockList listGedikStocks ( HttpServletResponse response ) { } } | logger . debug ( "Received request to list gedik stocks" ) ; ResponseGedikStockList gedikStockList = null ; try { gedikStockList = gedikStockService . getAllStocks ( ) ; } catch ( IOException e ) { logger . error ( e . getMessage ( ) ) ; e . printStackTrace ( ) ; } return gedikStockList ; |
public class Dictionary { /** * Loads the contents of the specified input stream into this dictionary .
* @ param input A dictionary file .
* @ throws IOException If the specified input stream can not be read . */
protected void loadGrams ( DataInputStream input ) throws IOException { } } | gramSet = new HashSet ( ) ; try { while ( true ) { int gramLength = input . readInt ( ) ; int [ ] words = new int [ gramLength ] ; for ( int wi = 0 ; wi < gramLength ; wi ++ ) { words [ wi ] = input . readInt ( ) ; } gramSet . add ( new NGram ( words ) ) ; } } catch ( EOFException e ) { } |
public class CmsJSONSearchConfigurationParser { /** * Helper for reading an optional String value list - returning < code > null < / code > if parsing fails for the whole list , otherwise just skipping unparsable entries .
* @ param json The JSON object where the list should be read from .
* @ param key The key of the value to read .
* @ return The value from the JSON , or < code > null < / code > if the value does not exist . */
protected static List < String > parseOptionalStringValues ( JSONObject json , String key ) { } } | List < String > list = null ; try { list = parseMandatoryStringValues ( json , key ) ; } catch ( JSONException e ) { LOG . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_OPTIONAL_STRING_LIST_MISSING_1 , key ) , e ) ; return null ; } return list ; |
public class CClassNode { /** * add _ ctype _ to _ cc _ by _ range / / Encoding out ! */
public void addCTypeByRange ( int ctype , boolean not , ScanEnvironment env , int sbOut , int mbr [ ] ) { } } | int n = mbr [ 0 ] ; int i ; if ( ! not ) { for ( i = 0 ; i < n ; i ++ ) { for ( int j = CR_FROM ( mbr , i ) ; j <= CR_TO ( mbr , i ) ; j ++ ) { if ( j >= sbOut ) { if ( j > CR_FROM ( mbr , i ) ) { addCodeRangeToBuf ( env , j , CR_TO ( mbr , i ) ) ; i ++ ; } // ! goto sb _ end ! , remove duplication !
for ( ; i < n ; i ++ ) { addCodeRangeToBuf ( env , CR_FROM ( mbr , i ) , CR_TO ( mbr , i ) ) ; } return ; } bs . set ( env , j ) ; } } // ! sb _ end : !
for ( ; i < n ; i ++ ) { addCodeRangeToBuf ( env , CR_FROM ( mbr , i ) , CR_TO ( mbr , i ) ) ; } } else { int prev = 0 ; for ( i = 0 ; i < n ; i ++ ) { for ( int j = prev ; j < CR_FROM ( mbr , i ) ; j ++ ) { if ( j >= sbOut ) { // ! goto sb _ end2 ! , remove duplication
prev = sbOut ; for ( i = 0 ; i < n ; i ++ ) { if ( prev < CR_FROM ( mbr , i ) ) addCodeRangeToBuf ( env , prev , CR_FROM ( mbr , i ) - 1 ) ; prev = CR_TO ( mbr , i ) + 1 ; } if ( prev < 0x7fffffff ) addCodeRangeToBuf ( env , prev , 0x7fffffff ) ; return ; } bs . set ( env , j ) ; } prev = CR_TO ( mbr , i ) + 1 ; } for ( int j = prev ; j < sbOut ; j ++ ) { bs . set ( env , j ) ; } // ! sb _ end2 : !
prev = sbOut ; for ( i = 0 ; i < n ; i ++ ) { if ( prev < CR_FROM ( mbr , i ) ) addCodeRangeToBuf ( env , prev , CR_FROM ( mbr , i ) - 1 ) ; prev = CR_TO ( mbr , i ) + 1 ; } if ( prev < 0x7fffffff ) addCodeRangeToBuf ( env , prev , 0x7fffffff ) ; } |
public class WebContainer { /** * Check for the JSR77 runtime , and , if available , use it to register module
* and servlet mbeans . As a side effect , assign the mbean registration to
* the web module metadata and to the servlet metadata .
* The web module container is required for access to the web module descriptor .
* @ param webModule The web module to register .
* @ param container The container of the web module . */
protected void registerMBeans ( WebModuleMetaDataImpl webModule , com . ibm . wsspi . adaptable . module . Container container ) { } } | String methodName = "registerMBeans" ; String appName = webModule . getApplicationMetaData ( ) . getName ( ) ; String webAppName = webModule . getName ( ) ; String debugName ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { debugName = appName + " " + webAppName ; } else { debugName = null ; } WebMBeanRuntime mBeanRuntime = webMBeanRuntimeServiceRef . getService ( ) ; if ( mBeanRuntime == null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName , "Web Module [ " + debugName + " ]: No MBean Runtime" ) ; } return ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName , "Web Module [ " + debugName + " ]: MBean Runtime" ) ; } } String ddPath = com . ibm . ws . javaee . dd . web . WebApp . DD_NAME ; // This should be obtained by an adapt call .
Iterator < IServletConfig > servletConfigs = webModule . getConfiguration ( ) . getServletInfos ( ) ; webModule . mBeanServiceReg = mBeanRuntime . registerModuleMBean ( appName , webAppName , container , ddPath , servletConfigs ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName , "Web Module [ " + debugName + " ]: Registration [ " + webModule . mBeanServiceReg + " ]" ) ; } servletConfigs = webModule . getConfiguration ( ) . getServletInfos ( ) ; while ( servletConfigs . hasNext ( ) ) { IServletConfig servletConfig = servletConfigs . next ( ) ; String servletName = servletConfig . getServletName ( ) ; WebComponentMetaDataImpl wcmdi = ( WebComponentMetaDataImpl ) servletConfig . getMetaData ( ) ; wcmdi . mBeanServiceReg = mBeanRuntime . registerServletMBean ( appName , webAppName , servletName ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName , "Web Module [ " + debugName + " ] Servlet [ " + servletName + " ]: Registration [ " + wcmdi . mBeanServiceReg + " ]" ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName , "Web Module [ " + debugName + " ]: Completed registrations" ) ; } |
public class PersonName { /** * 使用词性序列从多个人名中选择一个最佳的
* @ param candidateWords
* @ return */
private static List < Word > selectBest ( List < List < Word > > candidateWords ) { } } | if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "开始从多个识别结果中选择一个最佳的结果:{}" , candidateWords ) ; } Map < List < Word > , Integer > map = new ConcurrentHashMap < > ( ) ; AtomicInteger i = new AtomicInteger ( ) ; candidateWords . stream ( ) . forEach ( candidateWord -> { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( i . incrementAndGet ( ) + "、开始处理:" + candidateWord ) ; } // 词性标注
PartOfSpeechTagging . process ( candidateWord ) ; // 根据词性标注的结果进行评分
StringBuilder seq = new StringBuilder ( ) ; candidateWord . forEach ( word -> seq . append ( word . getPartOfSpeech ( ) . getPos ( ) . charAt ( 0 ) ) . append ( " " ) ) ; String seqStr = seq . toString ( ) ; AtomicInteger score = new AtomicInteger ( ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "词序列:{} 的词性序列:{}" , candidateWord , seqStr ) ; } POS_SEQ . keySet ( ) . stream ( ) . forEach ( pos_seq -> { if ( seqStr . contains ( pos_seq ) ) { int sc = POS_SEQ . get ( pos_seq ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( pos_seq + "词序增加分值:" + sc ) ; } score . addAndGet ( sc ) ; } } ) ; score . addAndGet ( - candidateWord . size ( ) ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "长度的负值也作为分值:" + ( - candidateWord . size ( ) ) ) ; LOGGER . debug ( "评分结果:" + score . get ( ) ) ; } map . put ( candidateWord , score . get ( ) ) ; } ) ; // 选择分值最高的
List < Word > result = map . entrySet ( ) . stream ( ) . sorted ( ( a , b ) -> b . getValue ( ) . compareTo ( a . getValue ( ) ) ) . map ( e -> e . getKey ( ) ) . collect ( Collectors . toList ( ) ) . get ( 0 ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "选择结果:" + result ) ; } return result ; |
public class ServiceValidationViewFactory { /** * Register view .
* @ param type the type
* @ param view the view */
public void registerView ( final String type , final Pair < View , View > view ) { } } | registeredViews . put ( type , view ) ; |
public class RelativeToEasterSundayParser { /** * Returns the easter Sunday within the julian chronology .
* @ param nYear
* The year to retrieve Julian Easter Sunday date
* @ return julian easter Sunday */
public static JulianDate getJulianEasterSunday ( final int nYear ) { } } | final int a = nYear % 4 ; final int b = nYear % 7 ; final int c = nYear % 19 ; final int d = ( 19 * c + 15 ) % 30 ; final int e = ( 2 * a + 4 * b - d + 34 ) % 7 ; final int x = d + e + 114 ; final int nMonth = x / 31 ; final int nDay = ( x % 31 ) + 1 ; return JulianDate . of ( nYear , ( nMonth == 3 ? Month . MARCH : Month . APRIL ) . getValue ( ) , nDay ) ; |
public class MExtensionFileFilter { /** * Returns the human readable description of this filter . For example : " JPEG and GIF Image Files ( * . jpg , * . gif ) "
* @ return String
* @ see # setDescription
* @ see # setExtensionListInDescription
* @ see # isExtensionListInDescription
* @ see javax . swing . filechooser . FileFilter # getDescription */
@ Override public String getDescription ( ) { } } | if ( fullDescription == null ) { final String temp ; if ( description == null || isExtensionListInDescription ( ) ) { final StringBuilder sb = new StringBuilder ( ) ; sb . append ( description == null ? "(" : description + " (" ) ; // build the description from the extension list
boolean first = true ; for ( final String filterKey : filters . keySet ( ) ) { if ( first ) { first = false ; } else { sb . append ( ", " ) ; } sb . append ( "*." ) ; sb . append ( filterKey ) ; } sb . append ( ')' ) ; temp = sb . toString ( ) ; } else { temp = description ; } // Command Query Separation with lazy initialization : set fullDescription only once
fullDescription = temp ; } return fullDescription ; |
public class PatternBox { /** * Pattern for an EntityReference has distinct PhysicalEntities associated with both left and
* right of a Conversion .
* @ return the pattern */
public static Pattern modifiedPESimple ( ) { } } | Pattern p = new Pattern ( EntityReference . class , "modified ER" ) ; p . add ( erToPE ( ) , "modified ER" , "first PE" ) ; p . add ( participatesInConv ( ) , "first PE" , "Conversion" ) ; p . add ( new ConversionSide ( ConversionSide . Type . OTHER_SIDE ) , "first PE" , "Conversion" , "second PE" ) ; p . add ( equal ( false ) , "first PE" , "second PE" ) ; p . add ( peToER ( ) , "second PE" , "modified ER" ) ; return p ; |
public class SCMController { /** * Form to create a change log filter */
@ RequestMapping ( value = "changeLog/fileFilter/{projectId}/create" , method = RequestMethod . GET ) public Form createChangeLogFileFilterForm ( @ SuppressWarnings ( "UnusedParameters" ) @ PathVariable ID projectId ) { } } | return Form . create ( ) . with ( Text . of ( "name" ) . label ( "Name" ) . help ( "Name to use to save the filter." ) ) . with ( Memo . of ( "patterns" ) . label ( "Filter(s)" ) . help ( "List of ANT-like patterns (one per line)." ) ) ; |
public class DTMManagerDefault { /** * This method returns the SAX2 parser to use with the InputSource
* obtained from this URI .
* It may return null if any SAX2 - conformant XML parser can be used ,
* or if getInputSource ( ) will also return null . The parser must
* be free for use ( i . e . , not currently in use for another parse ( ) .
* After use of the parser is completed , the releaseXMLReader ( XMLReader )
* must be called .
* @ param inputSource The value returned from the URIResolver .
* @ return a SAX2 XMLReader to use to resolve the inputSource argument .
* @ return non - null XMLReader reference ready to parse . */
synchronized public XMLReader getXMLReader ( Source inputSource ) { } } | try { XMLReader reader = ( inputSource instanceof SAXSource ) ? ( ( SAXSource ) inputSource ) . getXMLReader ( ) : null ; // If user did not supply a reader , ask for one from the reader manager
if ( null == reader ) { if ( m_readerManager == null ) { m_readerManager = XMLReaderManager . getInstance ( ) ; } reader = m_readerManager . getXMLReader ( ) ; } return reader ; } catch ( SAXException se ) { throw new DTMException ( se . getMessage ( ) , se ) ; } |
public class FacebookAccessTokenJsonExtractor { /** * non standard . examples : < br >
* ' { " error " : { " message " : " This authorization code has been
* used . " , " type " : " OAuthException " , " code " : 100 , " fbtrace _ id " : " DtxvtGRaxbB " } } ' < br >
* ' { " error " : { " message " : " Error validating application . Invalid application
* ID . " , " type " : " OAuthException " , " code " : 101 , " fbtrace _ id " : " CvDR + X4WWIx " } } ' */
@ Override public void generateError ( String response ) { } } | extractParameter ( response , MESSAGE_REGEX_PATTERN , false ) ; throw new FacebookAccessTokenErrorResponse ( extractParameter ( response , MESSAGE_REGEX_PATTERN , false ) , extractParameter ( response , TYPE_REGEX_PATTERN , false ) , extractParameter ( response , CODE_REGEX_PATTERN , false ) , extractParameter ( response , FBTRACE_ID_REGEX_PATTERN , false ) , response ) ; |
public class VoldemortBuildAndPushJob { /** * Get the Json Schema of the input path , assuming the path contains just one
* schema version in all files under that path . */
private synchronized JsonSchema getInputPathJsonSchema ( ) throws IOException { } } | if ( inputPathJsonSchema == null ) { // No need to query Hadoop more than once as this shouldn ' t change mid - run ,
// thus , we can lazily initialize and cache the result .
inputPathJsonSchema = HadoopUtils . getSchemaFromPath ( getInputPath ( ) ) ; } return inputPathJsonSchema ; |
public class SearchView { /** * Query rewriting . */
private void rewriteQueryFromSuggestion ( int position ) { } } | CharSequence oldQuery = mQueryTextView . getText ( ) ; Cursor c = mSuggestionsAdapter . getCursor ( ) ; if ( c == null ) { return ; } if ( c . moveToPosition ( position ) ) { // Get the new query from the suggestion .
CharSequence newQuery = mSuggestionsAdapter . convertToString ( c ) ; if ( newQuery != null ) { // The suggestion rewrites the query .
// Update the text field , without getting new suggestions .
setQuery ( newQuery ) ; } else { // The suggestion does not rewrite the query , restore the user ' s query .
setQuery ( oldQuery ) ; } } else { // We got a bad position , restore the user ' s query .
setQuery ( oldQuery ) ; } |
public class StandardQuery { /** * Resets any cached reference to a query executor . If a reference is
* available , it is replaced , but a clear reference is not set . */
protected void resetExecutor ( ) throws RepositoryException { } } | if ( mExecutor != null ) { mExecutor = executorFactory ( ) . executor ( mFilter , mOrdering , null ) ; } |
public class JCusparse { /** * Description : This routine converts a sparse matrix in CSR storage format
* to a dense matrix . */
public static int cusparseScsr2dense ( cusparseHandle handle , int m , int n , cusparseMatDescr descrA , Pointer csrSortedValA , Pointer csrSortedRowPtrA , Pointer csrSortedColIndA , Pointer A , int lda ) { } } | return checkResult ( cusparseScsr2denseNative ( handle , m , n , descrA , csrSortedValA , csrSortedRowPtrA , csrSortedColIndA , A , lda ) ) ; |
public class ELTools { /** * Yields the type of the variable given by an expression .
* @ param p _ expression
* @ return */
public static Class < ? > getType ( String p_expression ) { } } | Field declaredField = getField ( p_expression ) ; if ( null != declaredField ) { return declaredField . getType ( ) ; } return null ; |
public class EntityREST { /** * Delete an entity identified by its GUID .
* @ param guid GUID for the entity
* @ return EntityMutationResponse */
@ DELETE @ Path ( "/guid/{guid}" ) @ Consumes ( { } } | Servlets . JSON_MEDIA_TYPE , MediaType . APPLICATION_JSON } ) @ Produces ( Servlets . JSON_MEDIA_TYPE ) public EntityMutationResponse deleteByGuid ( @ PathParam ( "guid" ) final String guid ) throws AtlasBaseException { AtlasPerfTracer perf = null ; try { if ( AtlasPerfTracer . isPerfTraceEnabled ( PERF_LOG ) ) { perf = AtlasPerfTracer . getPerfTracer ( PERF_LOG , "EntityREST.deleteByGuid(" + guid + ")" ) ; } return entitiesStore . deleteById ( guid ) ; } finally { AtlasPerfTracer . log ( perf ) ; } |
public class PowerMock { /** * Creates a strict mock object that supports mocking of final and native
* methods .
* @ param < T > the type of the mock object
* @ param type the type of the mock object
* @ param methods optionally what methods to mock
* @ return the mock object . */
public static synchronized < T > T createStrictMock ( Class < T > type , Method ... methods ) { } } | return doMock ( type , false , new StrictMockStrategy ( ) , null , methods ) ; |
public class SystemFunctionSet { /** * @ see org . s1 . objects . Objects # diff ( java . util . Map , java . util . Map )
* @ param old
* @ param nw
* @ return */
@ MapMethod public List < Map < String , Object > > diff ( Map < String , Object > old , Map < String , Object > nw ) { } } | List < ObjectDiff . DiffBean > l = Objects . diff ( old , nw ) ; List < Map < String , Object > > list = Objects . newArrayList ( ) ; for ( ObjectDiff . DiffBean b : l ) { list . add ( Objects . newHashMap ( String . class , Object . class , "path" , b . getPath ( ) , "old" , b . getOldValue ( ) , "new" , b . getNewValue ( ) ) ) ; } return list ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link FeatureCollectionType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link FeatureCollectionType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "FeatureCollection" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "_Feature" ) public JAXBElement < FeatureCollectionType > createFeatureCollection ( FeatureCollectionType value ) { } } | return new JAXBElement < FeatureCollectionType > ( _FeatureCollection_QNAME , FeatureCollectionType . class , null , value ) ; |
public class UtilizationShell { /** * Displays format of commands . */
private static void printUsage ( ) { } } | String prefix = "Usage: hadoop " + UtilizationShell . class . getSimpleName ( ) ; System . err . println ( prefix ) ; System . err . println ( " [-all]" ) ; System . err . println ( " [-cluster]" ) ; System . err . println ( " [-job]" ) ; System . err . println ( " [-job jobID1 jobID2...]" ) ; System . err . println ( " [-tasktracker]" ) ; System . err . println ( " [-tasktracker hostname1 hostname2...]" ) ; System . err . println ( " [-help [cmd]]" ) ; System . err . println ( ) ; ToolRunner . printGenericCommandUsage ( System . err ) ; |
public class gslbservice { /** * Use this API to delete gslbservice of given name . */
public static base_response delete ( nitro_service client , String servicename ) throws Exception { } } | gslbservice deleteresource = new gslbservice ( ) ; deleteresource . servicename = servicename ; return deleteresource . delete_resource ( client ) ; |
public class Util { /** * Get an array of { @ link Sequence } s for the passed { @ link EventProcessor } s
* @ param processors for which to get the sequences
* @ return the array of { @ link Sequence } s */
public static Sequence [ ] getSequencesFor ( final EventProcessor ... processors ) { } } | Sequence [ ] sequences = new Sequence [ processors . length ] ; for ( int i = 0 ; i < sequences . length ; i ++ ) { sequences [ i ] = processors [ i ] . getSequence ( ) ; } return sequences ; |
public class DscNodesInner { /** * Retrieve the dsc node identified by node id .
* @ param resourceGroupName Name of an Azure Resource group .
* @ param automationAccountName The name of the automation account .
* @ param nodeId The node id .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws ErrorResponseException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the DscNodeInner object if successful . */
public DscNodeInner get ( String resourceGroupName , String automationAccountName , String nodeId ) { } } | return getWithServiceResponseAsync ( resourceGroupName , automationAccountName , nodeId ) . toBlocking ( ) . single ( ) . body ( ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.