signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class HessianDebugOutputStream { /** * closes the stream . */ @ Override public void close ( ) throws IOException { } }
OutputStream os = _os ; _os = null ; if ( os != null ) { _state . next ( - 1 ) ; os . close ( ) ; } _state . println ( ) ;
public class EnumUtils { /** * < p > Creates a long bit vector representation of the given array of Enum values . < / p > * < p > This generates a value that is usable by { @ link EnumUtils # processBitVector } . < / p > * < p > Do not use this method if you have more than 64 values in your Enum , as this * would create a value greater than a long can hold . < / p > * @ param enumClass the class of the enum we are working with , not { @ code null } * @ param values the values we want to convert , not { @ code null } * @ param < E > the type of the enumeration * @ return a long whose value provides a binary representation of the given set of enum values . * @ throws NullPointerException if { @ code enumClass } or { @ code values } is { @ code null } * @ throws IllegalArgumentException if { @ code enumClass } is not an enum class or has more than 64 values * @ since 3.0.1 * @ see # generateBitVectors ( Class , Iterable ) */ @ GwtIncompatible ( "incompatible method" ) @ SafeVarargs public static < E extends Enum < E > > long generateBitVector ( final Class < E > enumClass , final E ... values ) { } }
Validate . noNullElements ( values ) ; return generateBitVector ( enumClass , Arrays . asList ( values ) ) ;
public class ObjectInputStream { /** * Reads a new proxy class descriptor from the receiver . It is assumed the * proxy class descriptor has not been read yet ( not a cyclic reference ) . * Return the proxy class descriptor read . * @ return The { @ code Class } read from the stream . * @ throws IOException * If an IO exception happened when reading the class * descriptor . * @ throws ClassNotFoundException * If a class for one of the objects could not be found */ private Class < ? > readNewProxyClassDesc ( ) throws ClassNotFoundException , IOException { } }
int count = input . readInt ( ) ; String [ ] interfaceNames = new String [ count ] ; for ( int i = 0 ; i < count ; i ++ ) { interfaceNames [ i ] = input . readUTF ( ) ; } Class < ? > proxy = resolveProxyClass ( interfaceNames ) ; // Consume unread class annotation data and TC _ ENDBLOCKDATA discardData ( ) ; return proxy ;
public class Document { /** * Resets the header of this document . */ public void resetHeader ( ) { } }
this . header = null ; DocListener listener ; for ( Iterator iterator = listeners . iterator ( ) ; iterator . hasNext ( ) ; ) { listener = ( DocListener ) iterator . next ( ) ; listener . resetHeader ( ) ; }
public class Namespace { /** * Returns a namespace entity using the provided namespace qualifier . * @ param em The entity manager . Cannot be null . * @ param qualifier The qualifier . Cannot be null or empty . * @ return The namespace or null if no corresponding namespace is found . */ public static Namespace findByQualifier ( EntityManager em , String qualifier ) { } }
SystemAssert . requireArgument ( em != null , "EntityManager cannot be null." ) ; SystemAssert . requireArgument ( qualifier != null && ! qualifier . isEmpty ( ) , "Namespace qualifier cannot be null or empty." ) ; TypedQuery < Namespace > query = em . createNamedQuery ( "Namespace.findByQualifier" , Namespace . class ) ; try { return query . setParameter ( "qualifier" , qualifier ) . getSingleResult ( ) ; } catch ( NoResultException ex ) { return null ; }
public class RestTool {
    /**
     * Makes a call to the MangoPay API.
     * This generic method handles calls targeting collections of
     * <code>Dto</code> instances. In order to process single objects,
     * use the <code>request</code> method instead.
     *
     * @param <T>                 Type on behalf of which the request is being called.
     * @param classOfT            Type on behalf of which the request is being called.
     * @param classOfTItem        The class of a single item in the array.
     * @param urlMethod           Relevant method key.
     * @param requestType         HTTP request verb. For lists this should always be GET.
     * @param requestData         Collection of key-value pairs of request parameters.
     * @param pagination          Pagination object.
     * @param additionalUrlParams Additional URL parameters.
     * @return The collection of Dto instances returned from the API.
     * @throws Exception if the request fails
     */
    public <T extends Dto> List<T> requestList(Class<T[]> classOfT, Class<T> classOfTItem, String urlMethod, String requestType, Map<String, String> requestData, Pagination pagination, Map<String, String> additionalUrlParams) throws Exception {
        // Stash request parameters on the instance; doRequestList reads them from fields.
        this.requestType = requestType;
        this.requestData = requestData;
        List<T> responseResult = this.doRequestList(classOfT, classOfTItem, urlMethod, pagination, additionalUrlParams);
        // NOTE(review): this assignment only rebinds the LOCAL parameter variable; the
        // caller's Pagination object is NOT updated by it. If the intent was to hand the
        // response pagination state (this.pagination) back to the caller, the fields would
        // have to be copied into the caller-supplied instance instead — confirm intent.
        if (pagination != null) {
            pagination = this.pagination;
        }
        return responseResult;
    }
}
public class ObjectUtility {
    /**
     * Returns {@code true} if the two objects are NOT equal.
     * <br/>Null-safe: two {@code null} references are considered equal,
     * and a single {@code null} is never equal to a non-null object.
     *
     * @param object1 first object to compare (may be {@code null})
     * @param object2 second object to compare (may be {@code null})
     * @return {@code true} if the objects are NOT equal
     */
    public static boolean notEquals(final Object object1, final Object object2) {
        // Delegate to the stdlib null-safe helper; equivalent to the hand-rolled
        // null checks but shorter and harder to get wrong.
        return !java.util.Objects.equals(object1, object2);
    }
}
public class Parser {
    /**
     * Parses one script-level element (SourceElement).
     *
     * The peek checks are order-sensitive: each declaration form is tried in
     * turn and the generic source-element parser is the fallback for anything
     * that is not one of the recognized declarations.
     */
    private ParseTree parseScriptElement() {
        if (peekImportDeclaration()) {
            return parseImportDeclaration();
        }
        if (peekExportDeclaration()) {
            // 'false' — not inside an ambient/default context at script level.
            // NOTE(review): the exact meaning of the boolean flag is defined by the
            // callee's signature, which is outside this view — confirm.
            return parseExportDeclaration(false);
        }
        if (peekInterfaceDeclaration()) {
            return parseInterfaceDeclaration();
        }
        if (peekEnumDeclaration()) {
            return parseEnumDeclaration();
        }
        if (peekTypeAlias()) {
            return parseTypeAlias();
        }
        if (peekAmbientDeclaration()) {
            return parseAmbientDeclaration();
        }
        if (peekNamespaceDeclaration()) {
            return parseNamespaceDeclaration(false);
        }
        // Fallback: an ordinary statement/declaration.
        return parseSourceElement();
    }
}
public class AbstractListPreference {
    /**
     * Return the index of the entry a specific value corresponds to.
     *
     * @param value The value of the entry, whose index should be returned, as an instance of
     *              the type {@link CharSequence}
     * @return The index of the entry the given value corresponds to, as an {@link Integer}
     *         value, or -1 if there is no such entry
     */
    protected final int indexOf(@Nullable final CharSequence value) {
        if (value != null && getEntryValues() != null) {
            // Iterates BACKWARDS, so if the same value occurs more than once the
            // index of its FIRST occurrence (lowest index) is what ends up returned last-checked;
            // i.e. the loop returns the smallest matching index only if duplicates are absent —
            // with duplicates, the first match found scanning from the end wins.
            // NOTE(review): getEntryValues() is called on every iteration; if it is
            // expensive or can change mid-loop, caching it in a local would help — confirm
            // its contract before changing.
            for (int i = getEntryValues().length - 1; i >= 0; i--) {
                if (getEntryValues()[i].equals(value)) {
                    return i;
                }
            }
        }
        // Not found, or null value / null entry array.
        return -1;
    }
}
public class Streams { /** * Group data in a Stream using knowledge of the current batch and the next entry to determing grouping limits * @ see Traversable # groupedUntil ( BiPredicate ) * @ param stream Stream to group * @ param predicate Predicate to determine grouping * @ return Stream grouped into Lists determined by predicate */ public final static < T > Stream < Seq < T > > groupedStatefullyUntil ( final Stream < T > stream , final BiPredicate < Seq < ? super T > , ? super T > predicate ) { } }
return StreamSupport . stream ( new GroupedStatefullySpliterator < > ( stream . spliterator ( ) , ( ) -> Seq . of ( ) , Function . identity ( ) , predicate . negate ( ) ) , stream . isParallel ( ) ) ;
public class nsconnectiontable { /** * Use this API to fetch all the nsconnectiontable resources that are configured on netscaler . */ public static nsconnectiontable [ ] get ( nitro_service service ) throws Exception { } }
nsconnectiontable obj = new nsconnectiontable ( ) ; nsconnectiontable [ ] response = ( nsconnectiontable [ ] ) obj . get_resources ( service ) ; return response ;
public class UpdateAcceleratorAttributesRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateAcceleratorAttributesRequest updateAcceleratorAttributesRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateAcceleratorAttributesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateAcceleratorAttributesRequest . getAcceleratorArn ( ) , ACCELERATORARN_BINDING ) ; protocolMarshaller . marshall ( updateAcceleratorAttributesRequest . getFlowLogsEnabled ( ) , FLOWLOGSENABLED_BINDING ) ; protocolMarshaller . marshall ( updateAcceleratorAttributesRequest . getFlowLogsS3Bucket ( ) , FLOWLOGSS3BUCKET_BINDING ) ; protocolMarshaller . marshall ( updateAcceleratorAttributesRequest . getFlowLogsS3Prefix ( ) , FLOWLOGSS3PREFIX_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Expression { /** * Return results that match either < code > Dimension < / code > object . * @ param or * Return results that match either < code > Dimension < / code > object . */ public void setOr ( java . util . Collection < Expression > or ) { } }
if ( or == null ) { this . or = null ; return ; } this . or = new java . util . ArrayList < Expression > ( or ) ;
public class Wikipedia {
    /**
     * Gets the pages or redirects with a name similar to the pattern.
     * Calling this method is quite costly, as similarity is computed for all names.
     *
     * @param pPattern The pattern.
     * @param pSize The maximum size of the result list. Only the most similar results will be included.
     * @return A map of pages with names similar to the pattern and their distance values.
     *         Smaller distances are more similar.
     * @throws WikiApiException Thrown if errors occurred.
     */
    protected Map<Page, Double> getSimilarPages(String pPattern, int pSize) throws WikiApiException {
        // Normalize the input pattern to wiki-style title form before comparing.
        Title title = new Title(pPattern);
        String pattern = title.getWikiStyleTitle();
        // Mapping of the most similar pages and their distance values; returned by this method.
        Map<Page, Double> pageMap = new HashMap<Page, Double>();
        // Holds a mapping of the best (smallest) distance values to page IDs.
        Map<Integer, Double> distanceMap = new HashMap<Integer, Double>();
        Session session = this.__getHibernateSession();
        session.beginTransaction();
        // Full scan: every page name is loaded and scored against the pattern.
        Iterator results = session.createQuery("select pml.pageID, pml.name from PageMapLine as pml").list().iterator();
        while (results.hasNext()) {
            Object[] row = (Object[]) results.next();
            int pageID = (Integer) row[0];
            String pageName = (String) row[1];
            // This returns a similarity — if we want to use it, we would have to invert
            // the ordering semantics of the results:
            // double distance = new Levenshtein().getSimilarity(pageName, pPattern);
            double distance = new LevenshteinStringDistance().distance(pageName, pattern);
            distanceMap.put(pageID, distance);
            // Keep at most pSize entries: when the cap is exceeded, drop the entry
            // with the biggest distance.
            if (distanceMap.size() > pSize) {
                // NOTE(review): assumes ValueComparator orders entries so the FIRST
                // element of the TreeSet is the one with the largest distance — confirm
                // against ValueComparator's implementation.
                Set<Map.Entry<Integer, Double>> valueSortedSet = new TreeSet<Map.Entry<Integer, Double>>(new ValueComparator());
                valueSortedSet.addAll(distanceMap.entrySet());
                Iterator it = valueSortedSet.iterator();
                // Remove the first element of the sorted view from the distance map.
                if (it.hasNext()) {
                    Map.Entry entry = (Map.Entry) it.next();
                    distanceMap.remove(entry.getKey());
                }
            }
        }
        session.getTransaction().commit();
        // Materialize the surviving page IDs into Page objects.
        for (int pageID : distanceMap.keySet()) {
            Page page = null;
            try {
                page = this.getPage(pageID);
            } catch (WikiPageNotFoundException e) {
                // NOTE(review): terminating the whole JVM on a missing page is drastic;
                // propagating a WikiApiException would be friendlier — behavior kept as-is.
                logger.error("Page with pageID " + pageID + " could not be found. Fatal error. Terminating.");
                e.printStackTrace();
                System.exit(1);
            }
            pageMap.put(page, distanceMap.get(pageID));
        }
        return pageMap;
    }
}
public class BaseSynthesizer { /** * Get a form of a given AnalyzedToken , where the form is defined by a * part - of - speech tag . * @ param token AnalyzedToken to be inflected . * @ param posTag The desired part - of - speech tag . * @ return inflected words , or an empty array if no forms were found */ @ Override public String [ ] synthesize ( AnalyzedToken token , String posTag ) throws IOException { } }
List < String > wordForms = new ArrayList < > ( ) ; lookup ( token . getLemma ( ) , posTag , wordForms ) ; return wordForms . toArray ( new String [ 0 ] ) ;
public class GolangGenerator {
    /**
     * Recursively traverse groups to create the group properties.
     *
     * For every BEGIN_GROUP token the property accessors are emitted, then the
     * group's interior tokens are processed recursively with the group's name
     * appended to the prefix.
     */
    private void generateGroupProperties(final StringBuilder sb, final List<Token> tokens, final String prefix) {
        for (int i = 0, size = tokens.size(); i < size; i++) {
            final Token token = tokens.get(i);
            if (token.signal() == Signal.BEGIN_GROUP) {
                final String propertyName = formatPropertyName(token.name());
                generateId(sb, prefix, propertyName, token);
                generateSinceActingDeprecated(sb, prefix, propertyName, token);
                generateExtensibilityMethods(sb, prefix + propertyName, token);
                // Recurse into the group's interior only: subList excludes the
                // BEGIN_GROUP token itself and the matching END_GROUP token
                // (componentTokenCount spans the whole group inclusive).
                generateGroupProperties(sb,
                    tokens.subList(i + 1, i + token.componentTokenCount() - 1),
                    prefix + propertyName);
                // Skip past the entire group so nested groups are not reprocessed
                // at this level (the loop's i++ accounts for the final token).
                i += token.componentTokenCount() - 1;
            }
        }
    }
}
public class AssetsApi { /** * Get character asset names ( asynchronously ) Return names for a set of item * ids , which you can get from character assets endpoint . Typically used for * items that can customize names , like containers or ships . - - - SSO Scope : * esi - assets . read _ assets . v1 * @ param characterId * An EVE character ID ( required ) * @ param requestBody * A list of item ids ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param token * Access token to use if unable to set a header ( optional ) * @ param callback * The callback to be executed when the API call finishes * @ return The request call * @ throws ApiException * If fail to process the API call , e . g . serializing the request * body object */ public com . squareup . okhttp . Call postCharactersCharacterIdAssetsNamesAsync ( Integer characterId , List < Long > requestBody , String datasource , String token , final ApiCallback < List < CharacterAssetsNamesResponse > > callback ) throws ApiException { } }
com . squareup . okhttp . Call call = postCharactersCharacterIdAssetsNamesValidateBeforeCall ( characterId , requestBody , datasource , token , callback ) ; Type localVarReturnType = new TypeToken < List < CharacterAssetsNamesResponse > > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
public class DisksInner { /** * Creates or updates a disk . * @ param resourceGroupName The name of the resource group . * @ param diskName The name of the managed disk that is being created . The name can ' t be changed after the disk is created . Supported characters for the name are a - z , A - Z , 0-9 and _ . The maximum name length is 80 characters . * @ param disk Disk object supplied in the body of the Put disk operation . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < DiskInner > beginCreateOrUpdateAsync ( String resourceGroupName , String diskName , DiskInner disk , final ServiceCallback < DiskInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , diskName , disk ) , serviceCallback ) ;
public class AbstractResources { /** * List resources using Smartsheet REST API . * Exceptions : * IllegalArgumentException : if any argument is null , or path is empty string * InvalidRequestException : if there is any problem with the REST API request * AuthorizationException : if there is any problem with the REST API authorization ( access token ) * ServiceUnavailableException : if the REST API service is not available ( possibly due to rate limiting ) * SmartsheetRestException : if there is any other REST API related error occurred during the operation * SmartsheetException : if there is any other error occurred during the operation * @ param < T > the generic type * @ param path the relative path of the resource collections * @ param objectClass the resource object class * @ return the resources * @ throws SmartsheetException if an error occurred during the operation */ protected < T > List < T > listResources ( String path , Class < T > objectClass ) throws SmartsheetException { } }
Util . throwIfNull ( path , objectClass ) ; Util . throwIfEmpty ( path ) ; HttpRequest request ; request = createHttpRequest ( smartsheet . getBaseURI ( ) . resolve ( path ) , HttpMethod . GET ) ; List < T > obj = null ; try { HttpResponse response = this . smartsheet . getHttpClient ( ) . request ( request ) ; switch ( response . getStatusCode ( ) ) { case 200 : obj = this . smartsheet . getJsonSerializer ( ) . deserializeList ( objectClass , response . getEntity ( ) . getContent ( ) ) ; break ; default : handleError ( response ) ; } } finally { smartsheet . getHttpClient ( ) . releaseConnection ( ) ; } return obj ;
public class EphemeralKafkaBroker { /** * Get the path to the Kafka log directory * @ return An Optional that will only contain a value if the broker is running */ public Optional < String > getLogDir ( ) { } }
return brokerStarted ? Optional . of ( kafkaLogDir . toString ( ) ) : Optional . empty ( ) ;
public class GeneralizedOrssSeed {
    /**
     * Selects {@code k} rows of {@code dataPoints}, weighted by the specified
     * amount, to be seeds of a <i>k</i>-means instance. If more seeds are
     * requested than are available, all possible rows are returned.
     *
     * @param dataPoints a matrix whose rows are to be evaluated and from which
     *                   {@code k} data points will be selected
     * @param k          the number of data points (rows) to select
     * @param weights    a set of scalar int weights that reflect the importance
     *                   of each data point
     * @return the set of rows that were selected
     */
    public DoubleVector[] chooseSeeds(Matrix dataPoints, int k, int[] weights) {
        IntSet selected = new TroveIntSet();
        int rows = dataPoints.rows();
        // Edge case for where the user has requested more seeds than are
        // available. In this case, just return vectors for all the rows.
        if (rows <= k) {
            DoubleVector[] arr = new DoubleVector[rows];
            for (int i = 0; i < rows; ++i)
                arr[i] = dataPoints.getRowVector(i);
            return arr;
        }
        // This array keeps the relative probability of each index's data point
        // being selected as a centroid. Although the probabilities change with
        // each center added, the array is allocated once and is refilled by
        // determineProbabilities().
        double[] probabilities = new double[rows];
        // Memoized maximum similarity of each data point i to any center
        // currently in 'selected'. After the first two points are selected,
        // each iteration updates this array with the maximum similarity of the
        // new center to that point's index.
        double[] inverseSimilarities = new double[rows];
        // Pick the first two centers, x, y, with probability proportional to
        // 1/sim(x,y). In the original paper the probability is proportional to
        // ||x-y||^2 (squared distance). Since we use similarity (conceptually
        // the inverse of distance), we use the inverse similarity so that more
        // similar elements (larger values) get smaller probabilities.
        IntPair firstTwoCenters = pickFirstTwo(dataPoints, simFunc, weights, inverseSimilarities);
        selected.add(firstTwoCenters.x);
        selected.add(firstTwoCenters.y);
        // For the remaining k-2 points, pick a random point x with probability
        // min(1/sim(x, c_i)) over all centers c_i in 'selected'. Again, this is
        // the similarity-based analogue of the ORSS paper's ||x-c_i||^2 rule.
        for (int i = 2; i < k; i++) {
            // First, calculate the probabilities for selecting each point given
            // its similarity to any of the currently selected centers.
            determineProbabilities(inverseSimilarities, weights, probabilities, selected);
            // Then sample a point from the multinomial distribution over the
            // remaining points in dataPoints.
            int point = selectWithProb(probabilities);
            // Once we've selected a point, add it to the set that we will
            // return and update the similarity of all other non-selected points
            // relative to the highest similarity to any selected point.
            boolean added = selected.add(point);
            assert added : "Added duplicate row to the set of selected points";
            updateNearestCenter(inverseSimilarities, dataPoints, point, simFunc);
        }
        // Materialize the selected row indices into their vectors.
        IntIterator iter = selected.iterator();
        DoubleVector[] centroids = new DoubleVector[k];
        for (int i = 0; iter.hasNext(); ++i)
            centroids[i] = dataPoints.getRowVector(iter.nextInt());
        return centroids;
    }
}
public class MockStringResourceLoader { /** * Registers a mock resource with the first argument as the location and the second as the contents * of the resource . * @ param location The location * @ param contents The contents of the resource */ public void registerMockResource ( String location , byte [ ] contents ) { } }
mockResources . put ( location , new GrailsByteArrayResource ( contents , location ) ) ;
public class AttributeDimensionMarshaller { /** * Marshall the given parameter object . */ public void marshall ( AttributeDimension attributeDimension , ProtocolMarshaller protocolMarshaller ) { } }
if ( attributeDimension == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( attributeDimension . getAttributeType ( ) , ATTRIBUTETYPE_BINDING ) ; protocolMarshaller . marshall ( attributeDimension . getValues ( ) , VALUES_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class PerspectiveManager { /** * Returns the current perspective , or the NullPerspective instance if * no current perspective is defined . */ public Perspective getCurrentPerspective ( ) { } }
String key = MessageFormat . format ( CURRENT_PERSPECTIVE_KEY , new Object [ ] { pageName } ) ; String id = prefs . get ( key , DEFAULT_PERSPECTIVE_KEY ) ; Iterator it = perspectives . iterator ( ) ; while ( it . hasNext ( ) ) { Perspective perspective = ( Perspective ) it . next ( ) ; if ( id . equals ( perspective . getId ( ) ) ) { return perspective ; } } return NullPerspective . NULL_PERSPECTIVE ;
public class ThrottlingRpcService { /** * Invoked when { @ code req } is throttled . By default , this method responds with a * { @ link HttpStatusException } with { @ code 503 Service Unavailable } . */ @ Override protected RpcResponse onFailure ( ServiceRequestContext ctx , RpcRequest req , @ Nullable Throwable cause ) throws Exception { } }
return RpcResponse . ofFailure ( HttpStatusException . of ( HttpStatus . SERVICE_UNAVAILABLE ) ) ;
public class RandomMngrImpl {
    /**
     * Picks up a port among the available ones in the allowed range (&gt; 9999).
     * Once picked up, it is set as an overridden export on the instance.
     * This method should be called only when {@link #acknowledgePort(Application, Instance, String)}
     * returned <code>false</code>.
     *
     * @param application          the application
     * @param instance             the instance
     * @param exportedVariableName the name of the exported variable
     */
    private void generateRandomPort(Application application, Instance instance, String exportedVariableName) {
        List<Integer> forbiddenPorts = new ArrayList<>();
        // Forbidden ports specified in the preferences (comma-separated list).
        String preferences = this.preferencesMngr.get(IPreferencesMngr.FORBIDDEN_RANDOM_PORTS, "");
        for (String s : Utils.splitNicely(preferences, ",")) {
            if (Utils.isEmptyOrWhitespaces(s))
                continue;
            try {
                forbiddenPorts.add(Integer.parseInt(s));
            } catch (NumberFormatException e) {
                // Skip unparseable entries but log loudly; the rest of the list is still used.
                this.logger.severe("An invalid port was found in the preferences: " + s);
            }
        }
        // Ports already in use by the same agent.
        InstanceContext ctx = findAgentContext(application, instance);
        List<Integer> portsUsedByAgent = this.agentToRandomPorts.get(ctx);
        if (portsUsedByAgent != null)
            forbiddenPorts.addAll(portsUsedByAgent);
        // Now, pick up a port: first free value in [PORT_MIN, PORT_MAX).
        // NOTE(review): despite the method name, selection is deterministic (lowest free
        // port), not random; and forbiddenPorts.contains() is O(n) per probe — a Set
        // would be faster. Behavior kept as-is.
        Integer randomPort = -1;
        for (int i = PORT_MIN; i < PORT_MAX && randomPort == -1; i++) {
            if (!forbiddenPorts.contains(i))
                randomPort = i;
        }
        // Save it in the cache.
        // NOTE(review): if the range is exhausted, -1 is cached and exported as the
        // port value — confirm whether callers guard against that.
        this.logger.fine("Associating a random port to " + exportedVariableName + " in instance " + instance + " of " + application);
        InstanceContext newCtx = findAgentContext(application, instance);
        List<Integer> associatedPorts = this.agentToRandomPorts.get(newCtx);
        if (associatedPorts == null) {
            associatedPorts = new ArrayList<>();
            this.agentToRandomPorts.put(newCtx, associatedPorts);
        }
        associatedPorts.add(randomPort);
        // Inject the variable value in the model.
        // Note: we could edit the graph variable directly.
        // But that would be bad, since several instances of the same component may have
        // different ports. So we cannot put the information in the component. Instead,
        // we put it as an overridden export.
        instance.overriddenExports.put(exportedVariableName, String.valueOf(randomPort));
    }
}
public class MessageBirdServiceImpl { /** * Create a HttpURLConnection connection object * @ param serviceUrl URL that needs to be requested * @ param postData PostDATA , must be not null for requestType is POST * @ param requestType Request type POST requests without a payload will generate a exception * @ return base class * @ throws IOException io exception */ public < P > HttpURLConnection getConnection ( final String serviceUrl , final P postData , final String requestType ) throws IOException { } }
if ( requestType == null || ! REQUEST_METHODS . contains ( requestType ) ) { throw new IllegalArgumentException ( String . format ( REQUEST_METHOD_NOT_ALLOWED , requestType ) ) ; } if ( postData == null && "POST" . equals ( requestType ) ) { throw new IllegalArgumentException ( "POST detected without a payload, please supply a payload with a POST request" ) ; } final URL restService = new URL ( serviceUrl ) ; final HttpURLConnection connection ; if ( proxy != null ) { connection = ( HttpURLConnection ) restService . openConnection ( proxy ) ; } else { connection = ( HttpURLConnection ) restService . openConnection ( ) ; } connection . setDoInput ( true ) ; connection . setRequestProperty ( "Accept" , "application/json" ) ; connection . setUseCaches ( false ) ; connection . setRequestProperty ( "charset" , "utf-8" ) ; connection . setRequestProperty ( "Connection" , "close" ) ; connection . setRequestProperty ( "Authorization" , "AccessKey " + accessKey ) ; connection . setRequestProperty ( "User-agent" , userAgentString ) ; if ( "POST" . equals ( requestType ) || "PATCH" . equals ( requestType ) ) { connection . setRequestMethod ( requestType ) ; connection . setDoOutput ( true ) ; connection . setRequestProperty ( "Content-Type" , "application/json" ) ; ObjectMapper mapper = new ObjectMapper ( ) ; mapper . setSerializationInclusion ( Include . NON_NULL ) ; // Specifically set the date format for POST requests so scheduled // messages and other things relying on specific date formats don ' t // fail when sending . DateFormat df = getDateFormat ( ) ; mapper . setDateFormat ( df ) ; final String json = mapper . writeValueAsString ( postData ) ; connection . getOutputStream ( ) . write ( json . getBytes ( String . valueOf ( StandardCharsets . UTF_8 ) ) ) ; } else if ( "DELETE" . equals ( requestType ) ) { // could have just used rquestType as it is connection . setDoOutput ( false ) ; connection . setRequestMethod ( "DELETE" ) ; connection . 
setRequestProperty ( "Content-Type" , "text/plain" ) ; } else { connection . setDoOutput ( false ) ; connection . setRequestMethod ( "GET" ) ; connection . setRequestProperty ( "Content-Type" , "text/plain" ) ; } return connection ;
public class ZipUtil {
    /**
     * Unwraps a ZIP file to the given directory, shaving off the root dir.
     * If there are multiple root dirs or entries in the root of the zip,
     * a ZipException is thrown.
     * The output directory must not be a file.
     *
     * @param is        input stream for the ZIP file
     * @param outputDir output directory (created automatically if not found)
     */
    public static void unwrap(InputStream is, File outputDir) {
        // Delegate to the mapper-aware overload with the identity mapper,
        // i.e. entry names are kept unchanged.
        unwrap(is, outputDir, IdentityNameMapper.INSTANCE);
    }
}
public class OkCoinMarketDataServiceRaw {
    /**
     * Gets the estimated delivery (settlement) price for a futures contract.
     * (Original Javadoc was in Chinese: "获取交割预估价" — "get the estimated
     * delivery price".)
     *
     * @param currencyPair the currency pair to query
     * @return the estimated-price response from the OkCoin API
     * @throws IOException on network failure
     */
    public OkCoinFutureComment getFutureEstimatedPrice(CurrencyPair currencyPair) throws IOException {
        // "1" is an opaque API argument — presumably a contract/market selector;
        // TODO(review): confirm against the okCoin proxy interface.
        return okCoin.getFutureEstimatedPrice("1", OkCoinAdapters.adaptSymbol(currencyPair));
    }
}
public class ProxiedTrash { /** * Move the path to trash as the owner of the path . * @ param path { @ link org . apache . hadoop . fs . Path } to move . * @ return true if the move succeeded . * @ throws IOException */ public boolean moveToTrashAsOwner ( Path path ) throws IOException { } }
String owner = this . fs . getFileStatus ( path ) . getOwner ( ) ; return moveToTrashAsUser ( path , owner ) ;
public class SVGRenderer { public void startElementContents ( ElementBox elem ) { } }
if ( elem instanceof BlockBox && ( ( BlockBox ) elem ) . getOverflowX ( ) != BlockBox . OVERFLOW_VISIBLE ) { // for blocks with overflow ! = visible generate a clipping group Rectangle cb = elem . getClippedContentBounds ( ) ; String clip = "cssbox-clip-" + idcounter ; out . print ( "<clipPath id=\"" + clip + "\">" ) ; out . print ( "<rect x=\"" + cb . x + "\" y=\"" + cb . y + "\" width=\"" + cb . width + "\" height=\"" + cb . height + "\" />" ) ; out . println ( "</clipPath>" ) ; out . println ( "<g id=\"cssbox-obj-" + idcounter + "\" clip-path=\"url(#" + clip + ")\">" ) ; idcounter ++ ; }
public class TmdbAccount { /** * Get the list of rated movies ( and associated rating ) for an account . * @ param sessionId * @ param accountId * @ param page * @ param sortBy * @ param language * @ return * @ throws MovieDbException */ public ResultList < MovieBasic > getRatedMovies ( String sessionId , int accountId , Integer page , String sortBy , String language ) throws MovieDbException { } }
TmdbParameters parameters = new TmdbParameters ( ) ; parameters . add ( Param . SESSION_ID , sessionId ) ; parameters . add ( Param . ID , accountId ) ; parameters . add ( Param . PAGE , page ) ; parameters . add ( Param . SORT_BY , sortBy ) ; parameters . add ( Param . LANGUAGE , language ) ; URL url = new ApiUrl ( apiKey , MethodBase . ACCOUNT ) . subMethod ( MethodSub . RATED_MOVIES ) . buildUrl ( parameters ) ; WrapperGenericList < MovieBasic > wrapper = processWrapper ( getTypeReference ( MovieBasic . class ) , url , "rated movies" ) ; return wrapper . getResultsList ( ) ;
public class TeaToolsUtils {
    /**
     * Returns true if the specified fileName ends with the specified
     * file extension. The comparison is case-insensitive and ignores
     * leading/trailing whitespace on the file name.
     *
     * @param fileName  the file name to check (may be null)
     * @param extension the extension to look for (may be null)
     * @return true if fileName ends with extension; false if either is null
     */
    public boolean compareFileExtension(String fileName, String extension) {
        if (fileName == null || extension == null) {
            return false;
        }
        // Use a fixed locale so case folding is stable regardless of the default
        // locale (e.g. the Turkish dotless-i problem with plain toLowerCase()).
        fileName = fileName.toLowerCase(java.util.Locale.ENGLISH).trim();
        extension = extension.toLowerCase(java.util.Locale.ENGLISH);
        return fileName.endsWith(extension);
    }
}
public class AmazonKinesisAnalyticsV2Client { /** * Deletes an < a > InputProcessingConfiguration < / a > from an input . * @ param deleteApplicationInputProcessingConfigurationRequest * @ return Result of the DeleteApplicationInputProcessingConfiguration operation returned by the service . * @ throws ResourceNotFoundException * Specified application can ' t be found . * @ throws ResourceInUseException * The application is not available for this operation . * @ throws InvalidArgumentException * The specified input parameter value is not valid . * @ throws ConcurrentModificationException * Exception thrown as a result of concurrent modifications to an application . This error can be the result * of attempting to modify an application without using the current application ID . * @ throws InvalidRequestException * The request JSON is not valid for the operation . * @ sample AmazonKinesisAnalyticsV2 . DeleteApplicationInputProcessingConfiguration * @ see < a * href = " http : / / docs . aws . amazon . com / goto / WebAPI / kinesisanalyticsv2-2018-05-23 / DeleteApplicationInputProcessingConfiguration " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeleteApplicationInputProcessingConfigurationResult deleteApplicationInputProcessingConfiguration ( DeleteApplicationInputProcessingConfigurationRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteApplicationInputProcessingConfiguration ( request ) ;
public class DataDirEntry { /** * more convenient use of the API */ public Optional < SectionHeader > maybeGetSectionTableEntry ( SectionTable table ) { } }
checkArgument ( table != null , "table must not be null" ) ; List < SectionHeader > sections = table . getSectionHeaders ( ) ; // loop through all section headers to check if entry is within section for ( SectionHeader header : sections ) { long vSize = header . getAlignedVirtualSize ( ) ; // corkami : " a section can have a null VirtualSize : in this case , // only the SizeOfRawData is taken into consideration . " // see : https : / / code . google . com / p / corkami / wiki / PE # section _ table if ( vSize == 0 ) { vSize = header . getAlignedSizeOfRaw ( ) ; } long vAddress = header . getAlignedVirtualAddress ( ) ; logger . debug ( "check if rva is within " + vAddress + " and " + ( vAddress + vSize ) ) ; // return header if data entry va points into it if ( rvaIsWithin ( new VirtualLocation ( vAddress , vSize ) ) ) { return Optional . of ( header ) ; } } logger . warn ( "there is no entry that matches data dir entry RVA " + virtualAddress ) ; return Optional . absent ( ) ;
public class IndexResolverReplacer {
    /**
     * dnfof
     * Replaces the indices of the given transport request. When {@code retainMode}
     * is set and the original indices are not the "all with no remote" form,
     * only the resolved indices matching one of the replacements (plus all
     * resolved remote indices) are retained; otherwise the replacements are
     * used as-is. Requests that do not support index replacement are left
     * untouched (NOOP).
     *
     * @param request the transport request whose indices are rewritten
     * @param retainMode whether to intersect the resolved indices with the replacements
     * @param replacements the replacement index names / patterns
     * @return the result of {@code getOrReplaceAllIndices} for this request
     */
    public boolean replace(final TransportRequest request, boolean retainMode, String... replacements) {
        return getOrReplaceAllIndices(request, new IndicesProvider() {
            @Override
            public String[] provide(String[] original, Object request, boolean supportsReplace) {
                if (supportsReplace) {
                    // Retain only the resolved indices that match the
                    // replacements, keeping remote indices untouched.
                    if (retainMode && !isAllWithNoRemote(original)) {
                        final Resolved resolved = resolveRequest(request);
                        final List<String> retained = WildcardMatcher.getMatchAny(resolved.getAllIndices(), replacements);
                        retained.addAll(resolved.getRemoteIndices());
                        return retained.toArray(new String[0]);
                    }
                    return replacements;
                } else {
                    // This request type does not allow replacing indices.
                    return NOOP;
                }
            }
        }, false);
    }
}
public class ElasticPoolActivitiesInner {
    /**
     * Returns elastic pool activities.
     * Blocks on the async service call and unwraps its body.
     *
     * @param resourceGroupName the name of the resource group that contains the resource; you can obtain this value from the Azure Resource Manager API or the portal
     * @param serverName the name of the server
     * @param elasticPoolName the name of the elastic pool for which to get the current activity
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;ElasticPoolActivityInner&gt; object if successful
     */
    public List<ElasticPoolActivityInner> listByElasticPool(String resourceGroupName, String serverName, String elasticPoolName) {
        // Synchronously wait for the single async response and return its payload.
        return listByElasticPoolWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName).toBlocking().single().body();
    }
}
public class DrawerBuilder { /** * Defines a Adapter which wraps the main Adapter used in the RecyclerView to allow extended navigation and other stuff * @ param adapterWrapper * @ return */ public DrawerBuilder withAdapterWrapper ( @ NonNull RecyclerView . Adapter adapterWrapper ) { } }
if ( mAdapter == null ) { throw new RuntimeException ( "this adapter has to be set in conjunction to a normal adapter which is used inside this wrapper adapter" ) ; } this . mAdapterWrapper = adapterWrapper ; return this ;
public class CmsSolrSpellchecker { /** * Performs the actual spell check query using Solr . * @ param request the spell check request * @ return Results of the Solr spell check of type SpellCheckResponse or null if something goes wrong . */ private SpellCheckResponse performSpellcheckQuery ( CmsSpellcheckingRequest request ) { } }
if ( ( null == request ) || ! request . isInitialized ( ) ) { return null ; } final String [ ] wordsToCheck = request . m_wordsToCheck ; final ModifiableSolrParams params = new ModifiableSolrParams ( ) ; params . set ( "spellcheck" , "true" ) ; params . set ( "spellcheck.dictionary" , request . m_dictionaryToUse ) ; params . set ( "spellcheck.extendedResults" , "true" ) ; // Build one string from array of words and use it as query . final StringBuilder builder = new StringBuilder ( ) ; for ( int i = 0 ; i < wordsToCheck . length ; i ++ ) { builder . append ( wordsToCheck [ i ] + " " ) ; } params . set ( "spellcheck.q" , builder . toString ( ) ) ; final SolrQuery query = new SolrQuery ( ) ; query . setRequestHandler ( "/spell" ) ; query . add ( params ) ; try { QueryResponse qres = m_solrClient . query ( query ) ; return qres . getSpellCheckResponse ( ) ; } catch ( Exception e ) { LOG . debug ( "Exception while performing spellcheck query..." , e ) ; } return null ;
public class DeepSparkContext { /** * Creates a JavaRDD of SparkSQL rows * @ param cellsRDD RDD of cells for transforming . * @ return Java RDD of SparkSQL rows * @ throws UnsupportedDataTypeException */ public static JavaRDD < Row > createJavaRowRDD ( JavaRDD < Cells > cellsRDD ) throws UnsupportedDataTypeException { } }
JavaRDD < Row > result = cellsRDD . map ( new Function < Cells , Row > ( ) { @ Override public Row call ( Cells cells ) throws Exception { return CellsUtils . getRowFromCells ( cells ) ; } } ) ; return result ;
public class ClientFactory {
    /**
     * Asynchronously creates a message receiver to the entity using the client
     * settings in PeekLock mode.
     *
     * @param namespaceName namespace of entity
     * @param entityPath path of entity
     * @param clientSettings client settings
     * @return a CompletableFuture representing the pending creation of message receiver
     */
    public static CompletableFuture<IMessageReceiver> createMessageReceiverFromEntityPathAsync(String namespaceName, String entityPath, ClientSettings clientSettings) {
        // Delegate to the overload that takes an explicit receive mode, using
        // the default mode constant.
        return createMessageReceiverFromEntityPathAsync(namespaceName, entityPath, clientSettings, DEFAULTRECEIVEMODE);
    }
}
public class SchemaTypeAdapter { /** * Constructs { @ link Schema . Type # MAP MAP } type schema from the json input . * @ param reader The { @ link JsonReader } for streaming json input tokens . * @ param knownRecords Set of record name already encountered during the reading . * @ return A { @ link Schema } of type { @ link Schema . Type # MAP MAP } . * @ throws java . io . IOException When fails to construct a valid schema from the input . */ private Schema readMap ( JsonReader reader , Set < String > knownRecords ) throws IOException { } }
return Schema . mapOf ( readInnerSchema ( reader , "keys" , knownRecords ) , readInnerSchema ( reader , "values" , knownRecords ) ) ;
public class ServerStatusTool {
    /**
     * Watch the status file and print details to standard output until the
     * STOPPED or STOPPED_WITH_ERR state is encountered. If there are any
     * problems reading the status file, a timeout is reached, or
     * STOPPED_WITH_ERR is encountered, this will throw an exception.
     *
     * @param stoppingTimeout maximum seconds to wait for the STOPPING state to appear
     * @param shutdownTimeout maximum seconds to wait for the STOPPED state to appear
     * @throws Exception if shutdown does not begin/complete in time or finishes with an error
     */
    public void watchShutdown(int stoppingTimeout, int shutdownTimeout) throws Exception {
        if (!_statusFile.exists()) {
            _statusFile.append(ServerState.STOPPING, "WARNING: Server status file did not exist; re-created");
        }
        // use this for timeout checks later
        long startTime = System.currentTimeMillis();
        ServerStatusMessage[] messages = getAllMessages();
        ServerStatusMessage lastMessage = messages[messages.length - 1];
        boolean stopping = false;
        boolean stopped = false;
        while (!stopped) {
            showShutdown(messages);
            // update stopping and stopped flags, and
            // throw a shutdown exception if STOPPED_WITH_ERR
            // is encountered
            for (ServerStatusMessage element : messages) {
                ServerState state = element.getState();
                if (state == ServerState.STOPPING) {
                    stopping = true;
                } else if (state == ServerState.STOPPED) {
                    stopped = true;
                } else if (state == ServerState.STOPPED_WITH_ERR) {
                    throw new Exception("Fedora shutdown finished with error (see above)");
                }
            }
            if (!stopped) {
                // wait half a second
                try {
                    Thread.sleep(500);
                } catch (Throwable th) {
                }
                // throw an exception if either timeout has been
                // exceeded
                long now = System.currentTimeMillis();
                if (!stopping) {
                    if ((now - startTime) / 1000 > stoppingTimeout) {
                        throw new Exception("Server shutdown did not begin within " + stoppingTimeout + " seconds");
                    }
                }
                if ((now - startTime) / 1000 > shutdownTimeout) {
                    throw new Exception("Server shutdown did not complete within " + shutdownTimeout + " seconds");
                }
                // get next batch of messages (only those that arrived after
                // the last message already seen)
                messages = _statusFile.getMessages(lastMessage);
                if (messages.length > 0) {
                    lastMessage = messages[messages.length - 1];
                }
            }
        }
    }
}
public class UTF16 {
    /**
     * Shifts offset16 by the argument number of codepoints.
     * A surrogate pair counts as a single code point but occupies two chars,
     * so the returned 16-bit offset can move by more than {@code shift32}
     * char positions.
     *
     * @param source string
     * @param offset16 UTF16 position to shift
     * @param shift32 number of codepoints to shift; negative values move backwards
     * @return new shifted offset16
     * @exception IndexOutOfBoundsException if the new offset16 is out of bounds
     */
    public static int moveCodePointOffset(String source, int offset16, int shift32) {
        int result = offset16;
        int size = source.length();
        int count;
        char ch;
        if (offset16 < 0 || offset16 > size) {
            throw new StringIndexOutOfBoundsException(offset16);
        }
        if (shift32 > 0) {
            // Moving forward: even in the best case (no surrogates) we cannot
            // advance by more chars than remain in the string.
            if (shift32 + offset16 > size) {
                throw new StringIndexOutOfBoundsException(offset16);
            }
            count = shift32;
            while (result < size && count > 0) {
                ch = source.charAt(result);
                // A lead surrogate followed by a trail surrogate is one code
                // point spanning two chars; consume the extra char here.
                if (isLeadSurrogate(ch) && ((result + 1) < size) && isTrailSurrogate(source.charAt(result + 1))) {
                    result++;
                }
                count--;
                result++;
            }
        } else {
            if (offset16 + shift32 < 0) {
                throw new StringIndexOutOfBoundsException(offset16);
            }
            // Moving backward: step one code point at a time, stepping over
            // the lead surrogate of a pair as part of the same code point.
            for (count = -shift32; count > 0; count--) {
                result--;
                if (result < 0) {
                    break;
                }
                ch = source.charAt(result);
                if (isTrailSurrogate(ch) && result > 0 && isLeadSurrogate(source.charAt(result - 1))) {
                    result--;
                }
            }
        }
        // A non-zero count means we ran off the end of the string before
        // consuming all requested code points.
        if (count != 0) {
            throw new StringIndexOutOfBoundsException(shift32);
        }
        return result;
    }
}
public class SRTServletResponse {
    /**
     * Adds a header field with the specified string value. If this is called
     * more than once, the current value will replace the previous value.
     *
     * @param name the header field name
     * @param s the field's string value
     */
    public void setHeader(String name, String s) {
        // Trace the header when fine-grained tracing is enabled; the value is
        // passed through PasswordNullifier before logging.
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) { // 306998.15
            logger.logp(Level.FINE, CLASS_NAME, "setHeader", " name --> " + name + " value --> " + PasswordNullifier.nullifyParams(s), "[" + this + "]");
        }
        // Begin: 248739
        // Add methods for DRS - Hot failover to set internal headers without checking
        // if the request is an include.
        setHeader(name, s, true);
    }
}
public class FairSchedulerAdmissionControlServlet {
    /**
     * Print a view of not admitted jobs to the given output writer.
     * Renders a filterable, sortable HTML table with one row per job the
     * scheduler has not yet admitted.
     *
     * @param out where to dump the output
     * @param userFilterSet only show jobs from these users if not null
     * @param poolFilterSet only show jobs from these pools if not null
     */
    private void showJobsNotAdmitted(PrintWriter out, Set<String> userFilterSet, Set<String> poolFilterSet) {
        out.print("<h2>Not Admitted Jobs</h2>\n");
        // Filter controls: a free-text filter box plus per-column toggles.
        out.print("<b>Filter</b> "
                + "<input type=\"text\" onkeyup=\"filterTables(this.value)\" "
                + "id=\"NotAdmittedJobsTableFilter\">"
                + "<input type=\"checkbox\" id=\"SubmittedTimeFilterToggle\" "
                + "onChange=\"filterTables(inputRJF.value)\" checked>Submitted Time "
                + "<input type=\"checkbox\" id=\"JobIDFilterToggle\" "
                + "onChange=\"filterTables(inputRJF.value)\" checked>JobID "
                + "<input type=\"checkbox\" id=\"UserFilterToggle\" "
                + "onChange=\"filterTables(inputRJF.value)\" checked>User "
                + "<input type=\"checkbox\" id=\"PoolFilterToggle\" "
                + "onChange=\"filterTables(inputRJF.value)\" checked>Pool "
                + "<input type=\"checkbox\" id=\"PrioFilterToggle\" "
                + "onChange=\"filterTables(inputRJF.value)\" checked>Priority"
                + "<br><br>\n");
        out.print("<script type=\"text/javascript\">var inputRJF = "
                + "document.getElementById('NotAdmittedJobsTableFilter');</script>");
        out.print("<table border=\"2\" cellpadding=\"5\" cellspacing=\"2\" "
                + "id=\"NotAdmittedJobsTable\" class=\"tablesorter\">\n");
        // Table header row.
        out.printf("<thead><tr>"
                + "<th>Submitted Time</th>"
                + "<th>JobID</th>"
                + "<th>User</th>"
                + "<th>Pool</th>"
                + "<th>Priority</th>"
                + "<th>Reason</th>"
                + "<th>Job Position</th>"
                + "<th>ETA to Admission (secs)</th>");
        out.print("</tr></thead><tbody>\n");
        Collection<NotAdmittedJobInfo> notAdmittedJobInfos = scheduler.getNotAdmittedJobs();
        for (NotAdmittedJobInfo jobInfo : notAdmittedJobInfos) {
            // Apply the optional user / pool filters.
            if ((userFilterSet != null) && !userFilterSet.contains(jobInfo.getUser())) {
                continue;
            }
            if ((poolFilterSet != null) && !poolFilterSet.contains(jobInfo.getPool())) {
                continue;
            }
            // One table row per job, keyed by the job name for filtering.
            out.printf("<tr id=\"%s\">\n", jobInfo.getJobName());
            out.printf("<td>%s</td>\n", DATE_FORMAT.format(jobInfo.getStartDate()));
            out.printf("<td><a href=\"jobdetails.jsp?jobid=%s\">%s</a></td>", jobInfo.getJobName(), jobInfo.getJobName());
            out.printf("<td>%s</td>\n", jobInfo.getUser());
            out.printf("<td>%s</td>\n", jobInfo.getPool());
            out.printf("<td>%s</td>\n", jobInfo.getPriority());
            out.printf("<td>%s</td>\n", jobInfo.getReason());
            out.printf("<td>%d</td>\n", jobInfo.getHardAdmissionPosition());
            out.printf("<td>%d</td>\n", jobInfo.getEstimatedHardAdmissionEntranceSecs());
            out.print("</tr>\n");
        }
        out.print("</tbody></table>\n");
    }
}
public class ServletHttpRequest { /** * Unwrap a ServletRequest . * @ see javax . servlet . ServletRequestWrapper * @ see javax . servlet . http . HttpServletRequestWrapper * @ param request * @ return The core ServletHttpRequest which must be the * underlying request object */ public static ServletHttpRequest unwrap ( ServletRequest request ) { } }
while ( ! ( request instanceof ServletHttpRequest ) ) { if ( request instanceof ServletRequestWrapper ) { ServletRequestWrapper wrapper = ( ServletRequestWrapper ) request ; request = wrapper . getRequest ( ) ; } else throw new IllegalArgumentException ( "Does not wrap ServletHttpRequest" ) ; } return ( ServletHttpRequest ) request ;
public class JavaColonNamespaceBindings {
    /**
     * Return the context portion of a compound name: everything before the
     * final '/' separator, or the empty string when no separator is present.
     *
     * @param name the compound name
     * @return the context prefix (possibly empty)
     */
    static String getContextName(String name) {
        final int lastSlash = name.lastIndexOf('/');
        if (lastSlash < 0) {
            // No separator: the whole name is a leaf, so the context is empty.
            return "";
        }
        return name.substring(0, lastSlash);
    }
}
public class AdminParserUtils { /** * Checks if there ' s at most one option that exists among all opts . * @ param parser OptionParser to checked * @ param opt1 Optional option to check * @ param opt2 Optional option to check * @ throws VoldemortException */ public static void checkOptional ( OptionSet options , String opt1 , String opt2 ) { } }
List < String > opts = Lists . newArrayList ( ) ; opts . add ( opt1 ) ; opts . add ( opt2 ) ; checkOptional ( options , opts ) ;
public class ShanksAgentBayesianReasoningCapability { /** * Add information to the Bayesian network to reason with it . * @ param bn * @ param nodeName * @ param status * @ throws ShanksException */ public static void addEvidence ( Network bn , String nodeName , String status ) throws ShanksException { } }
if ( bn == null || nodeName == null || status == null ) { throw new ShanksException ( "Null parameter in addEvidence method." ) ; } try { if ( ! bn . isPropagatedEvidence ( nodeName ) ) { if ( bn . isRealEvidence ( nodeName ) ) { bn . clearEvidence ( nodeName ) ; } bn . setEvidence ( nodeName , status ) ; bn . updateBeliefs ( ) ; } } catch ( Exception e ) { bn . updateBeliefs ( ) ; HashMap < String , Float > belief = ShanksAgentBayesianReasoningCapability . getNodeStatesHypotheses ( bn , nodeName ) ; String msg = e . getMessage ( ) + " -> values for node: " + nodeName + " -> " ; boolean zeroBel = false ; for ( Entry < String , Float > entry : belief . entrySet ( ) ) { if ( status . equals ( entry . getKey ( ) ) && entry . getValue ( ) == 0.0f ) { zeroBel = true ; // Impossible value calculated by inference ( this is because the BN is a big shit . . . // But it is not a real error . . . break ; } msg = msg + entry . getKey ( ) + "-" + entry . getValue ( ) + " " ; } if ( ! zeroBel ) { throw new ShanksException ( msg ) ; } }
public class PortletDefinitionImporterExporter { /** * Check that a permission type from the XML file matches with a real permission . * @ param system The name of the permission manager * @ param activity The name of the permission to search for . * @ return the permission type string to use * @ throws IllegalArgumentException if an unsupported permission type is specified */ private ExternalPermissionDefinition toExternalPermissionDefinition ( String system , String activity ) { } }
ExternalPermissionDefinition def = ExternalPermissionDefinition . find ( system , activity ) ; if ( def != null ) { return def ; } String delim = "" ; StringBuilder buffer = new StringBuilder ( ) ; for ( ExternalPermissionDefinition perm : ExternalPermissionDefinition . values ( ) ) { buffer . append ( delim ) ; buffer . append ( perm . toString ( ) ) ; delim = ", " ; } throw new IllegalArgumentException ( "Permission type " + system + "." + activity + " is not supported. " + "The only supported permissions at this time are: " + buffer . toString ( ) ) ;
public class PgBinaryWriter { /** * Writes primitive float to the output stream * @ param value value to write */ public void writeFloat ( float value ) { } }
try { buffer . writeInt ( 4 ) ; buffer . writeFloat ( value ) ; } catch ( Exception e ) { throw new BinaryWriteFailedException ( e ) ; }
public class Reflect { /** * Compile a class at runtime and reflect on it . * For example : * < code > < pre > * Supplier & lt ; String > supplier = Reflect . compile ( * " org . joor . Test " , * " package org . joor ; \ n " + * " class Test implements java . util . function . Supplier & lt ; String > { \ n " + * " public String get ( ) { \ n " + * " return \ " Hello World ! \ " ; \ n " + * " } \ n " ) . create ( ) . get ( ) ; * < / pre > < / code > * @ param name The qualified class name * @ param content The source code for the class * @ param options compiler options * @ return A wrapped { @ link Class } * @ throws ReflectException if anything went wrong compiling the class . */ public static Reflect compile ( String name , String content , CompileOptions options ) throws ReflectException { } }
return onClass ( Compile . compile ( name , content , options ) ) ;
public class KAFDocument { /** * Creates a factualitylayer object and add it to the document * @ param term the Term of the coreference . * @ return a new factuality . */ public Factvalue newFactvalue ( WF wf , String prediction ) { } }
Factvalue factuality = new Factvalue ( wf , prediction ) ; annotationContainer . add ( factuality , Layer . FACTUALITY_LAYER , AnnotationType . FACTVALUE ) ; return factuality ;
public class Maybe {
    /**
     * {@inheritDoc}
     * <p>
     * Delegates to the default {@code Monad} implementation and coerces the
     * result back to {@code Maybe}.
     */
    @Override
    public final <B> Maybe<B> discardL(Applicative<B, Maybe<?>> appB) {
        return Monad.super.discardL(appB).coerce();
    }
}
public class AbstractJoynrServletModule {
    /**
     * Sets up filters that are annotated with the {@link WebFilter} annotation.
     * The packages to scan are taken from the IO_JOYNR_APPS_PACKAGES system
     * property, falling back to the same key in servlet.properties. Every
     * scanned class carrying {@code @WebFilter} that also implements
     * {@link Filter} is bound as a singleton and mapped to "/*".
     */
    @SuppressWarnings("unchecked")
    private void bindAnnotatedFilters() {
        String appsPackages = null;
        // Prefer the system property; fall back to servlet.properties.
        if (System.getProperties().containsKey(IO_JOYNR_APPS_PACKAGES)) {
            logger.info("Using property {} from system properties", IO_JOYNR_APPS_PACKAGES);
            appsPackages = System.getProperty(IO_JOYNR_APPS_PACKAGES);
        } else {
            Properties servletProperties = PropertyLoader.loadProperties("servlet.properties");
            if (servletProperties.containsKey(IO_JOYNR_APPS_PACKAGES)) {
                appsPackages = servletProperties.getProperty(IO_JOYNR_APPS_PACKAGES);
            }
        }
        if (appsPackages != null) {
            // The package list is ';'-separated.
            String[] packageNames = appsPackages.split(";");
            logger.info("Searching packages for @WebFilter annotation: {}", Arrays.toString(packageNames));
            PackageNamesScanner scanner = new PackageNamesScanner(packageNames);
            AnnotationScannerListener sl = new AnnotationScannerListener(WebFilter.class);
            scanner.scan(sl);
            for (Class<?> webFilterAnnotatedClass : sl.getAnnotatedClasses()) {
                // Only classes that actually implement Filter can be registered.
                if (Filter.class.isAssignableFrom(webFilterAnnotatedClass)) {
                    bind(webFilterAnnotatedClass).in(Singleton.class);
                    filter("/*").through((Class<? extends Filter>) webFilterAnnotatedClass);
                    logger.info("Adding filter {} for '/*'", webFilterAnnotatedClass.getName());
                }
            }
        }
    }
}
public class ClientSecurityContextStore { /** * { @ inheritDoc } */ @ Override public Principal getCurrentPrincipal ( ) { } }
Principal principal = null ; // Get hold of the current subject Subject subject = null ; try { subject = WSSubject . getCallerSubject ( ) ; if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Current subject: " , subject ) ; } } catch ( WSSecurityException e ) { if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Failed to get current subject" , e ) ; } } // If we have a subject , extract the first principal if ( subject != null ) { Set < Principal > principals = subject . getPrincipals ( ) ; if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Number of principals: " , principals . size ( ) ) ; } if ( ! principals . isEmpty ( ) ) { principal = principals . iterator ( ) . next ( ) ; } } return principal ;
public class AbstractSerializerCollection { /** * Register the given serializer if it has a name . * @ param from * @ param serializer */ protected void registerIfNamed ( Class < ? > from , Serializer < ? > serializer ) { } }
if ( from . isAnnotationPresent ( Named . class ) ) { Named named = from . getAnnotation ( Named . class ) ; QualifiedName key = new QualifiedName ( named . namespace ( ) , named . name ( ) ) ; nameToSerializer . put ( key , serializer ) ; serializerToName . put ( serializer , key ) ; }
public class FibonacciHeap {
    /**
     * Utility function which, given two pointers into disjoint circularly-linked
     * lists, merges the two lists together into one circularly-linked list in
     * O(1) time. Because the lists may be empty, the return value is the only
     * pointer that's guaranteed to be to an element of the resulting list.
     *
     * This function assumes that one and two are the minimum elements of the
     * lists they are in, and returns a pointer to whichever is smaller. If this
     * condition does not hold, the return value is some arbitrary pointer into
     * the doubly-linked list.
     *
     * @param one a pointer into one of the two linked lists
     * @param two a pointer into the other of the two linked lists
     * @return a pointer to the smallest element of the resulting list
     */
    private static <T> Entry<T> mergeLists(Entry<T> one, Entry<T> two) {
        /* There are four cases depending on whether the lists are null or not.
         * We consider each separately. */
        if (one == null && two == null) {
            // Both null, resulting list is null.
            return null;
        } else if (one != null && two == null) {
            // Two is null, result is one.
            return one;
        } else if (one == null && two != null) {
            // One is null, result is two.
            return two;
        } else {
            // Both non-null; actually do the splice.
            /* Exchange the successors of one and two, then repair the prev
             * pointers of the nodes that now follow them. This cross-links
             * the two circular lists into a single circular list:
             *
             *   before:  ... -> one -> oneNext -> ...   ... -> two -> twoNext -> ...
             *   after:   ... -> one -> twoNext -> ... -> two -> oneNext -> ...
             */
            Entry<T> oneNext = one.mNext; // Cache this since we're about to overwrite it.
            one.mNext = two.mNext;
            one.mNext.mPrev = one;
            two.mNext = oneNext;
            two.mNext.mPrev = two;
            /* Return a pointer to whichever's smaller. */
            return one.mPriority < two.mPriority ? one : two;
        }
    }
}
public class AvroFactory { /** * Creates Avro Writer and Reader for a specific type . * < p > Given an input type , and possible the current schema , and a previously known schema ( also known as writer * schema ) create will deduce the best way to initalize a reader and writer according to the following rules : * < ul > * < li > If type is an Avro generated class ( an { @ link SpecificRecord } then the reader would use the * previousSchema for reading ( if present ) otherwise it would use the schema attached to the auto generated * class . * < li > If the type is a GenericRecord then the reader and the writer would be created with the supplied * ( mandatory ) schema . * < li > Otherwise , we use Avro ' s reflection based reader and writer that would deduce the schema via reflection . * If the previous schema is also present ( when restoring a serializer for example ) then the reader would be * created with both schemas . * < / ul > */ static < T > AvroFactory < T > create ( Class < T > type , @ Nullable Schema currentSchema , @ Nullable Schema previousSchema ) { } }
final ClassLoader cl = Thread . currentThread ( ) . getContextClassLoader ( ) ; if ( SpecificRecord . class . isAssignableFrom ( type ) ) { return fromSpecific ( type , cl , Optional . ofNullable ( previousSchema ) ) ; } if ( GenericRecord . class . isAssignableFrom ( type ) ) { return fromGeneric ( cl , currentSchema ) ; } return fromReflective ( type , cl , Optional . ofNullable ( previousSchema ) ) ;
public class MarkdownHelper {
    /**
     * Extracts the tag name from an XML element string such as
     * {@code "<div>"} or {@code "</div>"}.
     * <p>
     * The input must be non-null and start with '&lt;'. Unlike the previous
     * version, unterminated input such as {@code "<em"} no longer throws
     * StringIndexOutOfBoundsException; the scan stops at the end of the string.
     *
     * @param in input string (never null)
     * @return the lower-case XML tag name (possibly empty)
     */
    public static String getXMLTag(final String in) {
        final StringBuilder tag = new StringBuilder();
        final int len = in.length();
        int pos = 1;
        // Skip the '/' of a closing tag.
        if (pos < len && in.charAt(pos) == '/') {
            pos++;
        }
        // Collect the leading run of letters/digits, stopping at the end of
        // the string instead of indexing past it.
        while (pos < len && Character.isLetterOrDigit(in.charAt(pos))) {
            tag.append(in.charAt(pos));
            pos++;
        }
        return tag.toString().toLowerCase(Locale.US);
    }
}
public class JdbcQueue { /** * { @ inheritDoc } */ @ Override public boolean queue ( IQueueMessage < ID , DATA > msg ) { } }
if ( msg == null ) { return false ; } try { try ( Connection conn = jdbcHelper . getConnection ( ) ) { return _queueWithRetries ( conn , msg . clone ( ) , 0 , this . maxRetries ) ; } } catch ( Exception e ) { final String logMsg = "(queue) Exception [" + e . getClass ( ) . getName ( ) + "]: " + e . getMessage ( ) ; LOGGER . error ( logMsg , e ) ; throw e instanceof QueueException ? ( QueueException ) e : new QueueException ( e ) ; }
public class EntityListenersIntrospector { /** * Validates and registers the given callback method . * @ param method * the callback method * @ param callbackType * the callback type */ private void validateExternalCallback ( Method method , CallbackType callbackType ) { } }
Class < ? > [ ] parameters = method . getParameterTypes ( ) ; if ( ! parameters [ 0 ] . isAssignableFrom ( entityClass ) ) { String message = String . format ( "Method %s in class %s is not valid for entity %s" , method . getName ( ) , method . getDeclaringClass ( ) . getName ( ) , entityClass . getName ( ) ) ; throw new EntityManagerException ( message ) ; } CallbackMetadata callbackMetadata = new CallbackMetadata ( EntityListenerType . EXTERNAL , callbackType , method ) ; metadata . put ( callbackType , callbackMetadata ) ;
public class CreateAppRequest { /** * Custom rewrite / redirect rules for an Amplify App . * @ param customRules * Custom rewrite / redirect rules for an Amplify App . */ public void setCustomRules ( java . util . Collection < CustomRule > customRules ) { } }
if ( customRules == null ) { this . customRules = null ; return ; } this . customRules = new java . util . ArrayList < CustomRule > ( customRules ) ;
public class Logger { /** * Print a log in a new line . * @ param logLevel the log level of the printing log * @ param format the format of the printing log , null if just need to concat arguments * @ param args the arguments of the printing log */ private void println ( int logLevel , String format , Object ... args ) { } }
if ( logLevel < logConfiguration . logLevel ) { return ; } printlnInternal ( logLevel , formatArgs ( format , args ) ) ;
public class ApacheHTTPSender { /** * { @ inheritDoc } */ public HTTPResponse send ( final CMSessionParams params , final AbstractBody body ) { } }
HttpClient mClient ; BOSHClientConfig mCfg ; lock . lock ( ) ; try { if ( httpClient == null ) { httpClient = initHttpClient ( cfg ) ; } mClient = httpClient ; mCfg = cfg ; } finally { lock . unlock ( ) ; } return new ApacheHTTPResponse ( mClient , mCfg , params , body ) ;
public class HttpUtils { /** * Execute get http response . * @ param url the url * @ param basicAuthUsername the basic auth username * @ param basicAuthPassword the basic auth password * @ param parameters the parameters * @ param headers the headers * @ return the http response */ public static HttpResponse executeGet ( final String url , final String basicAuthUsername , final String basicAuthPassword , final Map < String , Object > parameters , final Map < String , Object > headers ) { } }
try { return execute ( url , HttpMethod . GET . name ( ) , basicAuthUsername , basicAuthPassword , parameters , headers ) ; } catch ( final Exception e ) { LOGGER . error ( e . getMessage ( ) , e ) ; } return null ;
public class ClassTypeInformation {
    /**
     * Little helper to allow us to create a generified map, actually just to
     * satisfy the compiler.
     *
     * @param type must not be {@literal null}
     * @return the type variable map for the given type
     */
    private static Map<TypeVariable<?>, Type> getTypeVariableMap(Class<?> type) {
        // Delegate to the two-argument overload with a fresh, empty set of
        // already-visited types.
        return getTypeVariableMap(type, new HashSet<Type>());
    }
}
public class MathExpressions {
    /**
     * Create a {@code tan(num)} expression.
     * <p>Returns the tangent of an angle of num radians.</p>
     *
     * @param num numeric expression
     * @return tan(num)
     */
    public static <A extends Number & Comparable<?>> NumberExpression<Double> tan(Expression<A> num) {
        // Build a Double-typed number operation applying the TAN operator.
        return Expressions.numberOperation(Double.class, Ops.MathOps.TAN, num);
    }
}
public class CmsJspTagScaleImage {

    /**
     * Handles the start tag: checks some parameters, uses the CmsImageScaler to
     * create a scaled version of the image (and hi-DPI variants if necessary),
     * stores all information in an image bean and publishes it as a request
     * attribute under the name given by the tag attribute "var".
     *
     * @return EVAL_BODY_INCLUDE, or SKIP_BODY in case of an unexpected
     *         exception (consult the OpenCms log file if that happens)
     */
    @Override
    public int doStartTag() {
        ServletRequest req = pageContext.getRequest();
        // this will always be true if the page is called through OpenCms
        if (CmsFlexController.isCmsRequest(req)) {
            try {
                CmsJspImageBean scaledImage = null;
                try {
                    CmsFlexController controller = CmsFlexController.getController(req);
                    CmsObject cms = controller.getCmsObject();
                    // Resolve the (possibly relative) src against the current element URI.
                    String src = CmsLinkManager.getAbsoluteUri(m_src, controller.getCurrentRequest().getElementUri());
                    scaledImage = imageTagAction(cms, src, m_scaler, m_hiDpiVariantList);
                } catch (CmsException e) {
                    // any issue accessing the VFS - just log a warning and leave
                    // scaledImage null; otherwise template layout would get mixed
                    // up with nasty exception messages
                    if (LOG.isWarnEnabled()) {
                        LOG.warn(Messages.get().getBundle().key(Messages.ERR_IMAGE_TAG_VFS_ACCESS_1, m_src), e);
                    }
                }
                // NOTE: the attribute is set even when scaling failed (value null).
                pageContext.getRequest().setAttribute(m_var, scaledImage);
            } catch (Exception ex) {
                // Unexpected failure: log and suppress the tag body entirely.
                if (LOG.isErrorEnabled()) {
                    LOG.error(Messages.get().getBundle().key(Messages.ERR_PROCESS_TAG_1, "scaleImage"), ex);
                }
                return SKIP_BODY;
            }
        }
        return EVAL_BODY_INCLUDE;
    }
}
public class MethodWriterImpl { /** * { @ inheritDoc } */ public Content getMethodDocTreeHeader ( MethodDoc method , Content methodDetailsTree ) { } }
String erasureAnchor ; if ( ( erasureAnchor = getErasureAnchor ( method ) ) != null ) { methodDetailsTree . addContent ( writer . getMarkerAnchor ( ( erasureAnchor ) ) ) ; } methodDetailsTree . addContent ( writer . getMarkerAnchor ( writer . getAnchor ( method ) ) ) ; Content methodDocTree = writer . getMemberTreeHeader ( ) ; Content heading = new HtmlTree ( HtmlConstants . MEMBER_HEADING ) ; heading . addContent ( method . name ( ) ) ; methodDocTree . addContent ( heading ) ; return methodDocTree ;
public class FlashImpl { /** * Returns the value of a previous call to setKeepMessages ( ) from this * request . If there was no call yet , false is returned . */ @ Override public boolean isKeepMessages ( ) { } }
FacesContext facesContext = FacesContext . getCurrentInstance ( ) ; ExternalContext externalContext = facesContext . getExternalContext ( ) ; Map < String , Object > requestMap = externalContext . getRequestMap ( ) ; Boolean keepMessages = ( Boolean ) requestMap . get ( FLASH_KEEP_MESSAGES ) ; return ( keepMessages == null ? Boolean . FALSE : keepMessages ) ;
public class FieldDefinition { /** * Get the { @ link FieldDefinition } of this field ' s inverse link definition . This field * must be a link field . Null is returned if either the inverse table or inverse link * have not been defined in this application . * @ return { @ link FieldDefinition } of this link field ' s inverse link field or null if * the inverse table or link have not been defined in this application . */ public FieldDefinition getInverseLinkDef ( ) { } }
assert isLinkType ( ) ; TableDefinition inverseTableDef = getInverseTableDef ( ) ; if ( inverseTableDef == null ) { return null ; } return inverseTableDef . getFieldDef ( m_linkInverse ) ;
public class JTBJavaCCMojo { /** * Creates a new facade to invoke JTB . Most options for the invocation are * derived from the current values of the corresponding mojo parameters . The * caller is responsible to set the input file , output directories and * packages on the returned facade . * @ return The facade for the tool invocation , never < code > null < / code > . */ private JTB newJTB ( ) { } }
final JTB jtb = new JTB ( ) ; jtb . setLog ( getLog ( ) ) ; jtb . setDescriptiveFieldNames ( this . descriptiveFieldNames ) ; jtb . setJavadocFriendlyComments ( this . javadocFriendlyComments ) ; jtb . setNodeParentClass ( this . nodeParentClass ) ; jtb . setParentPointers ( this . parentPointers ) ; jtb . setPrinter ( this . printer ) ; jtb . setScheme ( this . scheme ) ; jtb . setSpecialTokens ( this . specialTokens ) ; jtb . setSupressErrorChecking ( this . supressErrorChecking ) ; return jtb ;
public class WalkerFactory {

    /**
     * Create a StepPattern that is contained within a LocationPath.
     *
     * <p>For match patterns the traversal axis is inverted relative to the
     * step's op code (e.g. FROM_CHILDREN matches via the PARENT axis), which is
     * why each case assigns an {@code axis} that is the reverse of the
     * {@code predicateAxis}.</p>
     *
     * @param compiler The compiler that holds the syntax tree/op map to
     *                 construct from.
     * @param opPos    The current op code position within the opmap.
     * @param mpi      The MatchPatternIterator to which the steps will be attached.
     * @param analysis 32 bits of analysis, from which the type of AxesWalker
     *                 may be influenced.
     * @param tail     The step that is the first step analyzed, but the last
     *                 step in the relative match linked list, i.e. the tail.
     *                 May be null.
     * @param head     The step that is the current head of the relative
     *                 match step linked list. May be null.
     * @return the head of the list.
     * @throws javax.xml.transform.TransformerException
     */
    private static StepPattern createDefaultStepPattern(Compiler compiler, int opPos,
            MatchPatternIterator mpi, int analysis, StepPattern tail, StepPattern head)
            throws javax.xml.transform.TransformerException {
        int stepType = compiler.getOp(opPos);
        boolean simpleInit = false;
        boolean prevIsOneStepDown = true;
        int whatToShow = compiler.getWhatToShow(opPos);
        StepPattern ai = null;
        int axis, predicateAxis;
        switch (stepType) {
        case OpCodes.OP_VARIABLE:
        case OpCodes.OP_EXTFUNCTION:
        case OpCodes.OP_FUNCTION:
        case OpCodes.OP_GROUP:
            // Filtered-expression step: compile the expression and wrap it.
            prevIsOneStepDown = false;
            Expression expr;
            switch (stepType) {
            case OpCodes.OP_VARIABLE:
            case OpCodes.OP_EXTFUNCTION:
            case OpCodes.OP_FUNCTION:
            case OpCodes.OP_GROUP:
                expr = compiler.compile(opPos);
                break;
            default:
                expr = compiler.compile(opPos + 2);
            }
            axis = Axis.FILTEREDLIST;
            predicateAxis = Axis.FILTEREDLIST;
            ai = new FunctionPattern(expr, axis, predicateAxis);
            simpleInit = true;
            break;
        case OpCodes.FROM_ROOT:
            whatToShow = DTMFilter.SHOW_DOCUMENT | DTMFilter.SHOW_DOCUMENT_FRAGMENT;
            axis = Axis.ROOT;
            predicateAxis = Axis.ROOT;
            ai = new StepPattern(DTMFilter.SHOW_DOCUMENT | DTMFilter.SHOW_DOCUMENT_FRAGMENT,
                    axis, predicateAxis);
            break;
        case OpCodes.FROM_ATTRIBUTES:
            whatToShow = DTMFilter.SHOW_ATTRIBUTE;
            axis = Axis.PARENT;
            predicateAxis = Axis.ATTRIBUTE;
            // ai = new StepPattern(whatToShow, Axis.SELF, Axis.SELF);
            break;
        case OpCodes.FROM_NAMESPACE:
            whatToShow = DTMFilter.SHOW_NAMESPACE;
            axis = Axis.PARENT;
            predicateAxis = Axis.NAMESPACE;
            // ai = new StepPattern(whatToShow, axis, predicateAxis);
            break;
        // The following cases pair each op code with its inverted match axis.
        case OpCodes.FROM_ANCESTORS:
            axis = Axis.DESCENDANT;
            predicateAxis = Axis.ANCESTOR;
            break;
        case OpCodes.FROM_CHILDREN:
            axis = Axis.PARENT;
            predicateAxis = Axis.CHILD;
            break;
        case OpCodes.FROM_ANCESTORS_OR_SELF:
            axis = Axis.DESCENDANTORSELF;
            predicateAxis = Axis.ANCESTORORSELF;
            break;
        case OpCodes.FROM_SELF:
            axis = Axis.SELF;
            predicateAxis = Axis.SELF;
            break;
        case OpCodes.FROM_PARENT:
            axis = Axis.CHILD;
            predicateAxis = Axis.PARENT;
            break;
        case OpCodes.FROM_PRECEDING_SIBLINGS:
            axis = Axis.FOLLOWINGSIBLING;
            predicateAxis = Axis.PRECEDINGSIBLING;
            break;
        case OpCodes.FROM_PRECEDING:
            axis = Axis.FOLLOWING;
            predicateAxis = Axis.PRECEDING;
            break;
        case OpCodes.FROM_FOLLOWING_SIBLINGS:
            axis = Axis.PRECEDINGSIBLING;
            predicateAxis = Axis.FOLLOWINGSIBLING;
            break;
        case OpCodes.FROM_FOLLOWING:
            axis = Axis.PRECEDING;
            predicateAxis = Axis.FOLLOWING;
            break;
        case OpCodes.FROM_DESCENDANTS_OR_SELF:
            axis = Axis.ANCESTORORSELF;
            predicateAxis = Axis.DESCENDANTORSELF;
            break;
        case OpCodes.FROM_DESCENDANTS:
            axis = Axis.ANCESTOR;
            predicateAxis = Axis.DESCENDANT;
            break;
        default:
            // NOTE(review): the message key ER_NULL_ERROR_HANDLER looks wrong for
            // an "unknown opcode" assertion — confirm against the resource bundle.
            throw new RuntimeException(XSLMessages.createXPATHMessage(
                    XPATHErrorResources.ER_NULL_ERROR_HANDLER,
                    new Object[]{Integer.toString(stepType)}));
            // "Programmer's assertion: unknown opcode: "
            // + stepType);
        }
        if (null == ai) {
            whatToShow = compiler.getWhatToShow(opPos); // %REVIEW%
            ai = new StepPattern(whatToShow, compiler.getStepNS(opPos),
                    compiler.getStepLocalName(opPos), axis, predicateAxis);
        }
        if (false || DEBUG_PATTERN_CREATION) {
            System.out.print("new step: " + ai);
            System.out.print(", axis: " + Axis.getNames(ai.getAxis()));
            // NOTE(review): prints getAxis() twice — presumably the second call
            // was meant to report the predicate axis; confirm before changing.
            System.out.print(", predAxis: " + Axis.getNames(ai.getAxis()));
            System.out.print(", what: ");
            System.out.print(" ");
            ai.debugWhatToShow(ai.getWhatToShow());
        }
        int argLen = compiler.getFirstPredicateOpPos(opPos);
        ai.setPredicates(compiler.getCompiledPredicates(argLen));
        return ai;
    }
}
public class Decoder { /** * Return the old code id to construct a old decoder */ private String getOldCodeId ( FileStatus srcStat ) throws IOException { } }
if ( codec . id . equals ( "xor" ) || codec . id . equals ( "rs" ) ) { return codec . id ; } else { // Search for xor / rs parity files if ( ParityFilePair . getParityFile ( Codec . getCodec ( "xor" ) , srcStat , this . conf ) != null ) return "xor" ; if ( ParityFilePair . getParityFile ( Codec . getCodec ( "rs" ) , srcStat , this . conf ) != null ) return "rs" ; } return null ;
public class CmsListItemWidget { /** * Adds a widget to the front of the button panel . < p > * @ param w the widget to add */ public void addButtonToFront ( Widget w ) { } }
m_buttonPanel . insert ( w , 0 ) ; if ( CmsCoreProvider . get ( ) . isIe7 ( ) ) { m_buttonPanel . getElement ( ) . getStyle ( ) . setWidth ( m_buttonPanel . getWidgetCount ( ) * 22 , Unit . PX ) ; }
public class ModelsImpl {

    /**
     * Gets information about the prebuilt entity models.
     *
     * @param appId the application ID
     * @param versionId the version ID
     * @param listPrebuiltsOptionalParameter the object representing the optional parameters to be set before calling this API
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<PrebuiltEntityExtractor>> listPrebuiltsAsync(UUID appId, String versionId,
            ListPrebuiltsOptionalParameter listPrebuiltsOptionalParameter,
            final ServiceCallback<List<PrebuiltEntityExtractor>> serviceCallback) {
        // Delegate to the observable-based overload and adapt it to a
        // ServiceFuture driving the supplied callback.
        return ServiceFuture.fromResponse(
                listPrebuiltsWithServiceResponseAsync(appId, versionId, listPrebuiltsOptionalParameter),
                serviceCallback);
    }
}
public class PngtasticFilterHandler {

    /**
     * Reverses the PNG scanline filter encoded in {@code line[0]}, rewriting
     * the scanline bytes in place to their raw values.
     *
     * <p>Byte 0 of each line holds the filter type; it is temporarily zeroed
     * during processing and restored afterwards. The per-pixel byte offset is
     * derived from {@code sampleBitCount} (at least one byte per sample).
     *
     * @param line          the current scanline (modified in place)
     * @param previousLine  the already-defiltered previous scanline
     * @param sampleBitCount bits per sample, used to compute the left-neighbour offset
     * @throws PngException declared by the interface; not thrown here
     */
    @Override
    public void deFilter(byte[] line, byte[] previousLine, int sampleBitCount) throws PngException {
        PngFilterType filterType = PngFilterType.forValue(line[0]);
        line[0] = 0;
        PngFilterType previousFilterType = PngFilterType.forValue(previousLine[0]);
        previousLine[0] = 0;
        switch (filterType) {
            case SUB: {
                // Add the byte 'bpp' positions to the left (0 before line start).
                int previous = -(Math.max(1, sampleBitCount / 8) - 1);
                for (int x = 1, a = previous; x < line.length; x++, a++) {
                    line[x] = (byte) (line[x] + ((a < 0) ? 0 : line[a]));
                }
                break;
            }
            case UP: {
                // Add the byte directly above.
                for (int x = 1; x < line.length; x++) {
                    line[x] = (byte) (line[x] + previousLine[x]);
                }
                break;
            }
            case AVERAGE: {
                // Add the average of the left and above bytes (unsigned math).
                int previous = -(Math.max(1, sampleBitCount / 8) - 1);
                for (int x = 1, a = previous; x < line.length; x++, a++) {
                    line[x] = (byte) (line[x]
                            + ((0xFF & ((a < 0) ? 0 : line[a])) + (0xFF & previousLine[x])) / 2);
                }
                break;
            }
            case PAETH: {
                // Add the Paeth predictor of left, above and upper-left bytes.
                int previous = -(Math.max(1, sampleBitCount / 8) - 1);
                for (int x = 1, xp = previous; x < line.length; x++, xp++) {
                    int result = this.paethPredictor(line, previousLine, x, xp);
                    line[x] = (byte) (line[x] + result);
                }
                break;
            }
        }
        // Restore the filter-type bytes clobbered above.
        line[0] = filterType.getValue();
        previousLine[0] = previousFilterType.getValue();
    }
}
public class LocationApi { /** * Get current ship ( asynchronously ) Get the current ship type , name and id * - - - This route is cached for up to 5 seconds SSO Scope : * esi - location . read _ ship _ type . v1 * @ param characterId * An EVE character ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param token * Access token to use if unable to set a header ( optional ) * @ param callback * The callback to be executed when the API call finishes * @ return The request call * @ throws ApiException * If fail to process the API call , e . g . serializing the request * body object */ public com . squareup . okhttp . Call getCharactersCharacterIdShipAsync ( Integer characterId , String datasource , String ifNoneMatch , String token , final ApiCallback < CharacterShipResponse > callback ) throws ApiException { } }
com . squareup . okhttp . Call call = getCharactersCharacterIdShipValidateBeforeCall ( characterId , datasource , ifNoneMatch , token , callback ) ; Type localVarReturnType = new TypeToken < CharacterShipResponse > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
public class PortTcp {

    /**
     * Starts the port listening on the given server socket.
     *
     * <p>Validates configuration, records the bind state, logs the listen
     * address, and — when an SSL factory is configured — wraps the socket for
     * TLS. The protocol must have been configured before calling.
     *
     * @param ss the server socket to listen on; must not be null
     * @throws IOException if SSL binding fails
     * @throws IllegalStateException if no protocol has been configured
     */
    public void bind(ServerSocketBar ss) throws IOException {
        Objects.requireNonNull(ss);
        // Mark bound before any fallible work so state reflects the attempt.
        _isBind.set(true);
        if (_protocol == null)
            throw new IllegalStateException(L.l("'{0}' must have a configured protocol before starting.", this));
        if (_throttle == null)
            _throttle = new ThrottleSocket();
        _serverSocket = ss;
        String scheme = _protocol.name();
        if (_address != null)
            log.info(scheme + " listening to " + _address + ":" + _port);
        else
            log.info(scheme + " listening to *:" + _port);
        if (_sslFactory != null) {
            try {
                // Wrap the plain socket with the configured SSL factory.
                _serverSocket = _sslFactory.bind(_serverSocket);
            } catch (RuntimeException e) {
                throw e;
            } catch (IOException e) {
                throw e;
            } catch (Exception e) {
                // Normalize any other checked failure to an IOException.
                throw new IOException(e);
            }
        }
    }
}
public class ObjectUtil {

    /**
     * Derives the JavaBean getter name for a property: e.g. the property
     * {@code name} yields {@code getName}.
     *
     * @param attrName the property name; must be non-empty
     * @return the getter method name
     * @deprecated does not follow the full JavaBeans capitalization rules
     */
    public static String getGetMethod(String attrName) {
        // Capitalize the first character and prefix with "get".
        return "get" + attrName.substring(0, 1).toUpperCase() + attrName.substring(1);
    }
}
public class MenuScreen {

    /**
     * Set the current menu.
     *
     * <p>Resolves the menu key through a chain of fallbacks (explicit value,
     * MENU property, HOME property, URL menu, main-menu key), then looks the
     * menu up by numeric ID or by code, following "program" redirects for
     * menu-typed records and retrying with the default on a miss.
     *
     * <p>NOTE: the {@code strMenu == DEFAULT} identity comparisons are
     * deliberate sentinel checks against the DEFAULT constant, not value
     * comparisons.
     *
     * @param strMenu If null, get the current property from the current parameters.
     */
    public void setMenuProperty(String strMenu) {
        if ((strMenu == null) || (strMenu.length() == 0) || (strMenu == DEFAULT)) {
            if (m_strMenuObjectID != null)
                return; // It's already set to the default
            if (strMenu != DEFAULT)
                strMenu = this.getProperty(DBParams.MENU);
            if ((strMenu == null) || (strMenu.length() == 0) || (strMenu == DEFAULT))
                strMenu = this.getProperty(DBParams.HOME);
        }
        if ((strMenu == null) || (strMenu.length() == 0)
                || (HtmlConstants.MAIN_MENU_KEY.equalsIgnoreCase(strMenu)) || (strMenu == DEFAULT)) {
            // Fall back: HOME property, then URL menu, then the main-menu key.
            strMenu = this.getProperty(DBParams.HOME);
            if ((strMenu == null) || (strMenu.length() == 0) || (strMenu == DEFAULT))
                strMenu = this.getURLMenu();
            if ((strMenu == null) || (strMenu.length() == 0) || (strMenu == DEFAULT))
                strMenu = HtmlConstants.MAIN_MENU_KEY;
        }
        Record recMenus = this.getMainRecord();
        m_strMenu = strMenu;
        m_strMenuObjectID = recMenus.getField(MenusModel.ID).toString();
        m_strMenuTitle = recMenus.getField(MenusModel.NAME).toString();
        int oldKeyArea = recMenus.getDefaultOrder();
        try {
            // Numeric keys are looked up by ID; otherwise fall through to code.
            boolean bIsNumeric = Utility.isNumeric(strMenu, true);
            if (bIsNumeric) {
                strMenu = Converter.stripNonNumber(strMenu);
                recMenus.setKeyArea(MenusModel.ID_KEY);
                recMenus.getField(MenusModel.ID).setString(strMenu);
                bIsNumeric = recMenus.seek("=");
            }
            if (!bIsNumeric) {
                recMenus.setKeyArea(MenusModel.CODE_KEY);
                recMenus.getField(MenusModel.CODE).setString(strMenu);
                if (recMenus.seek("=")) {
                    if (!recMenus.getField(MenusModel.PROGRAM).isNull())
                        if (!recMenus.getField(MenusModel.PROGRAM).equals(recMenus.getField(MenusModel.CODE)))
                            if ("menu".equalsIgnoreCase(recMenus.getField(MenusModel.TYPE).toString())) {
                                // Use a different menu: follow the PROGRAM redirect,
                                // carrying this record's PARAMS across to the target.
                                Map<String, Object> map =
                                        ((PropertiesField) recMenus.getField(MenusModel.PARAMS)).getProperties();
                                int iOldKeyArea = recMenus.getDefaultOrder();
                                recMenus.getField(MenusModel.CODE).moveFieldToThis(recMenus.getField(MenusModel.PROGRAM));
                                recMenus.setKeyArea(MenusModel.CODE_KEY);
                                int oldOpenMode = recMenus.getOpenMode();
                                recMenus.setOpenMode(oldOpenMode | DBConstants.OPEN_READ_ONLY);
                                if (recMenus.seek(null)) {
                                    strMenu = recMenus.getField(MenusModel.ID).toString();
                                    if (map != null) {
                                        Iterator<? extends Map.Entry<?, ?>> i = map.entrySet().iterator();
                                        while (i.hasNext()) {
                                            Map.Entry<?, ?> e = i.next();
                                            ((PropertiesField) recMenus.getField(MenusModel.PARAMS))
                                                    .setProperty((String) e.getKey(), (String) e.getValue());
                                        }
                                        recMenus.getField(MenusModel.PARAMS).setModified(false); // Make sure this doesn't get written
                                        recMenus.setOpenMode(oldOpenMode);
                                    }
                                }
                                recMenus.setKeyArea(iOldKeyArea);
                            }
                } else {
                    // Not found, display default screen
                    if ((strMenu != DEFAULT)
                            && (!HtmlConstants.MAIN_MENU_KEY.equalsIgnoreCase(strMenu))
                            && ((strMenu != null) && (!strMenu.equalsIgnoreCase(this.getProperty(DBParams.HOME))))) {
                        // Try the default menu once
                        m_strMenuObjectID = null;
                        this.setMenuProperty(DEFAULT);
                    } else {
                        // Should never happen, the default menu doesn't exist
                        recMenus.addNew();
                        ((CounterField) recMenus.getField(MenusModel.ID)).setValue(-1);
                        // Don't read any detail
                    }
                }
            }
            // Publish whatever record the lookups left us positioned on.
            m_strMenuObjectID = recMenus.getField(MenusModel.ID).toString();
            m_strMenu = recMenus.getField(MenusModel.CODE).toString();
            m_strMenuTitle = recMenus.getField(MenusModel.NAME).toString();
        } catch (DBException ex) {
            ex.printStackTrace(); // Never
        }
        recMenus.setKeyArea(oldKeyArea);
    }
}
public class TaskExecutor { /** * Submit a { @ link Task } to run . * @ param task { @ link Task } to be submitted * @ return a { @ link java . util . concurrent . Future } for the submitted { @ link Task } */ public Future < ? > submit ( Task task ) { } }
LOG . info ( String . format ( "Submitting task %s" , task . getTaskId ( ) ) ) ; return this . taskExecutor . submit ( new TrackingTask ( task ) ) ;
public class FileUtils {

    /**
     * Unzip a zip file in a directory that has the same name as the zip file.
     * For example if the zip file is {@code my-plugin.zip} then the resulting
     * directory is {@code my-plugin}.
     *
     * <p>The archive is only expanded when the target directory does not exist
     * or is older than the zip file; in that case any stale directory is
     * deleted first and recreated.
     *
     * @param filePath the file to evaluate
     * @return Path of unzipped folder, or the original path if this was not a zip file
     * @throws IOException on error
     */
    public static Path expandIfZip(Path filePath) throws IOException {
        if (!isZipFile(filePath)) {
            return filePath;
        }
        FileTime pluginZipDate = Files.getLastModifiedTime(filePath);
        String fileName = filePath.getFileName().toString();
        // Sibling directory named after the archive, extension stripped.
        Path pluginDirectory = filePath.resolveSibling(fileName.substring(0, fileName.lastIndexOf(".")));
        if (!Files.exists(pluginDirectory)
                || pluginZipDate.compareTo(Files.getLastModifiedTime(pluginDirectory)) > 0) {
            // do not overwrite an old version, remove it
            if (Files.exists(pluginDirectory)) {
                FileUtils.delete(pluginDirectory);
            }
            // create root for plugin
            Files.createDirectories(pluginDirectory);
            // expand '.zip' file
            Unzip unzip = new Unzip();
            unzip.setSource(filePath.toFile());
            unzip.setDestination(pluginDirectory.toFile());
            unzip.extract();
            log.info("Expanded plugin zip '{}' in '{}'", filePath.getFileName(), pluginDirectory.getFileName());
        }
        return pluginDirectory;
    }
}
public class TelegramBot { /** * Use this method to kick a user from a group or a supergroup . In the case of supergroups , the user will not be * able to return to the group on their own using invite links , etc . , unless unbanned first . The bot must be * an administrator in the group for this to work * @ param chatId The ID of the chat that you want to kick the user from * @ param userId The ID of the user that you want to kick from the chat * @ return True if the user was kicked successfully , otherwise False */ public boolean kickChatMember ( String chatId , int userId ) { } }
HttpResponse < String > response ; JSONObject jsonResponse ; try { MultipartBody request = Unirest . post ( getBotAPIUrl ( ) + "kickChatMember" ) . field ( "chat_id" , chatId , "application/json; charset=utf8;" ) . field ( "user_id" , userId ) ; response = request . asString ( ) ; jsonResponse = Utils . processResponse ( response ) ; if ( jsonResponse != null ) { if ( jsonResponse . getBoolean ( "result" ) ) return true ; } } catch ( UnirestException e ) { e . printStackTrace ( ) ; } return false ;
public class PortType { /** * ONLY used by WsClientBinding */ public static PortType createPortType ( StringType namespace , StringType name , StringType address ) { } }
PortType endpointType = new PortType ( ) ; endpointType . namespace = namespace ; endpointType . name = name ; endpointType . address = address ; return endpointType ;
public class WebFacesConfigDescriptorImpl { /** * If not already created , a new < code > referenced - bean < / code > element will be created and returned . * Otherwise , the first existing < code > referenced - bean < / code > element will be returned . * @ return the instance defined for the element < code > referenced - bean < / code > */ public FacesConfigReferencedBeanType < WebFacesConfigDescriptor > getOrCreateReferencedBean ( ) { } }
List < Node > nodeList = model . get ( "referenced-bean" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new FacesConfigReferencedBeanTypeImpl < WebFacesConfigDescriptor > ( this , "referenced-bean" , model , nodeList . get ( 0 ) ) ; } return createReferencedBean ( ) ;
public class Matchers { /** * Matches if the given tree is inside a loop . */ public static < T extends Tree > Matcher < T > inLoop ( ) { } }
return new Matcher < T > ( ) { @ Override public boolean matches ( Tree tree , VisitorState state ) { TreePath path = state . getPath ( ) . getParentPath ( ) ; Tree node = path . getLeaf ( ) ; while ( path != null ) { switch ( node . getKind ( ) ) { case METHOD : case CLASS : return false ; case WHILE_LOOP : case FOR_LOOP : case ENHANCED_FOR_LOOP : case DO_WHILE_LOOP : return true ; default : path = path . getParentPath ( ) ; node = path . getLeaf ( ) ; break ; } } return false ; } } ;
public class PipelinedTernaryConsumer { /** * Performs every composed consumer . * @ param first the first element * @ param second the second element * @ param third the third element */ @ Override public void accept ( E1 first , E2 second , E3 third ) { } }
for ( TriConsumer < E1 , E2 , E3 > consumer : consumers ) { consumer . accept ( first , second , third ) ; }
public class CompareToBuilder {

    /**
     * <p>Appends to the <code>builder</code> the comparison of two
     * <code>Object</code>s.</p>
     * <ol>
     * <li>Check if <code>lhs == rhs</code></li>
     * <li>Check if either <code>lhs</code> or <code>rhs</code> is
     * <code>null</code>, a <code>null</code> object is less than a
     * non-<code>null</code> object</li>
     * <li>Check the object contents</li>
     * </ol>
     * <p>If <code>lhs</code> is an array, array comparison methods will be
     * used. Otherwise <code>comparator</code> will be used to compare the
     * objects. If <code>comparator</code> is <code>null</code>,
     * <code>lhs</code> must implement {@link Comparable} instead.</p>
     *
     * @param lhs left-hand object
     * @param rhs right-hand object
     * @param comparator <code>Comparator</code> used to compare the objects,
     *        <code>null</code> means treat lhs as <code>Comparable</code>
     * @return this - used to chain append calls
     * @throws ClassCastException if <code>rhs</code> is not
     *         assignment-compatible with <code>lhs</code>
     * @since 2.0
     */
    public CompareToBuilder append(final Object lhs, final Object rhs, final Comparator<?> comparator) {
        // Short-circuit: once an earlier append decided the order, stop.
        if (comparison != 0) {
            return this;
        }
        if (lhs == rhs) {
            return this;
        }
        // null sorts before non-null.
        if (lhs == null) {
            comparison = -1;
            return this;
        }
        if (rhs == null) {
            comparison = +1;
            return this;
        }
        if (lhs.getClass().isArray()) {
            // switch on type of array, to dispatch to the correct handler
            // handles multi dimensional arrays
            // throws a ClassCastException if rhs is not the correct array type
            if (lhs instanceof long[]) {
                append((long[]) lhs, (long[]) rhs);
            } else if (lhs instanceof int[]) {
                append((int[]) lhs, (int[]) rhs);
public class Compass {

    /**
     * Renders the big compass-rose pointer as a translucent image sized
     * relative to the gauge width: the left half is filled with the symbol
     * color, the right half with its darker shade, and the whole arrow is
     * outlined.
     *
     * @param WIDTH the gauge width the pointer is scaled from
     * @return the rendered pointer image
     */
    private BufferedImage create_BIG_ROSE_POINTER_Image(final int WIDTH) {
        final BufferedImage IMAGE = UTIL.createImage((int) (WIDTH * 0.0546875f), (int) (WIDTH * 0.2f),
                java.awt.Transparency.TRANSLUCENT);
        final Graphics2D G2 = IMAGE.createGraphics();
        G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
        G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
        G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
        final int IMAGE_WIDTH = IMAGE.getWidth();
        final int IMAGE_HEIGHT = IMAGE.getHeight();
        G2.setStroke(new BasicStroke(0.75f));
        // Define arrow shape of pointer: two triangular halves meeting at the tip.
        final GeneralPath POINTER_WHITE_LEFT = new GeneralPath();
        final GeneralPath POINTER_WHITE_RIGHT = new GeneralPath();
        POINTER_WHITE_LEFT.moveTo(IMAGE_WIDTH - IMAGE_WIDTH * 0.95f, IMAGE_HEIGHT);
        POINTER_WHITE_LEFT.lineTo(IMAGE_WIDTH / 2.0f, 0);
        POINTER_WHITE_LEFT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT);
        POINTER_WHITE_LEFT.closePath();
        POINTER_WHITE_RIGHT.moveTo(IMAGE_WIDTH * 0.95f, IMAGE_HEIGHT);
        POINTER_WHITE_RIGHT.lineTo(IMAGE_WIDTH / 2.0f, 0);
        POINTER_WHITE_RIGHT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT);
        POINTER_WHITE_RIGHT.closePath();
        // Union of both halves, used for the outline.
        final Area POINTER_FRAME_WHITE = new Area(POINTER_WHITE_LEFT);
        POINTER_FRAME_WHITE.add(new Area(POINTER_WHITE_RIGHT));
        final Color STROKE_COLOR = getBackgroundColor().SYMBOL_COLOR.darker();
        final Color FILL_COLOR = getBackgroundColor().SYMBOL_COLOR;
        // Fill order matters: right (dark), left (light), then the outline on top.
        G2.setColor(STROKE_COLOR);
        G2.fill(POINTER_WHITE_RIGHT);
        G2.setColor(FILL_COLOR);
        G2.fill(POINTER_WHITE_LEFT);
        G2.setColor(STROKE_COLOR);
        G2.draw(POINTER_FRAME_WHITE);
        G2.dispose();
        return IMAGE;
    }
}
public class XSplitter { /** * Determine the common split dimensions from a list of entries . * @ param node node for which to determine the common split * dimensions * @ return common split dimensions */ private IntIterator getCommonSplitDimensions ( N node ) { } }
Collection < SplitHistory > splitHistories = new ArrayList < > ( node . getNumEntries ( ) ) ; for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { SpatialEntry entry = node . getEntry ( i ) ; if ( ! ( entry instanceof XTreeDirectoryEntry ) ) { throw new RuntimeException ( "Wrong entry type to derive split dimension from: " + entry . getClass ( ) . getName ( ) ) ; } splitHistories . add ( ( ( XTreeDirectoryEntry ) entry ) . getSplitHistory ( ) ) ; } return SplitHistory . getCommonDimensions ( splitHistories ) ;
public class LocalTime {

    /**
     * Obtains an instance of {@code LocalTime} from an hour, minute, second
     * and nanosecond.
     *
     * <p>Each field is range-checked in turn, so the exception reported is for
     * the first out-of-range field in declaration order.
     *
     * @param hour the hour-of-day to represent, from 0 to 23
     * @param minute the minute-of-hour to represent, from 0 to 59
     * @param second the second-of-minute to represent, from 0 to 59
     * @param nanoOfSecond the nano-of-second to represent, from 0 to 999,999,999
     * @return the local time, not null
     * @throws DateTimeException if the value of any field is out of range
     */
    public static LocalTime of(int hour, int minute, int second, int nanoOfSecond) {
        HOUR_OF_DAY.checkValidValue(hour);
        MINUTE_OF_HOUR.checkValidValue(minute);
        SECOND_OF_MINUTE.checkValidValue(second);
        NANO_OF_SECOND.checkValidValue(nanoOfSecond);
        // All fields validated; delegate to the (possibly caching) factory.
        return create(hour, minute, second, nanoOfSecond);
    }
}
public class MockSubnetController { /** * Create the mock Subnet . * @ param cidrBlock VPC cidr block . * @ param vpcId vpc Id for subnet . * @ return mock Subnet . */ public MockSubnet createSubnet ( final String cidrBlock , final String vpcId ) { } }
MockSubnet ret = new MockSubnet ( ) ; ret . setCidrBlock ( cidrBlock ) ; ret . setSubnetId ( "subnet-" + UUID . randomUUID ( ) . toString ( ) . substring ( 0 , SUBNET_ID_POSTFIX_LENGTH ) ) ; ret . setVpcId ( vpcId ) ; allMockSubnets . put ( ret . getSubnetId ( ) , ret ) ; return ret ;
public class PolicyTargetSummaryMarshaller {

    /**
     * Marshall the given parameter object: writes the summary's target id,
     * ARN, name and type through the protocol marshaller.
     *
     * @param policyTargetSummary the object to marshall; must not be null
     * @param protocolMarshaller  the marshaller receiving the fields
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(PolicyTargetSummary policyTargetSummary, ProtocolMarshaller protocolMarshaller) {
        if (policyTargetSummary == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(policyTargetSummary.getTargetId(), TARGETID_BINDING);
            protocolMarshaller.marshall(policyTargetSummary.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(policyTargetSummary.getName(), NAME_BINDING);
            protocolMarshaller.marshall(policyTargetSummary.getType(), TYPE_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class IllegalValueTypeException { /** * < p > Creates the message to be used in * { @ link # IllegalValueTypeException ( Class , Class , Set , Class , Field ) } . * @ since 1.0.0 */ private static final String createMessage ( Class < ? extends IckleActivity > injectorActivity , Class < ? extends Annotation > annotation , Class < ? extends Object > expectedType , Class < ? extends Object > valueType , Field field ) { } }
StringBuilder stringBuilder = new StringBuilder ( ) ; stringBuilder . append ( "Annotation " ) ; stringBuilder . append ( annotation . getName ( ) ) ; stringBuilder . append ( " is illegally used on field " ) ; stringBuilder . append ( field . getName ( ) ) ; stringBuilder . append ( " of type " ) ; stringBuilder . append ( valueType . getName ( ) ) ; stringBuilder . append ( " in " ) ; stringBuilder . append ( injectorActivity . getName ( ) ) ; stringBuilder . append ( ". The expected field type is " ) ; stringBuilder . append ( expectedType . getName ( ) ) ; stringBuilder . append ( ". " ) ; return stringBuilder . toString ( ) ;
public class ThriftClient {

    /**
     * Returns a new client for the specified host.
     *
     * @param host the Cassandra host address
     * @param port the Cassandra host RPC port
     * @return a new client for the specified host
     * @throws TException if there is any problem with the {@code set_keyspace} call
     */
    public static ThriftClient build(String host, int port) throws TException {
        // Delegate to the three-argument factory; the null literal selects
        // "no keyspace" (presumably the overload's sentinel for it — the
        // overload is defined elsewhere in this class).
        return build(host, port, null);
    }
}
public class ZapNTLMEngineImpl { /** * Creates the type 3 message using the given server nonce . The type 3 * message includes all the information for authentication , host , domain , * username and the result of encrypting the nonce sent by the server using * the user ' s password as the key . * @ param user * The user name . This should not include the domain name . * @ param password * The password . * @ param host * The host that is originating the authentication request . * @ param domain * The domain to authenticate within . * @ param nonce * the 8 byte array the server sent . * @ return The type 3 message . * @ throws AuthenticationException * If { @ encrypt ( byte [ ] , byte [ ] ) } fails . */ static String getType3Message ( final String user , final String password , final String host , final String domain , final byte [ ] nonce , final int type2Flags , final String target , final byte [ ] targetInformation , final Certificate peerServerCertificate , final byte [ ] type1Message , final byte [ ] type2Message ) throws AuthenticationException { } }
return new Type3Message ( domain , host , user , password , nonce , type2Flags , target , targetInformation , peerServerCertificate , type1Message , type2Message ) . getResponse ( ) ;
public class KunderaCriteriaBuilder { /** * ( non - Javadoc ) * @ see * javax . persistence . criteria . CriteriaBuilder # construct ( java . lang . Class , * javax . persistence . criteria . Selection < ? > [ ] ) */ @ Override public < Y > CompoundSelection < Y > construct ( Class < Y > arg0 , Selection < ? > ... arg1 ) { } }
return new DefaultCompoundSelection < Y > ( Arrays . asList ( arg1 ) , arg0 ) ;