signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CMAClient { /** * Creates and returns a custom { @ code Gson } instance . */ static Gson createGson ( ) { } }
if ( gson == null ) { gson = new GsonBuilder ( ) . registerTypeAdapter ( CMAField . class , new FieldTypeAdapter ( ) ) . registerTypeAdapter ( CMAEntry . class , new EntrySerializer ( ) ) . registerTypeAdapter ( CMASnapshot . class , new SnapshotDeserializer ( ) ) . registerTypeAdapter ( CMALocale . class , new LocaleSerializer ( ) ) . create ( ) ; } return gson ;
public class Tracer { /** * Push CCM context * @ param key The frame key * @ param callstack The call stack */ public static synchronized void pushCCMContext ( String key , Throwable callstack ) { } }
log . tracef ( "%s" , new TraceEvent ( "CachedConnectionManager" , "NONE" , TraceEvent . PUSH_CCM_CONTEXT , "NONE" , key , callstack != null ? toString ( callstack ) : "" ) ) ;
public class XMLCaster { /** * remove lucee node wraps ( XMLStruct ) from node * @ param node * @ return raw node ( without wrap ) */ public static Node toRawNode ( Node node ) { } }
if ( node instanceof XMLStruct ) return ( ( XMLStruct ) node ) . toNode ( ) ; return node ;
public class InteropFramework { /** * Get a { @ link ProvFormat } given a format string * @ param format the format for which the { @ link ProvFormat } is sought * @ return a { @ link ProvFormat } */ public ProvFormat getTypeForFormat ( String format ) { } }
ProvFormat result ; // try as mimetype and then as an extension result = mimeTypeRevMap . get ( format ) ; if ( result == null ) result = extensionRevMap . get ( format ) ; return result ;
public class OptionValueSources { /** * Invokes the given configurator , obtaining the correct global context type via the datastore configuration type of * the current datastore provider . * @ param configurator the configurator to invoke * @ return a context object containing the options set via the given configurator */ private static < D extends DatastoreConfiguration < G > , G extends GlobalContext < ? , ? > > AppendableConfigurationContext invokeOptionConfigurator ( OptionConfigurator configurator ) { } }
ConfigurableImpl configurable = new ConfigurableImpl ( ) ; configurator . configure ( configurable ) ; return configurable . getContext ( ) ;
public class MapPolyline {
    /**
     * Fills out the given Path2d with the portion of this polyline lying between two
     * curviline positions.
     *
     * If {@code startPosition} is greater than zero, the part of the polyline before
     * that position is clipped away; if {@code endPosition} is lower than the polyline
     * length, the part after it is clipped away. NaN bounds mean "unbounded" on that side.
     *
     * @param path the path to fill out.
     * @param startPosition the curviline position from which the polyline is drawn.
     * @param endPosition the curviline position to which the polyline is drawn.
     * @since 4.0
     */
    @SuppressWarnings({"checkstyle:cyclomaticcomplexity", "checkstyle:npathcomplexity"})
    @Pure
    public final void toPath2D(Path2d path, double startPosition, double endPosition) {
        final double length = getLength();
        // Fast path: no clipping requested on either side — delegate to the plain conversion.
        if ((Double.isNaN(startPosition) || startPosition < 0f) && (Double.isNaN(endPosition) || endPosition >= length)) {
            toPath2D(path);
            return;
        }
        // Normalize the clip bounds: NaN / out-of-range collapse to [0, length].
        final double p1;
        final double p2;
        if (Double.isNaN(startPosition) || startPosition <= 0f) {
            p1 = 0.;
        } else {
            p1 = startPosition;
        }
        if (Double.isNaN(endPosition) || endPosition >= length) {
            p2 = length;
        } else {
            p2 = endPosition;
        }
        // Empty or inverted clip window: nothing to draw.
        if (p2 <= p1) {
            return;
        }
        boolean firstDrawn;
        double curvilinePosition = 0.;
        double previousCurvilinePosition = 0.;
        for (final PointGroup grp : groups()) {
            firstDrawn = true;
            Point2d previous = null;
            for (final Point2d pts : grp) {
                // Already past the end of the clip window: done.
                if (p2 <= previousCurvilinePosition) {
                    return;
                }
                if (previous != null) {
                    curvilinePosition += previous.getDistance(pts);
                }
                if (curvilinePosition >= p1) {
                    // f = first point of the segment to draw; interpolated when the
                    // clip start p1 falls strictly inside the previous->pts segment.
                    final Point2d f;
                    double curvilineDiff;
                    if (previous == null || previousCurvilinePosition >= p1) {
                        f = pts;
                    } else {
                        curvilineDiff = curvilinePosition - previousCurvilinePosition;
                        if (curvilineDiff <= 0.) {
                            f = pts;
                        } else {
                            f = new Point2d();
                            Segment2afp.interpolates(previous.getX(), previous.getY(), pts.getX(), pts.getY(), (p1 - previousCurvilinePosition) / curvilineDiff, f);
                        }
                    }
                    // l = last point of the drawn part, interpolated when the clip end
                    // p2 falls inside the current segment; null when the segment is
                    // drawn up to pts itself.
                    final Point2d l;
                    if (p2 < curvilinePosition) {
                        assert previous != null && p2 >= previousCurvilinePosition && p2 <= curvilinePosition;
                        curvilineDiff = curvilinePosition - previousCurvilinePosition;
                        if (curvilineDiff <= 0.) {
                            l = null;
                        } else {
                            l = new Point2d();
                            Segment2afp.interpolates(previous.getX(), previous.getY(), pts.getX(), pts.getY(), (p2 - previousCurvilinePosition) / curvilineDiff, l);
                        }
                    } else {
                        l = null;
                    }
                    if (l == null) {
                        // Draw up to pts (moveTo on the first emitted point of this group).
                        if (firstDrawn) {
                            firstDrawn = false;
                            path.moveTo(f.getX(), f.getY());
                        } else {
                            path.lineTo(f.getX(), f.getY());
                        }
                        if (f != pts) {
                            path.lineTo(pts.getX(), pts.getY());
                        }
                    } else {
                        // Clip end lies inside this segment: emit only the clipped endpoint.
                        if (firstDrawn) {
                            firstDrawn = false;
                            path.moveTo(l.getX(), l.getY());
                        } else {
                            path.lineTo(l.getX(), l.getY());
                        }
                    }
                }
                previous = pts;
                previousCurvilinePosition = curvilinePosition;
            }
        }
    }
}
public class CouponsIterator { /** * next ( ) must be called before the first getValue ( ) . This skips over zero values . * @ return the next coupon in the array . */ boolean next ( ) { } }
index_ ++ ; while ( index_ < maxEntries_ ) { if ( couponsArr_ [ offset_ + index_ ] != 0 ) { return true ; } index_ ++ ; } return false ;
public class DynamoDBMapper { /** * Scans through an Amazon DynamoDB table and returns a single page of matching * results . The table to scan is determined by looking at the annotations on * the specified class , which declares where to store the object data in AWS * DynamoDB , and the scan expression parameter allows the caller to filter * results and control how the scan is executed . * @ param < T > * The type of the objects being returned . * @ param clazz * The class annotated with DynamoDB annotations describing how * to store the object data in Amazon DynamoDB . * @ param scanExpression * Details on how to run the scan , including any filters to apply * to limit results . * @ param config * The configuration to use for this scan , which overrides the * default provided at object construction . */ public < T > ScanResultPage < T > scanPage ( Class < T > clazz , DynamoDBScanExpression scanExpression , DynamoDBMapperConfig config ) { } }
config = mergeConfig ( config ) ; ScanRequest scanRequest = createScanRequestFromExpression ( clazz , scanExpression , config ) ; ScanResult scanResult = db . scan ( applyUserAgent ( scanRequest ) ) ; ScanResultPage < T > result = new ScanResultPage < T > ( ) ; result . setResults ( marshallIntoObjects ( clazz , scanResult . getItems ( ) ) ) ; result . setLastEvaluatedKey ( scanResult . getLastEvaluatedKey ( ) ) ; return result ;
public class TargetsApi { /** * Get recently used targets * Get recently used targets for the current agent . * @ param limit The number of results to return . The default value is 50 . ( optional ) * @ return ApiResponse & lt ; TargetsResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < TargetsResponse > getRecentTargetsWithHttpInfo ( BigDecimal limit ) throws ApiException { } }
com . squareup . okhttp . Call call = getRecentTargetsValidateBeforeCall ( limit , null , null ) ; Type localVarReturnType = new TypeToken < TargetsResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class Antecedent {
    /**
     * Computes the activation degree of the antecedent by recursively evaluating
     * the expression tree from the given node.
     *
     * @param conjunction the conjunction operator from the RuleBlock (used for AND nodes)
     * @param disjunction the disjunction operator from the RuleBlock (used for OR nodes)
     * @param node a node in the expression tree of the antecedent
     * @return the activation degree of the antecedent
     */
    public double activationDegree(TNorm conjunction, SNorm disjunction, Expression node) {
        if (!isLoaded()) {
            throw new RuntimeException(String.format("[antecedent error] antecedent <%s> is not loaded", text));
        }
        final Expression.Type expressionType = node.type();
        if (expressionType == Expression.Type.Proposition) {
            Proposition proposition = (Proposition) node;
            // Disabled variables contribute no activation.
            if (!proposition.getVariable().isEnabled()) {
                return 0.0;
            }
            if (!proposition.getHedges().isEmpty()) {
                final int lastIndex = proposition.getHedges().size();
                ListIterator<Hedge> rit = proposition.getHedges().listIterator(lastIndex);
                Hedge any = rit.previous();
                // Special case: when the LAST hedge is "Any", the membership value is
                // irrelevant — seed with NaN and fold the remaining hedges in reverse.
                if (any instanceof Any) {
                    double result = any.hedge(Double.NaN);
                    while (rit.hasPrevious()) {
                        result = rit.previous().hedge(result);
                    }
                    return result;
                }
            }
            Variable variable = proposition.getVariable();
            double result = Double.NaN;
            Variable.Type variableType = variable.type();
            // Input variables: direct membership of the crisp value.
            // Output variables: activation degree from the accumulated fuzzy output.
            if (variableType == Variable.Type.Input) {
                result = proposition.getTerm().membership(variable.getValue());
            } else if (variableType == Variable.Type.Output) {
                result = ((OutputVariable) variable).fuzzyOutput().activationDegree(proposition.getTerm());
            }
            // Hedges apply in reverse declaration order (innermost last).
            int lastIndex = proposition.getHedges().size();
            ListIterator<Hedge> reverseIterator = proposition.getHedges().listIterator(lastIndex);
            while (reverseIterator.hasPrevious()) {
                result = reverseIterator.previous().hedge(result);
            }
            return result;
        }
        if (expressionType == Expression.Type.Operator) {
            Operator operator = (Operator) node;
            if (operator.getLeft() == null || operator.getRight() == null) {
                throw new RuntimeException("[syntax error] left and right operators cannot be null");
            }
            // AND: combine both subtrees with the conjunction T-norm.
            if (Rule.FL_AND.equals(operator.getName())) {
                if (conjunction == null) {
                    throw new RuntimeException(String.format("[conjunction error] " + "the following rule requires a conjunction operator:\n%s", text));
                }
                return conjunction.compute(activationDegree(conjunction, disjunction, operator.getLeft()), activationDegree(conjunction, disjunction, operator.getRight()));
            }
            // OR: combine both subtrees with the disjunction S-norm.
            if (Rule.FL_OR.equals(operator.getName())) {
                if (disjunction == null) {
                    throw new RuntimeException(String.format("[disjunction error] " + "the following rule requires a disjunction operator:\n%s", text));
                }
                return disjunction.compute(activationDegree(conjunction, disjunction, operator.getLeft()), activationDegree(conjunction, disjunction, operator.getRight()));
            }
            throw new RuntimeException(String.format("[syntax error] operator <%s> not recognized", operator.getName()));
        } else {
            throw new RuntimeException("[expression error] unknown instance of Expression");
        }
    }
}
public class CmsMessageBundleEditorOptions {
    /**
     * Creates the "Add key" button: a borderless "+" icon button whose click
     * handler delegates to {@code handleAddKey()}.
     *
     * @return the "Add key" button.
     */
    private Component createAddKeyButton() {
        final Button button = new Button();
        button.addStyleName("icon-only");
        button.addStyleName("borderless-colored");
        final String caption = m_messages.key(Messages.GUI_ADD_KEY_0);
        button.setDescription(caption);
        button.setIcon(FontOpenCms.CIRCLE_PLUS, caption);
        button.addClickListener(new ClickListener() {

            private static final long serialVersionUID = 1L;

            public void buttonClick(ClickEvent event) {
                handleAddKey();
            }
        });
        return button;
    }
}
public class SimpleDocTreeVisitor { /** * { @ inheritDoc } This implementation calls { @ code defaultAction } . * @ param node { @ inheritDoc } * @ param p { @ inheritDoc } * @ return the result of { @ code defaultAction } */ @ Override public R visitProvides ( ProvidesTree node , P p ) { } }
return defaultAction ( node , p ) ;
public class GDLLoader { /** * Updates the source or target vertex identifier of the last seen edge . * @ param v current vertex */ private void updateLastSeenEdge ( Vertex v ) { } }
Edge lastSeenEdge = getLastSeenEdge ( ) ; if ( lastSeenEdge != null ) { if ( lastSeenEdge . getSourceVertexId ( ) == null ) { lastSeenEdge . setSourceVertexId ( v . getId ( ) ) ; } else if ( lastSeenEdge . getTargetVertexId ( ) == null ) { lastSeenEdge . setTargetVertexId ( v . getId ( ) ) ; } }
public class HalCachingLinkResolver { /** * Returns a list with all resources which are cached . * @ param linkedIds the ids to resolve * @ param cache the cache to use * @ param notCachedLinkedIds a list with ids which are not found in the cache * @ return the cached resources */ protected List < HalResource < ? > > resolveCachedLinks ( String [ ] linkedIds , Cache cache , List < String > notCachedLinkedIds ) { } }
ArrayList < HalResource < ? > > resolvedResources = new ArrayList < HalResource < ? > > ( ) ; for ( String linkedId : linkedIds ) { HalResource < ? > resource = ( HalResource < ? > ) cache . get ( linkedId ) ; if ( resource != null ) { resolvedResources . add ( resource ) ; } else { notCachedLinkedIds . add ( linkedId ) ; } } return resolvedResources ;
public class CocoaPodsDependencyCollector { /** * / * - - - private methods - - - */ private boolean getPodsAndDependenciesSection ( List < String > directDependenciesLines , List < String > allDependenciesLines , String podFileLock ) { } }
boolean successReadPodfile = true ; boolean podsSection = false ; boolean dependenciesSection = false ; try ( BufferedReader br = new BufferedReader ( new FileReader ( podFileLock ) ) ) { String line ; logger . debug ( "The content of Podfile.lock - {}:" , podFileLock ) ; while ( ( line = br . readLine ( ) ) != null ) { logger . debug ( line ) ; if ( line . startsWith ( PODS ) ) { dependenciesSection = false ; podsSection = true ; } else if ( line . startsWith ( DEPENDENCIES ) ) { podsSection = false ; dependenciesSection = true ; } else if ( line . trim ( ) . equals ( Constants . EMPTY_STRING ) ) { podsSection = false ; dependenciesSection = false ; } else if ( podsSection ) { allDependenciesLines . add ( line ) ; } else if ( dependenciesSection ) { directDependenciesLines . add ( line ) ; } } } catch ( IOException e ) { logger . warn ( "Couldn't read the Podfile.lock: {}" , podFileLock ) ; successReadPodfile = false ; } return successReadPodfile ;
public class MeasureTraitType { /** * Gets the value of the customAssertionScore property . * This accessor method returns a reference to the live list , * not a snapshot . Therefore any modification you make to the * returned list will be present inside the JAXB object . * This is why there is not a < CODE > set < / CODE > method for the customAssertionScore property . * For example , to add a new item , do as follows : * < pre > * getCustomAssertionScore ( ) . add ( newItem ) ; * < / pre > * Objects of the following type ( s ) are allowed in the list * { @ link MeasureTraitType . CustomAssertionScore } */ public List < MeasureTraitType . CustomAssertionScore > getCustomAssertionScore ( ) { } }
if ( customAssertionScore == null ) { customAssertionScore = new ArrayList < MeasureTraitType . CustomAssertionScore > ( ) ; } return this . customAssertionScore ;
public class JvmModelCompleter { /** * Replace the variables contained in the comment to be written to the < code > @ Generated < / code > annotation . */ protected String replaceVariables ( String commentForGenerated , JvmDeclaredType jvmType ) { } }
String result = commentForGenerated ; if ( result . contains ( GENERATED_COMMENT_VAR_SOURCE_FILE ) ) { Resource resource = jvmType . eResource ( ) ; if ( resource != null ) { URI uri = resource . getURI ( ) ; if ( uri != null ) { String sourceFile = uri . lastSegment ( ) ; if ( sourceFile == null ) sourceFile = uri . toString ( ) ; result = result . replace ( GENERATED_COMMENT_VAR_SOURCE_FILE , sourceFile ) ; } } } return result ;
public class SpecUtil { /** * Returns the number of features contained in the given specification . * Because Spock allows for the dynamic creation of new features at * specification run time , this number is only an estimate . */ public static int getFeatureCount ( Class < ? > spec ) { } }
checkIsSpec ( spec ) ; int count = 0 ; do { for ( Method method : spec . getDeclaredMethods ( ) ) if ( method . isAnnotationPresent ( FeatureMetadata . class ) ) count ++ ; spec = spec . getSuperclass ( ) ; } while ( spec != null && isSpec ( spec ) ) ; return count ;
public class AppServicePlansInner { /** * List all capabilities of an App Service plan . * List all capabilities of an App Service plan . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the App Service plan . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; CapabilityInner & gt ; object */ public Observable < List < CapabilityInner > > listCapabilitiesAsync ( String resourceGroupName , String name ) { } }
return listCapabilitiesWithServiceResponseAsync ( resourceGroupName , name ) . map ( new Func1 < ServiceResponse < List < CapabilityInner > > , List < CapabilityInner > > ( ) { @ Override public List < CapabilityInner > call ( ServiceResponse < List < CapabilityInner > > response ) { return response . body ( ) ; } } ) ;
public class BaseHttpRequest { /** * Sends to a URL . * @ param url URL to which to send . * @ param content content to send . * @ throws IOException if the data cannot be sent . */ @ Override public void send ( @ NonNull URL url , @ NonNull T content ) throws IOException { } }
final HttpURLConnection urlConnection = createConnection ( url ) ; if ( urlConnection instanceof HttpsURLConnection ) { try { configureHttps ( ( HttpsURLConnection ) urlConnection ) ; } catch ( GeneralSecurityException e ) { ACRA . log . e ( LOG_TAG , "Could not configure SSL for ACRA request to " + url , e ) ; } } configureTimeouts ( urlConnection , connectionTimeOut , socketTimeOut ) ; configureHeaders ( urlConnection , login , password , headers , content ) ; if ( ACRA . DEV_LOGGING ) { ACRA . log . d ( LOG_TAG , "Sending request to " + url ) ; ACRA . log . d ( LOG_TAG , "Http " + method . name ( ) + " content : " ) ; ACRA . log . d ( LOG_TAG , content . toString ( ) ) ; } try { writeContent ( urlConnection , method , content ) ; handleResponse ( urlConnection . getResponseCode ( ) , urlConnection . getResponseMessage ( ) ) ; urlConnection . disconnect ( ) ; } catch ( SocketTimeoutException e ) { if ( senderConfiguration . dropReportsOnTimeout ( ) ) { Log . w ( ACRA . LOG_TAG , "Dropped report due to timeout" ) ; } else { throw e ; } }
public class PlainTextMessageTypeMarshaller { /** * Marshall the given parameter object . */ public void marshall ( PlainTextMessageType plainTextMessageType , ProtocolMarshaller protocolMarshaller ) { } }
if ( plainTextMessageType == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( plainTextMessageType . getLanguageCode ( ) , LANGUAGECODE_BINDING ) ; protocolMarshaller . marshall ( plainTextMessageType . getText ( ) , TEXT_BINDING ) ; protocolMarshaller . marshall ( plainTextMessageType . getVoiceId ( ) , VOICEID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ListResolverRuleAssociationsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListResolverRuleAssociationsRequest listResolverRuleAssociationsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( listResolverRuleAssociationsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listResolverRuleAssociationsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listResolverRuleAssociationsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listResolverRuleAssociationsRequest . getFilters ( ) , FILTERS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class FileSystem { /** * Opens an FSDataOutputStream at the indicated Path . * Files are overwritten by default . */ public FSDataOutputStream create ( Path f ) throws IOException { } }
return create ( f , CreateOptions . writeOptions ( true , null ) ) ;
public class WSJPAUrlUtils { /** * Encapsulates the specified URL within a wsjpa URL . * @ param url - the URL to encapsulate * @ return - a wsjpa URL encapsulating the argument URL * @ throws MalformedURLException */ @ Trivial public static URL createWSJPAURL ( URL url ) throws MalformedURLException { } }
if ( url == null ) { return null ; } // Encode the URL to be embedded into the wsjpa URL ' s path final String encodedURLPathStr = encode ( url . toExternalForm ( ) ) ; URL returnURL ; try { returnURL = AccessController . doPrivileged ( new PrivilegedExceptionAction < URL > ( ) { @ Override @ Trivial public URL run ( ) throws MalformedURLException { return new URL ( WSJPA_PROTOCOL_NAME + ":" + encodedURLPathStr ) ; } } ) ; } catch ( PrivilegedActionException e ) { throw ( MalformedURLException ) e . getException ( ) ; } return returnURL ;
public class JsonApiResponseFilter {
    /**
     * Creates JSON API responses for custom JAX-RS actions returning Crnk resources.
     *
     * Null entities become an explicit null-data document when that feature is
     * enabled; recognized Crnk resources are mapped through the Crnk document
     * mapper; other JSON-API responses are wrapped in a plain data document.
     */
    @Override
    public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) {
        Object response = responseContext.getEntity();
        if (response == null) {
            // Optionally serialize an explicit `data: null` document for empty responses.
            if (feature.getBoot().isNullDataResponseEnabled()) {
                Document document = new Document();
                document.setData(Nullable.nullValue());
                responseContext.setEntity(document);
                responseContext.setStatus(Response.Status.OK.getStatusCode());
                responseContext.getHeaders().put("Content-Type", Collections.singletonList(JsonApiMediaType.APPLICATION_JSON_API));
            }
            return;
        }
        // only modify responses which contain a single or a list of Crnk resources
        Optional<RegistryEntry> registryEntry = getRegistryEntry(response);
        if (registryEntry.isPresent()) {
            CrnkBoot boot = feature.getBoot();
            DocumentMapper documentMapper = boot.getDocumentMapper();
            HttpRequestContextProvider httpRequestContextProvider = boot.getModuleRegistry().getHttpRequestContextProvider();
            try {
                // Make the current JAX-RS request visible to Crnk for the duration
                // of the mapping; onRequestFinished in the finally block undoes this.
                HttpRequestContext context = new HttpRequestContextBaseAdapter(new JaxrsRequestContext(requestContext, feature));
                httpRequestContextProvider.onRequestStarted(context);
                JsonApiResponse jsonApiResponse = new JsonApiResponse();
                jsonApiResponse.setEntity(response);
                // use the Crnk document mapper to create a JSON API response
                DocumentMappingConfig mappingConfig = new DocumentMappingConfig();
                ResourceInformation resourceInformation = registryEntry.get().getResourceInformation();
                // Keep only JSON-API query parameters (e.g. include/filter/sort)
                // before deserializing them into a QuerySpec.
                Map<String, Set<String>> jsonApiParameters = context.getRequestParameters().entrySet().stream().filter(entry -> isJsonApiParameter(entry.getKey())).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
                QuerySpecUrlMapper urlMapper = feature.getBoot().getUrlMapper();
                QuerySpec querySpec = urlMapper.deserialize(resourceInformation, jsonApiParameters);
                ResourceRegistry resourceRegistry = feature.getBoot().getResourceRegistry();
                QueryAdapter queryAdapter = new QuerySpecAdapter(querySpec, resourceRegistry, context.getQueryContext());
                responseContext.setEntity(documentMapper.toDocument(jsonApiResponse, queryAdapter, mappingConfig).get());
                responseContext.getHeaders().put("Content-Type", Collections.singletonList(JsonApiMediaType.APPLICATION_JSON_API));
            } finally {
                httpRequestContextProvider.onRequestFinished();
            }
        } else if (isJsonApiResponse(responseContext) && !doNotWrap(response)) {
            // Non-registry entity on a JSON-API response: wrap it as a data document.
            Document document = new Document();
            document.setData(Nullable.of(response));
            responseContext.setEntity(document);
        }
    }
}
public class MockRepository { /** * Retrieve state based on the supplied key . */ @ SuppressWarnings ( "unchecked" ) public static synchronized < T > T getAdditionalState ( String key ) { } }
return ( T ) additionalState . get ( key ) ;
public class NameTable { /** * Return the full name of a type , including its package . For outer types , * is the type ' s full name ; for example , java . lang . Object ' s full name is * " JavaLangObject " . For inner classes , the full name is their outer class ' * name plus the inner class name ; for example , java . util . ArrayList . ListItr ' s * name is " JavaUtilArrayList _ ListItr " . */ public String getFullName ( TypeElement element ) { } }
element = typeUtil . getObjcClass ( element ) ; String fullName = fullNameCache . get ( element ) ; if ( fullName == null ) { fullName = getFullNameImpl ( element ) ; fullNameCache . put ( element , fullName ) ; } return fullName ;
public class MappedParametrizedObjectEntry { /** * Parse named parameter as Long . * @ param name * parameter name * @ param defaultValue * default Long value * @ return Long value */ public Long getParameterLong ( String name , Long defaultValue ) { } }
String value = getParameterValue ( name , null ) ; if ( value != null ) { try { return StringNumberParser . parseLong ( value ) ; } catch ( NumberFormatException e ) { if ( LOG . isTraceEnabled ( ) ) { LOG . trace ( "An exception occurred: " + e . getMessage ( ) ) ; } } } return defaultValue ;
public class NamespacesInner { /** * Creates / Updates a service namespace . Once created , this namespace ' s resource manifest is immutable . This operation is idempotent . * @ param resourceGroupName The name of the resource group . * @ param namespaceName The namespace name . * @ param parameters Parameters supplied to create a Namespace Resource . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < NamespaceResourceInner > createOrUpdateAsync ( String resourceGroupName , String namespaceName , NamespaceCreateOrUpdateParameters parameters , final ServiceCallback < NamespaceResourceInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , namespaceName , parameters ) , serviceCallback ) ;
public class ExcelFunctions { /** * Rounds a number to a specified number of digits */ public static BigDecimal round ( EvaluationContext ctx , Object number , Object numDigits ) { } }
BigDecimal _number = Conversions . toDecimal ( number , ctx ) ; int _numDigits = Conversions . toInteger ( numDigits , ctx ) ; return ExpressionUtils . decimalRound ( _number , _numDigits , RoundingMode . HALF_UP ) ;
public class ForEachTag { /** * / * Evaluates expressions as necessary */ private void evaluateExpressions ( ) throws JspException { } }
/* * Note : we don ' t check for type mismatches here ; we assume * the expression evaluator will return the expected type * ( by virtue of knowledge we give it about what that type is ) . * A ClassCastException here is truly unexpected , so we let it * propagate up . */ if ( begin_ != null ) { Object r = ExpressionEvaluatorManager . evaluate ( "begin" , begin_ , Integer . class , this , pageContext ) ; if ( r == null ) { throw new NullAttributeException ( "forEach" , "begin" ) ; } begin = ( ( Integer ) r ) . intValue ( ) ; validateBegin ( ) ; } if ( end_ != null ) { Object r = ExpressionEvaluatorManager . evaluate ( "end" , end_ , Integer . class , this , pageContext ) ; if ( r == null ) { throw new NullAttributeException ( "forEach" , "end" ) ; } end = ( ( Integer ) r ) . intValue ( ) ; validateEnd ( ) ; } if ( step_ != null ) { Object r = ExpressionEvaluatorManager . evaluate ( "step" , step_ , Integer . class , this , pageContext ) ; if ( r == null ) { throw new NullAttributeException ( "forEach" , "step" ) ; } step = ( ( Integer ) r ) . intValue ( ) ; validateStep ( ) ; } if ( items_ != null ) { rawItems = ExpressionEvaluatorManager . evaluate ( "items" , items_ , Object . class , this , pageContext ) ; // use an empty list to indicate " no iteration " , if relevant if ( rawItems == null ) { rawItems = new ArrayList ( ) ; } }
public class CreationShanksAgentCapability { /** * " Removes " an agent with the given name from the simulation * Be careful : what this actually do is to stop the agent execution . * @ param sim * - The Shanks Simulation * @ param agentID * - The name of the agent to remove * @ throws ShanksException * An UnkownAgentException if the Agent ID is not found on the * simulation . */ public static void removeAgent ( ShanksSimulation sim , String agentID ) throws ShanksException { } }
sim . logger . info ( "Stoppable not fount. Attempting direct stop..." ) ; sim . unregisterShanksAgent ( agentID ) ; sim . logger . info ( "Agent " + agentID + " stopped." ) ;
public class aaauser { /** * Use this API to add aaauser resources . */ public static base_responses add ( nitro_service client , aaauser resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { aaauser addresources [ ] = new aaauser [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { addresources [ i ] = new aaauser ( ) ; addresources [ i ] . username = resources [ i ] . username ; addresources [ i ] . password = resources [ i ] . password ; } result = add_bulk_request ( client , addresources ) ; } return result ;
public class BasePool {
    /**
     * Gets the freelist bucket for the specified size, creating it on demand
     * when new buckets are allowed.
     *
     * @param bucketedSize the bucket size
     * @return the bucket, or {@code null} when absent and new buckets are disallowed
     */
    @VisibleForTesting
    synchronized Bucket<V> getBucket(int bucketedSize) {
        // Return an existing bucket, or null when creation is disabled.
        final Bucket<V> existing = mBuckets.get(bucketedSize);
        if (existing != null || !mAllowNewBuckets) {
            return existing;
        }
        if (FLog.isLoggable(FLog.VERBOSE)) {
            FLog.v(TAG, "creating new bucket %s", bucketedSize);
        }
        final Bucket<V> created = newBucket(bucketedSize);
        mBuckets.put(bucketedSize, created);
        return created;
    }
}
public class AbstractTicketRegistry { /** * Decode ticket . * @ param result the result * @ return the ticket */ @ SneakyThrows protected Ticket decodeTicket ( final Ticket result ) { } }
if ( ! isCipherExecutorEnabled ( ) ) { LOGGER . trace ( MESSAGE ) ; return result ; } if ( result == null ) { LOGGER . warn ( "Ticket passed is null and cannot be decoded" ) ; return null ; } if ( ! result . getClass ( ) . isAssignableFrom ( EncodedTicket . class ) ) { LOGGER . warn ( "Ticket passed is not an encoded ticket type; rather it's a [{}], no decoding is necessary." , result . getClass ( ) . getSimpleName ( ) ) ; return result ; } LOGGER . debug ( "Attempting to decode [{}]" , result ) ; val encodedTicket = ( EncodedTicket ) result ; val ticket = SerializationUtils . decodeAndDeserializeObject ( encodedTicket . getEncodedTicket ( ) , this . cipherExecutor , Ticket . class ) ; LOGGER . debug ( "Decoded ticket to [{}]" , ticket ) ; return ticket ;
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcElectricDistributionBoardTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class Utils { /** * Fast get parent directory using substring * @ param path path whose parent path has to be returned * @ return parent path of the argument */ public static String getParentPath ( String path ) { } }
if ( path == null ) return null ; path = path . trim ( ) ; if ( path . endsWith ( PATH_ROOT ) && path . length ( ) > 1 ) { path = path . substring ( 0 , path . length ( ) - 1 ) ; } int lastIndex = path . lastIndexOf ( PATH_ROOT ) ; if ( path . length ( ) > 1 && lastIndex > 0 ) { return path . substring ( 0 , lastIndex ) ; } return PATH_ROOT ;
// Review notes: EMF-generated containment setter. Detaches the old assignable,
// attaches the new one, and batches the resulting notifications into a single
// dispatch; when the value is unchanged it still emits a SET notification if
// listeners are registered. The inverse add/remove ordering is required by the
// EMF notification contract — do not reorder.
public class XAssignmentImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setAssignable ( XExpression newAssignable ) { } }
if ( newAssignable != assignable ) { NotificationChain msgs = null ; if ( assignable != null ) msgs = ( ( InternalEObject ) assignable ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - XbasePackage . XASSIGNMENT__ASSIGNABLE , null , msgs ) ; if ( newAssignable != null ) msgs = ( ( InternalEObject ) newAssignable ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - XbasePackage . XASSIGNMENT__ASSIGNABLE , null , msgs ) ; msgs = basicSetAssignable ( newAssignable , msgs ) ; if ( msgs != null ) msgs . dispatch ( ) ; } else if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , XbasePackage . XASSIGNMENT__ASSIGNABLE , newAssignable , newAssignable ) ) ;
// Review notes: generalized Black-Scholes call value with explicit handling of
// negative forward (via put-call symmetry max(X,0) = X + max(-X,0), recursing
// with negated forward/strike), and of degenerate limit cases (zero forward,
// non-positive strike/volatility/maturity) where d+ is +/- infinity and the
// value collapses to intrinsic value times the payoff unit. The branch order
// is significant: the recursion must precede the limit-case test.
public class AnalyticFormulas { /** * Calculates the Black - Scholes option value of a call , i . e . , the payoff max ( S ( T ) - K , 0 ) P , where S follows a log - normal process with constant log - volatility . * The method also handles cases where the forward and / or option strike is negative * and some limit cases where the forward and / or the option strike is zero . * @ param forward The forward of the underlying . * @ param volatility The Black - Scholes volatility . * @ param optionMaturity The option maturity T . * @ param optionStrike The option strike . If the option strike is & le ; 0.0 the method returns the value of the forward contract paying S ( T ) - K in T . * @ param payoffUnit The payoff unit ( e . g . , the discount factor ) * @ return Returns the value of a European call option under the Black - Scholes model . */ public static double blackScholesGeneralizedOptionValue ( double forward , double volatility , double optionMaturity , double optionStrike , double payoffUnit ) { } }
// d+ = (ln(F/K) + sigma^2 T / 2) / (sigma sqrt(T)); d- = d+ - sigma sqrt(T);
// analytic value = (F N(d+) - K N(d-)) * payoffUnit.
if ( optionMaturity < 0 ) { return 0 ; } else if ( forward < 0 ) { // We use max ( X , 0 ) = X + max ( - X , 0) return ( forward - optionStrike ) * payoffUnit + blackScholesGeneralizedOptionValue ( - forward , volatility , optionMaturity , - optionStrike , payoffUnit ) ; } else if ( ( forward == 0 ) || ( optionStrike <= 0.0 ) || ( volatility <= 0.0 ) || ( optionMaturity <= 0.0 ) ) { // Limit case ( where dPlus = + / - infty ) return Math . max ( forward - optionStrike , 0 ) * payoffUnit ; } else { // Calculate analytic value double dPlus = ( Math . log ( forward / optionStrike ) + 0.5 * volatility * volatility * optionMaturity ) / ( volatility * Math . sqrt ( optionMaturity ) ) ; double dMinus = dPlus - volatility * Math . sqrt ( optionMaturity ) ; double valueAnalytic = ( forward * NormalDistribution . cumulativeDistribution ( dPlus ) - optionStrike * NormalDistribution . cumulativeDistribution ( dMinus ) ) * payoffUnit ; return valueAnalytic ; }
public class DefaultClusterElector {
    /**
     * Elects a member from the candidate list by applying the configured shift
     * modulo the list size. JGroups delivers addresses in a stable sorted order,
     * so every node computing this picks the same member.
     *
     * @param list the candidate addresses (must be non-empty)
     * @return the elected address
     */
    public Address elect(List<Address> list) {
        int size = list.size();
        // Normalize into [0, size): copes with shifts larger than the list
        // as well as negative remainders.
        int index = ((this.shift % size) + size) % size;
        return list.get(index);
    }
}
// Review notes: performs an XEP-0323 momentary-values read-out. Two collectors
// are installed BEFORE the IQ request is sent to avoid a data race: one for the
// "done" message (seqNr + done flag) and one for the data messages; the data
// collector is configured to reset the done collector so the done timeout keeps
// sliding while data is still arriving. Cancelling the data collector in the
// finally block also cancels the done collector (it is the reset target).
public class IoTDataManager { /** * Try to read out a things momentary values . * @ param jid the full JID of the thing to read data from . * @ return a list with the read out data . * @ throws NoResponseException * @ throws XMPPErrorException * @ throws NotConnectedException * @ throws InterruptedException */ public List < IoTFieldsExtension > requestMomentaryValuesReadOut ( EntityFullJid jid ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } }
final XMPPConnection connection = connection ( ) ; final int seqNr = nextSeqNr . incrementAndGet ( ) ; IoTDataRequest iotDataRequest = new IoTDataRequest ( seqNr , true ) ; iotDataRequest . setTo ( jid ) ; StanzaFilter doneFilter = new IoTFieldsExtensionFilter ( seqNr , true ) ; StanzaFilter dataFilter = new IoTFieldsExtensionFilter ( seqNr , false ) ; // Setup the IoTFieldsExtension message collectors before sending the IQ to avoid a data race . StanzaCollector doneCollector = connection . createStanzaCollector ( doneFilter ) ; StanzaCollector . Configuration dataCollectorConfiguration = StanzaCollector . newConfiguration ( ) . setStanzaFilter ( dataFilter ) . setCollectorToReset ( doneCollector ) ; StanzaCollector dataCollector = connection . createStanzaCollector ( dataCollectorConfiguration ) ; try { connection . createStanzaCollectorAndSend ( iotDataRequest ) . nextResultOrThrow ( ) ; // Wait until a message with an IoTFieldsExtension and the done flag comes in . doneCollector . nextResult ( ) ; } finally { // Canceling dataCollector will also cancel the doneCollector since it is configured as dataCollector ' s // collector to reset . dataCollector . cancel ( ) ; } int collectedCount = dataCollector . getCollectedCount ( ) ; List < IoTFieldsExtension > res = new ArrayList < > ( collectedCount ) ; for ( int i = 0 ; i < collectedCount ; i ++ ) { Message message = dataCollector . pollResult ( ) ; IoTFieldsExtension iotFieldsExtension = IoTFieldsExtension . from ( message ) ; res . add ( iotFieldsExtension ) ; } return res ;
// Review notes: Liferay service-builder generated cache eviction. Removes the
// entity-cache entry keyed by primary key, invalidates both paginated and
// unpaginated finder caches, and clears unique-finder entries for this order
// (second argument true = also clear by the model's original column values).
public class CommerceOrderPersistenceImpl { /** * Clears the cache for the commerce order . * The { @ link EntityCache } and { @ link FinderCache } are both cleared by this method . */ @ Override public void clearCache ( CommerceOrder commerceOrder ) { } }
entityCache . removeResult ( CommerceOrderModelImpl . ENTITY_CACHE_ENABLED , CommerceOrderImpl . class , commerceOrder . getPrimaryKey ( ) ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITH_PAGINATION ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION ) ; clearUniqueFindersCache ( ( CommerceOrderModelImpl ) commerceOrder , true ) ;
public class HTMLEncoder { /** * Encode a unicode char value in percentEncode , decoding its bytes using a specified * characterEncoding . * @ param c * @ param characterEncoding * @ return */ private static void percentEncode ( Writer writer , char c , String characterEncoding ) throws IOException { } }
String app = null ; if ( c > ( char ) ( ( short ) 0x007F ) ) { // percent encode in the proper encoding to be consistent // app = percentEncodeNonUsAsciiCharacter ( writer c , characterEncoding ) ; percentEncodeNonUsAsciiCharacter ( writer , c , characterEncoding ) ; } else { // percent encode US - ASCII char ( 0x00-0x7F range ) // app = " % " + HEX _ CHARSET . charAt ( ( ( c > > 0x4 ) % 0x10 ) ) + HEX _ CHARSET . charAt ( c % 0x10 ) ; writer . write ( '%' ) ; writer . write ( HEX_CHARSET . charAt ( ( ( c >> 0x4 ) % 0x10 ) ) ) ; writer . write ( HEX_CHARSET . charAt ( c % 0x10 ) ) ; } // return app ;
// Review notes: fills assocPixel's second observation set with the connected
// view's pixels for every inlier that survived into the reconstruction
// (seedToStructure maps seed-feature index -> structure feature id, -1 = pruned),
// flipping src/dst depending on the edge direction. A count mismatch indicates
// an internal inconsistency and is treated as a bug (RuntimeException), not a
// recoverable failure. The camera matrix is then estimated from homogenous
// pixel/3D correspondences.
public class ProjectiveInitializeAllCommon { /** * Computes camera matrix between the seed view and a connected view * @ param seed This will be the source view . It ' s observations have already been added to assocPixel * @ param edge The edge which connects them * @ param featsB The dst view * @ param cameraMatrix ( Output ) resulting camera matrix * @ return true if successful */ private boolean computeCameraMatrix ( View seed , Motion edge , FastQueue < Point2D_F64 > featsB , DMatrixRMaj cameraMatrix ) { } }
boolean seedSrc = edge . src == seed ; int matched = 0 ; for ( int i = 0 ; i < edge . inliers . size ; i ++ ) { // need to go from i to index of detected features in view ' seed ' to index index of feature in // the reconstruction AssociatedIndex a = edge . inliers . get ( i ) ; int featId = seedToStructure . data [ seedSrc ? a . src : a . dst ] ; if ( featId == - 1 ) continue ; assocPixel . get ( featId ) . p2 . set ( featsB . get ( seedSrc ? a . dst : a . src ) ) ; matched ++ ; } // All views should have matches for all features , simple sanity check if ( matched != assocPixel . size ) throw new RuntimeException ( "BUG! Didn't find all features in the view" ) ; // Estimate the camera matrix given homogenous pixel observations if ( poseEstimator . processHomogenous ( assocPixel . toList ( ) , points3D . toList ( ) ) ) { cameraMatrix . set ( poseEstimator . getProjective ( ) ) ; return true ; } else { return false ; }
// Review notes: ANTLR-generated rule for `field (WS)* LESSER_EQUAL (WS)* value`.
// The LESSER_EQUAL token becomes the root of the produced subtree (becomeRoot),
// with field and value as children; WS tokens are consumed in the two loops.
// Generated code — do not hand-edit the token/FOLLOW bookkeeping.
public class QueryParser { /** * src / riemann / Query . g : 58:1 : lesser _ equal : field ( WS ) * LESSER _ EQUAL ( WS ) * value ; */ public final QueryParser . lesser_equal_return lesser_equal ( ) throws RecognitionException { } }
// Recognition errors are reported, recovered from, and surfaced as an error node
// in the returned tree rather than propagated.
QueryParser . lesser_equal_return retval = new QueryParser . lesser_equal_return ( ) ; retval . start = input . LT ( 1 ) ; CommonTree root_0 = null ; Token WS51 = null ; Token LESSER_EQUAL52 = null ; Token WS53 = null ; QueryParser . field_return field50 = null ; QueryParser . value_return value54 = null ; CommonTree WS51_tree = null ; CommonTree LESSER_EQUAL52_tree = null ; CommonTree WS53_tree = null ; try { // src / riemann / Query . g : 59:2 : ( field ( WS ) * LESSER _ EQUAL ( WS ) * value ) // src / riemann / Query . g : 59:4 : field ( WS ) * LESSER _ EQUAL ( WS ) * value { root_0 = ( CommonTree ) adaptor . nil ( ) ; pushFollow ( FOLLOW_field_in_lesser_equal413 ) ; field50 = field ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , field50 . getTree ( ) ) ; // src / riemann / Query . g : 59:10 : ( WS ) * loop19 : do { int alt19 = 2 ; int LA19_0 = input . LA ( 1 ) ; if ( ( LA19_0 == WS ) ) { alt19 = 1 ; } switch ( alt19 ) { case 1 : // src / riemann / Query . g : 59:10 : WS { WS51 = ( Token ) match ( input , WS , FOLLOW_WS_in_lesser_equal415 ) ; WS51_tree = ( CommonTree ) adaptor . create ( WS51 ) ; adaptor . addChild ( root_0 , WS51_tree ) ; } break ; default : break loop19 ; } } while ( true ) ; LESSER_EQUAL52 = ( Token ) match ( input , LESSER_EQUAL , FOLLOW_LESSER_EQUAL_in_lesser_equal418 ) ; LESSER_EQUAL52_tree = ( CommonTree ) adaptor . create ( LESSER_EQUAL52 ) ; root_0 = ( CommonTree ) adaptor . becomeRoot ( LESSER_EQUAL52_tree , root_0 ) ; // src / riemann / Query . g : 59:28 : ( WS ) * loop20 : do { int alt20 = 2 ; int LA20_0 = input . LA ( 1 ) ; if ( ( LA20_0 == WS ) ) { alt20 = 1 ; } switch ( alt20 ) { case 1 : // src / riemann / Query . g : 59:28 : WS { WS53 = ( Token ) match ( input , WS , FOLLOW_WS_in_lesser_equal421 ) ; WS53_tree = ( CommonTree ) adaptor . create ( WS53 ) ; adaptor . 
addChild ( root_0 , WS53_tree ) ; } break ; default : break loop20 ; } } while ( true ) ; pushFollow ( FOLLOW_value_in_lesser_equal424 ) ; value54 = value ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , value54 . getTree ( ) ) ; } retval . stop = input . LT ( - 1 ) ; retval . tree = ( CommonTree ) adaptor . rulePostProcessing ( root_0 ) ; adaptor . setTokenBoundaries ( retval . tree , retval . start , retval . stop ) ; } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; retval . tree = ( CommonTree ) adaptor . errorNode ( input , retval . start , input . LT ( - 1 ) , re ) ; } finally { } return retval ;
public class StreamDecoder { /** * Factories for java . io . InputStreamReader */ public static StreamDecoder forInputStreamReader ( InputStream in , Object lock , String charsetName ) throws UnsupportedEncodingException { } }
String csn = charsetName ; if ( csn == null ) csn = Charset . defaultCharset ( ) . name ( ) ; try { if ( Charset . isSupported ( csn ) ) return new StreamDecoder ( in , lock , Charset . forName ( csn ) ) ; } catch ( IllegalCharsetNameException x ) { } throw new UnsupportedEncodingException ( csn ) ;
// Review notes: orderly shutdown — first stops the management watchdogs, then
// deactivates every registered watchdog (logging each id), and finally calls
// kill() which (per the javadoc) waits until the last thread has terminated.
// Uses a raw Iterator over registeredWachdogs.values(); the cast to IWatchdog
// is required because the map is not generic.
public class WatchdogRegistry { /** * killt alle Threads der Gruppe und wartet bis auch der letzte beendet ist . * Es wird der evtl . Exceptionhandler geschlossen . */ public synchronized void shutdown ( ) { } }
this . shutdownManagementWatchdogs ( ) ; // deactivate all Watchdogs . . . . Iterator iter = registeredWachdogs . values ( ) . iterator ( ) ; while ( iter . hasNext ( ) ) { IWatchdog th = ( IWatchdog ) iter . next ( ) ; th . deactivate ( ) ; if ( log . isInfoEnabled ( ) ) { log . info ( ". . . Deactivating Watchdog " + th . getId ( ) ) ; } } this . kill ( ) ;
// Review notes: values all contained products, submitting each valuation to the
// executor and falling back to synchronous evaluation (wrapped in a completed
// FutureWrapper) when the executor rejects the task; results are summed as
// random variables. ExecutionException causes are unwrapped and rethrown as
// CalculationException / RuntimeException where applicable.
// NOTE(review): in the InterruptedException catch, e.getCause() is typically
// null for a plain interrupt, so CalculationException would be constructed
// with a null cause — confirm this is acceptable upstream.
public class ProductCollection { /** * This method returns the value random variable of the product within the specified model , evaluated at a given evalutationTime . * Note : For a lattice this is often the value conditional to evalutationTime , for a Monte - Carlo simulation this is the ( sum of ) value discounted to evaluation time . * Cashflows prior evaluationTime are not considered . * @ param evaluationTime The time on which this products value should be observed . * @ param model The model used to price the product . * @ return The random variable representing the value of the product discounted to evaluation time * @ throws net . finmath . exception . CalculationException Thrown if the valuation fails , specific cause may be available via the < code > cause ( ) < / code > method . * @ see net . finmath . montecarlo . AbstractMonteCarloProduct # getValue ( double , net . finmath . montecarlo . MonteCarloSimulationModel ) */ @ Override public RandomVariable getValue ( final double evaluationTime , final LIBORModelMonteCarloSimulationModel model ) throws CalculationException { } }
// Ignite asynchronous calculation if possible ArrayList < Future < RandomVariable > > results = new ArrayList < > ( ) ; for ( final MonteCarloProduct product : products ) { Future < RandomVariable > valueFuture ; try { valueFuture = getExecutor ( ) . submit ( new Callable < RandomVariable > ( ) { @ Override public RandomVariable call ( ) throws CalculationException { return product . getValue ( evaluationTime , model ) ; } } ) ; } catch ( RejectedExecutionException e ) { valueFuture = new FutureWrapper < > ( product . getValue ( evaluationTime , model ) ) ; } results . add ( valueFuture ) ; } // Collect results RandomVariable values = model . getRandomVariableForConstant ( 0.0 ) ; try { for ( Future < RandomVariable > valueFuture : results ) { values = values . add ( valueFuture . get ( ) ) ; } } catch ( InterruptedException e ) { throw e . getCause ( ) instanceof CalculationException ? ( CalculationException ) ( e . getCause ( ) ) : new CalculationException ( e . getCause ( ) ) ; } catch ( ExecutionException e ) { if ( CalculationException . class . isInstance ( e . getCause ( ) ) ) { throw ( CalculationException ) ( e . getCause ( ) ) ; } else if ( RuntimeException . class . isInstance ( e . getCause ( ) ) ) { throw ( RuntimeException ) ( e . getCause ( ) ) ; } else { throw new CalculationException ( e . getCause ( ) ) ; } } // Return values return values ;
// Review notes: looks up a database-specific override for the SHORT type in the
// properties map and falls back to SMALLINT, the standard SQL type for short
// integers. bIncludeLength is unused here — SMALLINT takes no length specifier.
// NOTE(review): the inline comment below says "( Byte )" but the default is
// DBSQLTypes.SMALLINT for a ShortField — the comment is stale/wrong.
public class ShortField { /** * Get the SQL type of this field . * Typically SHORT or SMALLINT . * @ param bIncludeLength Include the field length in this description . * @ param properties Database properties to determine the SQL type . * @ return The SQL Type . */ public String getSQLType ( boolean bIncludeLength , Map < String , Object > properties ) { } }
String strType = ( String ) properties . get ( DBSQLTypes . SHORT ) ; if ( strType == null ) strType = DBSQLTypes . SMALLINT ; // The default SQL Type ( Byte ) return strType ; // The default SQL Type
public class SearchView { /** * Specifies if a query refinement button should be displayed alongside each suggestion * or if it should depend on the flags set in the individual items retrieved from the * suggestions provider . Clicking on the query refinement button will replace the text * in the query text field with the text from the suggestion . This flag only takes effect * if a SearchableInfo has been specified with { @ link # setSearchableInfo ( SearchableInfo ) } * and not when using a custom adapter . * @ param enable true if all items should have a query refinement button , false if only * those items that have a query refinement flag set should have the button . * @ see SearchManager # SUGGEST _ COLUMN _ FLAGS * @ see SearchManager # FLAG _ QUERY _ REFINEMENT */ public void setQueryRefinementEnabled ( boolean enable ) { } }
mQueryRefinement = enable ; if ( mSuggestionsAdapter instanceof SuggestionsAdapter ) { ( ( SuggestionsAdapter ) mSuggestionsAdapter ) . setQueryRefinement ( enable ? SuggestionsAdapter . REFINE_ALL : SuggestionsAdapter . REFINE_BY_ENTRY ) ; }
public class CertificateX509Key { /** * Return an enumeration of names of attributes existing within this * attribute . */ public Enumeration < String > getElements ( ) { } }
AttributeNameEnumeration elements = new AttributeNameEnumeration ( ) ; elements . addElement ( KEY ) ; return ( elements . elements ( ) ) ;
public class ReportingDetectorFactorySelector { /** * ( non - Javadoc ) * @ see * edu . umd . cs . findbugs . plan . DetectorFactorySelector # selectFactory ( edu . umd * . cs . findbugs . DetectorFactory ) */ @ Override public boolean selectFactory ( DetectorFactory factory ) { } }
return ( plugin == null || plugin == factory . getPlugin ( ) ) && factory . isReportingDetector ( ) ;
// Review notes: pure static delegate — forwards to the underlying persistence
// bean resolved by getPersistence(); returns null when no matching link exists.
public class CPDefinitionLinkUtil { /** * Returns the first cp definition link in the ordered set where CPDefinitionId = & # 63 ; . * @ param CPDefinitionId the cp definition ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching cp definition link , or < code > null < / code > if a matching cp definition link could not be found */ public static CPDefinitionLink fetchByCPDefinitionId_First ( long CPDefinitionId , OrderByComparator < CPDefinitionLink > orderByComparator ) { } }
return getPersistence ( ) . fetchByCPDefinitionId_First ( CPDefinitionId , orderByComparator ) ;
public class VarnaUtil { /** * * * * * * BEGINNING OF FUNCTION * * * * * */ / * / public static String getAntyaVarna ( String str ) { } }
if ( str . length ( ) == 0 ) return null ; String antyaVarna = String . valueOf ( str . charAt ( str . length ( ) - 1 ) ) ; if ( str . length ( ) > 1 && antyaVarna . equals ( "3" ) ) // for pluta { antyaVarna = str . charAt ( str . length ( ) - 2 ) + antyaVarna ; } return antyaVarna ;
// Review notes: trivial Jackson-bound setter — stores the EKS cluster name
// associated with this exception; no validation or normalization is applied.
public class ResourceLimitExceededException { /** * The Amazon EKS cluster associated with the exception . * @ param clusterName * The Amazon EKS cluster associated with the exception . */ @ com . fasterxml . jackson . annotation . JsonProperty ( "clusterName" ) public void setClusterName ( String clusterName ) { } }
this . clusterName = clusterName ;
public class LoggerFactory { /** * Prints a warning message on the console if multiple bindings were found * on the class path . No reporting is done otherwise . */ private static void reportMultipleBindingAmbiguity ( List < SLF4JServiceProvider > providerList ) { } }
if ( isAmbiguousProviderList ( providerList ) ) { Util . report ( "Class path contains multiple SLF4J providers." ) ; for ( SLF4JServiceProvider provider : providerList ) { Util . report ( "Found provider [" + provider + "]" ) ; } Util . report ( "See " + MULTIPLE_BINDINGS_URL + " for an explanation." ) ; }
public class DatasourceJBossASClient { /** * Checks if the { @ code path } has one of the expected prefixes { @ link # DATASOURCE _ PATH _ PREFIX } or * { @ link # XA _ DATASOURCE _ PATH _ PREFIX } . * @ param modelNodePath the path to check * @ throws IllegalArgumentException is the { @ code path } is not valid */ public static void checkDatasourcePath ( String modelNodePath ) { } }
if ( ! modelNodePath . startsWith ( DATASOURCE_PATH_PREFIX ) && ! modelNodePath . startsWith ( XA_DATASOURCE_PATH_PREFIX ) ) { String msg = String . format ( "[%s] is not a datasource path. It must start with either [%s] or [%s]" , modelNodePath , DATASOURCE_PATH_PREFIX , XA_DATASOURCE_PATH_PREFIX ) ; throw new IllegalArgumentException ( msg ) ; }
// Review notes: guards reject null/empty input and a null type reference with
// IllegalArgumentException (distinct messages, so guard order is observable).
// Jackson parse/mapping failures are remapped to IllegalArgumentException
// (caller error); other I/O failures become JSONException. Catch order matters:
// JsonParseException and JsonMappingException are IOException subclasses.
public class JSON { /** * Deserializes a JSON formatted string to a specific class type * @ param json The string to deserialize * @ param type A type definition for a complex object * @ return An object of the { @ code pojo } type * @ throws IllegalArgumentException if the data or type was null or parsing * failed * @ throws JSONException if the data could not be parsed */ @ SuppressWarnings ( "unchecked" ) public static final < T > T parseToObject ( final String json , final TypeReference < T > type ) { } }
if ( json == null || json . isEmpty ( ) ) throw new IllegalArgumentException ( "Incoming data was null or empty" ) ; if ( type == null ) throw new IllegalArgumentException ( "Missing type reference" ) ; try { return ( T ) jsonMapper . readValue ( json , type ) ; } catch ( JsonParseException e ) { throw new IllegalArgumentException ( e ) ; } catch ( JsonMappingException e ) { throw new IllegalArgumentException ( e ) ; } catch ( IOException e ) { throw new JSONException ( e ) ; }
// Review notes: standard protobuf varint decoder. Reads up to five payload
// bytes, accumulating 7 bits per byte (low-order first); a byte with the high
// bit clear (>= 0) terminates. When the 5th byte still has its high bit set,
// up to 5 further continuation bytes are skipped (upper bits of a 64-bit
// varint are discarded) before declaring the varint malformed. The shift
// cascade order is essential — do not reorder.
public class CodedInput { /** * Read a raw Varint from the stream . If larger than 32 bits , discard the upper bits . */ public int readRawVarint32 ( ) throws IOException { } }
byte tmp = readRawByte ( ) ; if ( tmp >= 0 ) { return tmp ; } int result = tmp & 0x7f ; if ( ( tmp = readRawByte ( ) ) >= 0 ) { result |= tmp << 7 ; } else { result |= ( tmp & 0x7f ) << 7 ; if ( ( tmp = readRawByte ( ) ) >= 0 ) { result |= tmp << 14 ; } else { result |= ( tmp & 0x7f ) << 14 ; if ( ( tmp = readRawByte ( ) ) >= 0 ) { result |= tmp << 21 ; } else { result |= ( tmp & 0x7f ) << 21 ; result |= ( tmp = readRawByte ( ) ) << 28 ; if ( tmp < 0 ) { // Discard upper 32 bits . for ( int i = 0 ; i < 5 ; i ++ ) { if ( readRawByte ( ) >= 0 ) { return result ; } } throw ProtobufException . malformedVarint ( ) ; } } } } return result ;
// Review notes: writes both cluster snapshots into the output directory; the
// file names are the sibling constants currentClusterFileName /
// finalClusterFileName prefixed by filePrefix. Delegates all I/O (and any
// error handling) to dumpClusterToFile.
public class RebalanceUtils { /** * Given the initial and final cluster dumps it into the output directory * @ param currentCluster Initial cluster metadata * @ param finalCluster Final cluster metadata * @ param outputDirName Output directory where to dump this file * @ param filePrefix String to prepend to the initial & final cluster * metadata files * @ throws IOException */ public static void dumpClusters ( Cluster currentCluster , Cluster finalCluster , String outputDirName , String filePrefix ) { } }
dumpClusterToFile ( outputDirName , filePrefix + currentClusterFileName , currentCluster ) ; dumpClusterToFile ( outputDirName , filePrefix + finalClusterFileName , finalCluster ) ;
// Review notes: destructive intersection over the flat parallel array layout
// (even index = value number, odd index = lock count; a negative value number
// terminates the list). For every lock held here (count > 0) but not held in
// the other set, the count is zeroed. Counts are not merged — only the
// held/not-held distinction matters, as the javadoc states.
public class LockSet { /** * Destructively intersect this lock set with another . Note that this is * < em > not < / em > a dataflow merge : we are interested in finding out which * locks are held in both sets , not in the exact lock counts . * @ param other * the other LockSet */ public void intersectWith ( LockSet other ) { } }
for ( int i = 0 ; i + 1 < array . length ; i += 2 ) { int valueNumber = array [ i ] ; if ( valueNumber < 0 ) { break ; } int myLockCount = array [ i + 1 ] ; if ( myLockCount <= 0 ) { continue ; } int otherLockCount = other . getLockCount ( valueNumber ) ; if ( otherLockCount <= 0 ) { /* This set holds the lock , but the other one doesn ' t . */ array [ i + 1 ] = 0 ; } }
// Review notes: bridges the Java EE Security API to servlet authentication —
// stashes the AuthenticationParameters as a request attribute for the
// downstream mechanism, then calls HttpServletRequest.authenticate(). Returns
// SUCCESS on true, otherwise SEND_FAILURE. NOTE(review): failures and
// exceptions are only printStackTrace'd (see the TODOs) — error handling and
// proper logging are still outstanding; behavior intentionally left as-is.
public class SecurityContextImpl { /** * ( non - Javadoc ) * @ see javax . security . enterprise . SecurityContext # authenticate ( javax . servlet . http . HttpServletRequest , javax . servlet . http . HttpServletResponse , * javax . security . enterprise . authentication . mechanism . http . AuthenticationParameters ) */ @ Override public AuthenticationStatus authenticate ( HttpServletRequest req , HttpServletResponse res , AuthenticationParameters params ) { } }
AuthenticationStatus authStatus = AuthenticationStatus . SEND_FAILURE ; req . setAttribute ( JavaEESecConstants . SECURITY_CONTEXT_AUTH_PARAMS , params ) ; try { boolean result = req . authenticate ( res ) ; if ( result ) { authStatus = AuthenticationStatus . SUCCESS ; } else { // TODO some error handling . } } catch ( Exception e ) { // TODO need to handle error . e . printStackTrace ( ) ; } return authStatus ;
public class AtomicStampedReference { /** * Atomically sets the value of both the reference and stamp * to the given update values if the * current reference is { @ code = = } to the expected reference * and the current stamp is equal to the expected stamp . * < p > < a href = " package - summary . html # weakCompareAndSet " > May fail * spuriously and does not provide ordering guarantees < / a > , so is * only rarely an appropriate alternative to { @ code compareAndSet } . * @ param expectedReference the expected value of the reference * @ param newReference the new value for the reference * @ param expectedStamp the expected value of the stamp * @ param newStamp the new value for the stamp * @ return { @ code true } if successful */ public boolean weakCompareAndSet ( V expectedReference , V newReference , int expectedStamp , int newStamp ) { } }
return compareAndSet ( expectedReference , newReference , expectedStamp , newStamp ) ;
// Review notes: thread-safe wrapper — takes the table lock and delegates to
// _add with resize enabled (third argument true), releasing the lock in
// finally. Returns whatever _add reports (true when the slot was empty).
public class Table { /** * Adds an element if the element at the given index is null . Returns true if no element existed at the given index , * else returns false and doesn ' t set the element . * @ param seqno * @ param element * @ param remove _ filter If not null , a filter used to remove all consecutive messages passing the filter * @ return True if the element at the computed index was null , else false */ public boolean add ( long seqno , T element , Predicate < T > remove_filter ) { } }
lock . lock ( ) ; try { return _add ( seqno , element , true , remove_filter ) ; } finally { lock . unlock ( ) ; }
public class Cluster { /** * The list of cluster parameter groups that are associated with this cluster . Each parameter group in the list is * returned with its status . * @ param clusterParameterGroups * The list of cluster parameter groups that are associated with this cluster . Each parameter group in the * list is returned with its status . */ public void setClusterParameterGroups ( java . util . Collection < ClusterParameterGroupStatus > clusterParameterGroups ) { } }
if ( clusterParameterGroups == null ) { this . clusterParameterGroups = null ; return ; } this . clusterParameterGroups = new com . amazonaws . internal . SdkInternalList < ClusterParameterGroupStatus > ( clusterParameterGroups ) ;
// Review notes: protocol-state gate before reading a response body. With
// immediate/early reads enabled, only the request HEADERS must already be out;
// otherwise the entire request message must have been sent. Violations throw
// IOException (with debug tracing). When the state is valid, defers to
// isIncomingBodyValid() to decide whether a body read is needed at all.
public class HttpOutboundServiceContextImpl { /** * Utility method to check whether the upcoming read for the response body * is either valid at this point or even necessary . * @ return boolean - - false means there is no need to read for a body * @ throws IOException * - - if this not a valid time to get the body */ private boolean checkBodyValidity ( ) throws IOException { } }
// LI4335 - allow response body reading if early reads are in place if ( isImmediateReadEnabled ( ) || this . bEarlyReads ) { if ( ! headersParsed ( ) ) { // this means they are requesting body buffers prior to sending // the minimum request headers IOException ioe = new IOException ( "Request headers not sent yet" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Attempt to read response prior to sendRequest" ) ; } throw ioe ; } // otherwise continue as normal } else if ( ! isMessageSent ( ) ) { // request message must be fully sent prior to reading any part of // the response body IOException ioe = new IOException ( "Request not finished yet" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Attempt to read response prior to finishRequest" ) ; } throw ioe ; } // check to see if we should be reading for data return isIncomingBodyValid ( ) ;
public class Period { /** * Converts this period to a duration assuming a * 7 day week , 24 hour day , 60 minute hour and 60 second minute . * This method allows you to convert from a period to a duration . * However to achieve this it makes the assumption that all * weeks are 7 days , all days are 24 hours , all hours are 60 minutes and * all minutes are 60 seconds . This is not true when daylight savings time * is considered , and may also not be true for some unusual chronologies . * However , it is included as it is a useful operation for many * applications and business rules . * If the period contains years or months , an exception will be thrown . * @ return a duration equivalent to this period * @ throws UnsupportedOperationException if the period contains years or months * @ since 1.5 */ public Duration toStandardDuration ( ) { } }
checkYearsAndMonths ( "Duration" ) ; long millis = getMillis ( ) ; // no overflow can happen , even with Integer . MAX _ VALUEs millis += ( ( ( long ) getSeconds ( ) ) * ( ( long ) DateTimeConstants . MILLIS_PER_SECOND ) ) ; millis += ( ( ( long ) getMinutes ( ) ) * ( ( long ) DateTimeConstants . MILLIS_PER_MINUTE ) ) ; millis += ( ( ( long ) getHours ( ) ) * ( ( long ) DateTimeConstants . MILLIS_PER_HOUR ) ) ; millis += ( ( ( long ) getDays ( ) ) * ( ( long ) DateTimeConstants . MILLIS_PER_DAY ) ) ; millis += ( ( ( long ) getWeeks ( ) ) * ( ( long ) DateTimeConstants . MILLIS_PER_WEEK ) ) ; return new Duration ( millis ) ;
// Review notes: CLI parsing with field side effects. Ordering matters: the
// "iter_len" default is derived from the CURRENT maxIterationTime field (in
// minutes) before maxIterationTime is overwritten from the parsed value.
// ParseException is remapped to IllegalArgumentException with the raw args;
// any RuntimeException (including number-format and threshold validation
// failures) prints usage before propagating.
public class Balancer { /** * parse command line arguments */ private void parse ( String [ ] args ) { } }
Options cliOpts = setupOptions ( ) ; BasicParser parser = new BasicParser ( ) ; CommandLine cl = null ; try { try { cl = parser . parse ( cliOpts , args ) ; } catch ( ParseException ex ) { throw new IllegalArgumentException ( "args = " + Arrays . toString ( args ) ) ; } int newThreshold = Integer . parseInt ( cl . getOptionValue ( "threshold" , "10" ) ) ; int iterationTime = Integer . parseInt ( cl . getOptionValue ( "iter_len" , String . valueOf ( maxIterationTime / ( 60 * 1000 ) ) ) ) ; maxConcurrentMoves = Integer . parseInt ( cl . getOptionValue ( "node_par_moves" , String . valueOf ( MAX_NUM_CONCURRENT_MOVES ) ) ) ; moveThreads = Integer . parseInt ( cl . getOptionValue ( "par_moves" , String . valueOf ( MOVER_THREAD_POOL_SIZE ) ) ) ; maxIterationTime = iterationTime * 60 * 1000L ; threshold = checkThreshold ( newThreshold ) ; System . out . println ( "Running with threshold of " + threshold + " and iteration time of " + maxIterationTime + " milliseconds" ) ; } catch ( RuntimeException e ) { printUsage ( cliOpts ) ; throw e ; }
// Review notes: expands a degradation term into a single statement connecting
// the degradation to its (only) protein argument with DIRECTLY_DECREASES.
// NOTE(review): `Object` here is the BEL model's statement-object class (it
// wraps the protein term), NOT java.lang.Object — it shadows the JDK name;
// confirm imports when moving this code. The expansion-rule citation is
// attached before the statement is returned.
public class DegradationExpansionRule { /** * { @ inheritDoc } */ @ Override public List < Statement > expand ( Term degradation ) { } }
List < Statement > statements = new ArrayList < Statement > ( ) ; BELObject firstArgument = degradation . getFunctionArguments ( ) . get ( 0 ) ; Term protein = ( Term ) firstArgument ; // only argument of a protein // degradation term is the protein // Protein degradation term connects to its protein with // DIRECTLY _ DECREASES relationship final Object obj = new Object ( protein ) ; Statement statement = new Statement ( degradation , null , null , obj , DIRECTLY_DECREASES ) ; attachExpansionRuleCitation ( statement ) ; statements . add ( statement ) ; return statements ;
public class WorkerInfo { /** * < code > optional . alluxio . grpc . WorkerNetAddress address = 2 ; < / code > */ public alluxio . grpc . WorkerNetAddressOrBuilder getAddressOrBuilder ( ) { } }
return address_ == null ? alluxio . grpc . WorkerNetAddress . getDefaultInstance ( ) : address_ ;
public class Predicates { /** * Returns a predicate that evaluates to true if the object reference being tested is a member * of the given collection . */ public < T > Predicate < T > in ( final Collection < ? extends T > target ) { } }
return new Predicate < T > ( ) { public boolean apply ( T arg ) { try { return target . contains ( arg ) ; } catch ( NullPointerException e ) { return false ; } catch ( ClassCastException e ) { return false ; } } } ;
public class GrapesServer { /** * Runs Grapes */ public static void main ( final String [ ] args ) throws ExceptionInInitializerError { } }
try { final GrapesServer grapesServer = new GrapesServer ( ) ; grapesServer . run ( args ) ; } catch ( Exception e ) { LOG . error ( "Grapes server failed to start:" + e . getMessage ( ) ) ; throw new ExceptionInInitializerError ( e ) ; }
public class SwapSpaceMonitor { /** * Returns the HTML representation of the space . */ public String toHtml ( MemoryUsage usage ) { } }
if ( usage . availableSwapSpace == - 1 ) return "N/A" ; String humanReadableSpace = Functions . humanReadableByteSize ( usage . availableSwapSpace ) ; long free = usage . availableSwapSpace ; free /= 1024L ; // convert to KB free /= 1024L ; // convert to MB if ( free > 256 || usage . totalSwapSpace < usage . availableSwapSpace * 5 ) return humanReadableSpace ; // if we have more than 256MB free or less than 80 % filled up , it ' s OK // Otherwise considered dangerously low . return Util . wrapToErrorSpan ( humanReadableSpace ) ;
public class AbstractCurrencyConversion {
    /**
     * Method that converts the source {@link MonetaryAmount} to an
     * {@link MonetaryAmount} based on the {@link ExchangeRate} of this
     * conversion.
     *
     * @param amount The source amount
     * @return The converted amount, never null.
     * @throws CurrencyConversionException if conversion failed, or the required data is not available.
     * @see #getExchangeRate(MonetaryAmount)
     */
    @Override
    public MonetaryAmount apply(MonetaryAmount amount) {
        // Already in the target currency: nothing to convert.
        if (termCurrency.equals(Objects.requireNonNull(amount).getCurrency())) {
            return amount;
        }
        ExchangeRate rate = getExchangeRate(amount);
        // A usable rate must exist and must be based on the source currency.
        if (Objects.isNull(rate) || !amount.getCurrency().equals(rate.getBaseCurrency())) {
            throw new CurrencyConversionException(amount.getCurrency(), this.termCurrency, null);
        }
        NumberValue factor = rate.getFactor();
        // Round the factor first so the multiplication uses bounded precision.
        factor = roundFactor(amount, factor);
        // Optional target scale carried in the rate context; absent or negative
        // means "no post-multiplication rounding".
        Integer scale = rate.getContext().get(KEY_SCALE, Integer.class);
        if (Objects.isNull(scale) || scale < 0) {
            return amount.multiply(factor).getFactory().setCurrency(rate.getCurrency()).create();
        } else {
            return amount.multiply(factor).getFactory().setCurrency(rate.getCurrency()).create()
                    .with(MonetaryOperators.rounding(scale));
        }
    }
}
public class CommerceCurrencyPersistenceImpl { /** * Removes all the commerce currencies where groupId = & # 63 ; and active = & # 63 ; from the database . * @ param groupId the group ID * @ param active the active */ @ Override public void removeByG_A ( long groupId , boolean active ) { } }
for ( CommerceCurrency commerceCurrency : findByG_A ( groupId , active , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceCurrency ) ; }
public class JStormDebugger { /** * one of the rootIds has been chosen , the logs should be output */ private static boolean sample ( Set < Long > rootIds ) { } }
if ( Double . compare ( sampleRate , 1.0d ) >= 0 ) return true ; int threshold = ( int ) ( sampleRate * PRECISION ) ; for ( Long id : rootIds ) { int mod = ( int ) ( Math . abs ( id ) % PRECISION ) ; if ( mod < threshold ) { return true ; } } return false ;
public class DenyAssignmentsInner { /** * Get the specified deny assignment . * @ param scope The scope of the deny assignment . * @ param denyAssignmentId The ID of the deny assignment to get . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the DenyAssignmentInner object if successful . */ public DenyAssignmentInner get ( String scope , String denyAssignmentId ) { } }
return getWithServiceResponseAsync ( scope , denyAssignmentId ) . toBlocking ( ) . single ( ) . body ( ) ;
public class KeyRep {
    /**
     * Resolve the Key object.
     *
     * <p>This method supports three Type/format combinations:
     * <ul>
     * <li>Type.SECRET/"RAW" - returns a SecretKeySpec object
     * constructed using encoded key bytes and algorithm
     * <li>Type.PUBLIC/"X.509" - gets a KeyFactory instance for
     * the key algorithm, constructs an X509EncodedKeySpec with the
     * encoded key bytes, and generates a public key from the spec
     * <li>Type.PRIVATE/"PKCS#8" - gets a KeyFactory instance for
     * the key algorithm, constructs a PKCS8EncodedKeySpec with the
     * encoded key bytes, and generates a private key from the spec
     * </ul>
     *
     * @return the resolved Key object
     * @exception ObjectStreamException if the Type/format
     * combination is unrecognized, if the algorithm, key format, or
     * encoded key bytes are unrecognized/invalid, or if the
     * resolution of the key fails for any reason
     */
    protected Object readResolve() throws ObjectStreamException {
        try {
            if (type == Type.SECRET && RAW.equals(format)) {
                // Raw secret keys rebuild directly from the encoded bytes.
                return new SecretKeySpec(encoded, algorithm);
            } else if (type == Type.PUBLIC && X509.equals(format)) {
                KeyFactory f = KeyFactory.getInstance(algorithm);
                return f.generatePublic(new X509EncodedKeySpec(encoded));
            } else if (type == Type.PRIVATE && PKCS8.equals(format)) {
                KeyFactory f = KeyFactory.getInstance(algorithm);
                return f.generatePrivate(new PKCS8EncodedKeySpec(encoded));
            } else {
                throw new NotSerializableException("unrecognized type/format combination: " + type + "/" + format);
            }
        } catch (NotSerializableException nse) {
            // Re-throw our own structured failure unchanged.
            throw nse;
        } catch (Exception e) {
            // Wrap any factory/spec failure, preserving the original cause.
            NotSerializableException nse = new NotSerializableException("java.security.Key: " + "[" + type + "] " + "[" + algorithm + "] " + "[" + format + "]");
            nse.initCause(e);
            throw nse;
        }
    }
}
public class Dater { /** * Returns a AddsOrSets instance that add the specified field to a delegate date * @ return */ public DateUnit add ( ) { } }
if ( this . add . isPresent ( ) ) { return add . get ( ) ; } return ( this . add = Optional . of ( ( DateUnit ) new DateUnit ( this ) { @ Override protected DateUnit handle ( int calendarField , int amount ) { Calendar c = asCalendar ( ) ; c . add ( calendarField , amount ) ; target = c . getTime ( ) ; return this ; } } ) ) . get ( ) ;
public class KeycloakUserServlet { /** * With Keycloak integration , the Authorization header is available in the request to the UserServlet . */ protected String getKeycloakUsername ( final HttpServletRequest req , HttpServletResponse resp ) { } }
AtomicReference < String > username = new AtomicReference < > ( ) ; Authenticator . authenticate ( authConfiguration , req , subject -> { username . set ( AuthHelpers . getUsername ( subject ) ) ; // Start httpSession req . getSession ( true ) ; } ) ; return username . get ( ) ;
public class MtasToken { /** * Adds the positions . * @ param list the list */ final public void addPositions ( Set < Integer > list ) { } }
int [ ] positions = ArrayUtils . toPrimitive ( list . toArray ( new Integer [ list . size ( ) ] ) ) ; addPositions ( positions ) ;
public class OgnlEvaluator { /** * Eagerly compile this OGNL expression * @ param root * @ return */ Node compile ( final Object root ) { } }
if ( compiled == null ) { compiled = compileExpression ( root , this . expr ) ; parsed = null ; if ( this . notifyOnCompiled != null ) this . notifyOnCompiled . accept ( this . expr , this ) ; } return compiled ;
public class PublicIPAddressesInner { /** * Gets information about all public IP addresses on a virtual machine scale set level . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; PublicIPAddressInner & gt ; object */ public Observable < Page < PublicIPAddressInner > > listVirtualMachineScaleSetPublicIPAddressesNextAsync ( final String nextPageLink ) { } }
return listVirtualMachineScaleSetPublicIPAddressesNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < PublicIPAddressInner > > , Page < PublicIPAddressInner > > ( ) { @ Override public Page < PublicIPAddressInner > call ( ServiceResponse < Page < PublicIPAddressInner > > response ) { return response . body ( ) ; } } ) ;
public class Instruction { /** * Get the { @ link Kind } of this instruction . */ public Kind getKind ( ) { } }
Opcode opcode = getOpcode ( ) ; return ( opcode != null ? opcode . kind : Kind . UNKNOWN ) ;
public class RSAWithBase64 { /** * 获取RAS Base64 公密钥对 * < p > Function : keyGen < / p > * < p > Description : < / p > * @ param * @ return * @ author acexy @ thankjava . com * @ date 2016年8月10日 下午6:31:26 * @ version 1.0 */ public static RSAKeyString keyGen ( int keySize ) { } }
try { RSAKey keys = RSA . keyGen ( keySize ) ; return new RSAKeyString ( Base64Util . encode2String ( keys . getPublicKey ( ) . getEncoded ( ) ) , Base64Util . encode2String ( keys . getPrivateKey ( ) . getEncoded ( ) ) ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } return null ;
public class EFapsClassLoader { /** * Loads the wanted Resource with the EFapsResourceStore into a byte - Array * to pass it on to findClass . * @ param _ resourceName name of the Resource to load * @ return byte [ ] containing the compiled javaclass */ protected byte [ ] loadClassData ( final String _resourceName ) { } }
EFapsClassLoader . LOG . debug ( "Loading Class '{}' from Database." , _resourceName ) ; final byte [ ] x = read ( _resourceName ) ; return x ;
public class BasicParallelSearch {
    /**
     * Remove the given search. If the search was never added, <code>false</code> is returned.
     * Note that this method may only be called when the search is idle.
     *
     * @param search search to be removed from parallel algorithm
     * @return <code>true</code> if search is successfully removed
     * @throws SearchException if the search is not idle
     */
    public boolean removeSearch(Search<SolutionType> search) {
        // synchronize with status updates
        synchronized (getStatusLock()) {
            // assert idle
            assertIdle("Cannot remove search from basic parallel search algorithm.");
            // check if search was added
            if (searches.contains(search)) {
                // remove search
                searches.remove(search);
                // stop listening to events fired by this search
                search.removeSearchListener(subsearchListener);
                // FIX: report success — the previous code fell through and
                // always returned false, contradicting the documented contract.
                return true;
            }
            return false;
        }
    }
}
public class AbstractComponentDecoration { /** * Updates the decoration painter with respect to the decorated component . * This method is to be called whenever changes on the decorated component have an impact on the decoration ( for * instance , its size , location , etc . ) . * This method has been made protected so that it can be easily called from the implementing sub - classes . */ protected void followDecoratedComponent ( ) { } }
if ( ( anchorLink != null ) && ( decoratedComponent != null ) ) { if ( attachedLayeredPane == null ) { // Try to attach to a layered pane attachToLayeredPane ( ) ; } followDecoratedComponent ( attachedLayeredPane ) ; }
public class NodeSequence {
    /**
     * Create a batch of nodes around the supplied iterator. Note that the supplied iterator is accessed lazily only when the
     * batch is {@link Batch#nextRow() used}.
     *
     * @param keys the iterator over the keys of the nodes to be returned; if null, an {@link #emptySequence empty instance} is
     *        returned
     * @param nodeCount the number of nodes in the iterator; must be -1 if not known, 0 if known to be empty, or a positive number
     *        if the number of nodes is known
     * @param score the score to return for all of the nodes
     * @param workspaceName the name of the workspace in which all of the nodes exist
     * @param cache the workspace cache used to access the cached nodes; may be null only if the key sequence is null or empty
     * @return the batch of nodes; never null
     */
    public static Batch batchOfKeys(final Iterator<NodeKey> keys,
                                    final long nodeCount,
                                    final float score,
                                    final String workspaceName,
                                    final NodeCache cache) {
        assert nodeCount >= -1;
        if (keys == null) return emptyBatch(workspaceName, 1);
        return new Batch() {
            // Node resolved by the most recent nextRow() call; null before the first call.
            private CachedNode current;

            @Override
            public int width() {
                return 1; // single-node rows
            }

            @Override
            public long rowCount() {
                return nodeCount;
            }

            @Override
            public boolean isEmpty() {
                return nodeCount == 0;
            }

            @Override
            public String getWorkspaceName() {
                return workspaceName;
            }

            @Override
            public boolean hasNext() {
                return keys.hasNext();
            }

            @Override
            public void nextRow() {
                // Lazily resolve the next key against the workspace cache.
                NodeKey key = keys.next();
                current = cache.getNode(key);
            }

            @Override
            public CachedNode getNode() {
                return current;
            }

            @Override
            public CachedNode getNode(int index) {
                // Width is 1, so only index 0 is addressable.
                if (index != 0) {
                    throw new IndexOutOfBoundsException();
                }
                return current;
            }

            @Override
            public float getScore() {
                return score;
            }

            @Override
            public float getScore(int index) {
                if (index != 0) throw new IndexOutOfBoundsException();
                return score;
            }

            @Override
            public String toString() {
                return "(batch key-count=" + rowCount() + " score=" + getScore() + " " + keys + ")";
            }
        };
    }
}
public class AckEventType { /** * Create a new { @ link AckEventType } from an enum value . * @ param value String value of the error code . * @ return An { @ link AckEventType } or null if the value was null . */ public static AckEventType of ( Values value ) { } }
if ( value == Values . SDK_UNKNOWN ) { throw new IllegalArgumentException ( "SDK_UNKNOWN cannot be used to create an AckEventType. Use the raw value to" + "create an AckEventType from a string instead" ) ; } return value == null ? null : new AckEventType ( value , value . toString ( ) ) ;
public class LightMetaBean { /** * Adds an alias to the meta - bean . * When using { @ link # metaProperty ( String ) } , the alias will return the * meta - property of the real name . * @ param alias the alias * @ param realName the real name * @ return the new meta - bean instance * @ throws IllegalArgumentException if the realName is invalid */ public LightMetaBean < T > withAlias ( String alias , String realName ) { } }
if ( ! metaPropertyMap . containsKey ( realName ) ) { throw new IllegalArgumentException ( "Invalid property name: " + realName ) ; } Map < String , String > aliasMap = new HashMap < > ( this . aliasMap ) ; aliasMap . put ( alias , realName ) ; return new LightMetaBean < > ( beanType , metaPropertyMap , aliasMap , constructorFn , constructionData ) ;
public class AbstractJPAQuery {
    /**
     * Transforms results using FactoryExpression if ResultTransformer can't be used.
     *
     * @param query query
     * @return results
     */
    private List<?> getResultList(Query query) {
        // TODO : use lazy fetch here?
        if (projection == null) {
            return query.getResultList();
        }
        List<?> raw = query.getResultList();
        List<Object> transformed = new ArrayList<Object>(raw.size());
        for (Object row : raw) {
            if (row == null) {
                transformed.add(null);
                continue;
            }
            // Wrap scalar rows so the projection always receives an Object[].
            Object[] cells = row.getClass().isArray() ? (Object[]) row : new Object[]{row};
            transformed.add(projection.newInstance(cells));
        }
        return transformed;
    }
}
public class GetModelRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param getModelRequest the request to marshall; must not be null
     * @param protocolMarshaller target marshaller receiving each bound field
     * @throws SdkClientException when the request is null or any field fails to marshall
     */
    public void marshall(GetModelRequest getModelRequest, ProtocolMarshaller protocolMarshaller) {
        if (getModelRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field with its protocol binding; order follows the bindings.
            protocolMarshaller.marshall(getModelRequest.getRestApiId(), RESTAPIID_BINDING);
            protocolMarshaller.marshall(getModelRequest.getModelName(), MODELNAME_BINDING);
            protocolMarshaller.marshall(getModelRequest.getFlatten(), FLATTEN_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception type, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class BaseStreamWriter {
    /**
     * Method called by {@link javax.xml.stream.XMLEventWriter}
     * (instead of more generic
     * text output methods), so that we can verify (if necessary) that
     * this character output type is legal in this context. Specifically,
     * it's not acceptable to add non-whitespace content outside root
     * element (in prolog/epilog).
     *
     * Note: cut'n pasted from the main <code>writeCharacters</code>; not
     * good... but done to optimize white-space cases.
     *
     * @param ch event carrying the character data to write
     * @throws XMLStreamException on structural/validation violations or I/O failure
     */
    public void writeCharacters(Characters ch) throws XMLStreamException {
        // Need to finish an open start element?
        if (mStartElementOpen) {
            closeStartElement(mEmptyElement);
        }
        /* Not legal outside main element tree, except if it's all
         * white space
         */
        if (mCheckStructure) {
            if (inPrologOrEpilog()) {
                if (!ch.isIgnorableWhiteSpace() && !ch.isWhiteSpace()) {
                    reportNwfStructure(ErrorConsts.WERR_PROLOG_NONWS_TEXT);
                }
            }
        }
        // Content validation: ordering matters — structure check runs first.
        if (mVldContent <= XMLValidator.CONTENT_ALLOW_WS) {
            if (mVldContent == XMLValidator.CONTENT_ALLOW_NONE) { // never ok
                reportInvalidContent(CHARACTERS);
            } else { // all-ws is ok...
                if (!ch.isIgnorableWhiteSpace() && !ch.isWhiteSpace()) {
                    reportInvalidContent(CHARACTERS);
                }
            }
        } else if (mVldContent == XMLValidator.CONTENT_ALLOW_VALIDATABLE_TEXT) {
            if (mValidator != null) {
                /* Last arg is false, since we do not know if more text
                 * may be added with additional calls
                 */
                mValidator.validateText(ch.getData(), false);
            }
        }
        // Ok, let's just write it out:
        try {
            mWriter.writeCharacters(ch.getData());
        } catch (IOException ioe) {
            // Translate low-level I/O failures to the streaming API's exception type.
            throw new WstxIOException(ioe);
        }
    }
}
public class RedisBase {
    /**
     * Returns the remaining time to live of the current key (in seconds).
     *
     * @return -2 when the key does not exist;
     *         -1 when the key exists but has no TTL set;
     *         otherwise the key's remaining time to live.
     */
    public Long getTtl() {
        try {
            // String (non-binary) keys go through the plain Jedis command set.
            if (!isBinary)
                return getJedisCommands(groupName).ttl(key);
            long result = 0;
            // Binary keys: pick the cluster or single-node binary command set.
            if (isCluster(groupName)) {
                result = getBinaryJedisClusterCommands(groupName).ttl(keyBytes);
            } else {
                result = getBinaryJedisCommands(groupName).ttl(keyBytes);
            }
            return result;
        } finally {
            // Always return the connection to the provider/pool.
            getJedisProvider(groupName).release();
        }
    }
}
public class CheckpointStatsTracker { /** * Callback when a checkpoint is restored . * @ param restored The restored checkpoint stats . */ void reportRestoredCheckpoint ( RestoredCheckpointStats restored ) { } }
checkNotNull ( restored , "Restored checkpoint" ) ; statsReadWriteLock . lock ( ) ; try { counts . incrementRestoredCheckpoints ( ) ; latestRestoredCheckpoint = restored ; dirty = true ; } finally { statsReadWriteLock . unlock ( ) ; }
public class FilterChain {
    /**
     * Traverses the DOM and applies the filters for each visited node.
     *
     * @param walker tree walker positioned at the node to process
     * @param indent current indentation used for log output
     * @param metadata extraction metadata shared by the filters
     * @param sb Optional {@link StringBuilder} used to track progress for logging purposes.
     */
    public void traverseAndFilter(final TreeWalker walker, final String indent, final Metadata metadata, final StringBuilder sb) {
        // Remember the current node so the walker can be restored after recursion.
        final Node parend = walker.getCurrentNode();
        final boolean isLogged = appendText(indent, (Element) parend, sb);
        // First matching filter wins; later filters are not consulted for this node.
        for (final Filter filter : filterList) {
            if (filter.filter(metadata, walker)) {
                appendText(" catched by: " + filter.getClass().getSimpleName(), sb);
                break;
            }
        }
        if (isLogged) {
            appendText("\n", sb);
        }
        // Depth-first over the children; the walker carries the position, so the
        // loop variable only detects exhaustion of siblings.
        for (Node n = walker.firstChild(); n != null; n = walker.nextSibling()) {
            traverseAndFilter(walker, indent + " ", metadata, sb);
        }
        walker.setCurrentNode(parend);
    }
}
public class CPOptionPersistenceImpl {
    /**
     * Returns all the cp options.
     *
     * @return the cp options
     */
    @Override
    public List<CPOption> findAll() {
        // Delegate to the ranged variant with an unbounded range and no ordering.
        return findAll(QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
    }
}
public class CacheHandler { /** * 获取CacheOpType , 从三个地方获取 : < br > * 1 . Cache注解中获取 ; < br > * 2 . 从ThreadLocal中获取 ; < br > * 3 . 从参数中获取 ; < br > * 上面三者的优先级 : 从低到高 。 * @ param cache 注解 * @ param arguments 参数 * @ return CacheOpType */ private CacheOpType getCacheOpType ( Cache cache , Object [ ] arguments ) { } }
CacheOpType opType = cache . opType ( ) ; CacheOpType tmpOpType = CacheHelper . getCacheOpType ( ) ; if ( null != tmpOpType ) { opType = tmpOpType ; } if ( null != arguments && arguments . length > 0 ) { for ( Object tmp : arguments ) { if ( null != tmp && tmp instanceof CacheOpType ) { opType = ( CacheOpType ) tmp ; break ; } } } if ( null == opType ) { opType = CacheOpType . READ_WRITE ; } return opType ;
public class AbstractMailFaxClientSpi {
    /**
     * This function will send the mail message.
     *
     * @param faxJob
     *        The fax job object containing the needed information
     * @param mailConnection
     *        The mail connection (will be released if not persistent)
     * @param message
     *        The message to send
     */
    protected void sendMail(FaxJob faxJob, Connection<MailResourcesHolder> mailConnection, Message message) {
        if (message == null) {
            // A null message means this SPI cannot service the request.
            this.throwUnsupportedException();
        } else {
            // get holder
            MailResourcesHolder mailResourcesHolder = mailConnection.getResource();
            // get transport
            Transport transport = mailResourcesHolder.getTransport();
            try {
                // send message
                message.saveChanges();
                if (transport == null) {
                    // No shared transport: fall back to the static one-shot send.
                    Transport.send(message, message.getAllRecipients());
                } else {
                    transport.sendMessage(message, message.getAllRecipients());
                }
            } catch (Throwable throwable) {
                throw new FaxException("Unable to send message.", throwable);
            } finally {
                // Non-persistent connections are released whether or not the send succeeded.
                if (!this.usePersistentConnection) {
                    try {
                        // close connection
                        this.closeMailConnection(mailConnection);
                    } catch (Exception exception) {
                        // log error
                        Logger logger = this.getLogger();
                        logger.logInfo(new Object[] { "Error while releasing mail connection." }, exception);
                    }
                }
            }
        }
    }
}