signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class AbstractJSONExpr { /** * conjunction : neg ( ) ( < AND > neg ( ) ) *
* @ throws JSONException */
boolean term ( Object o ) throws JSONException { } } | boolean result = false ; for ( int i = 0 ; o != null && i < getChildrenCount ( ) ; ++ i ) { result = at ( i ) . test ( o ) ; if ( ! result ) break ; // first FALSE stops this eval
} return result ; |
public class ConcurrentLinkedHashMap { /** * Drains the read buffers , each up to an amortized threshold . */
@ GuardedBy ( "evictionLock" ) void drainReadBuffers ( ) { } } | final int start = ( int ) Thread . currentThread ( ) . getId ( ) ; final int end = start + NUMBER_OF_READ_BUFFERS ; for ( int i = start ; i < end ; i ++ ) { drainReadBuffer ( i & READ_BUFFERS_MASK ) ; } |
public class PubmedXmlParser { /** * Parse Medline gzipped archives */
public List < MedlineCitation > parseAsArticles ( InputStream is ) { } } | try { MedlineCitationSet parse = ( MedlineCitationSet ) unmarshaller . unmarshal ( is ) ; return parse . getMedlineCitation ( ) ; } catch ( Exception e ) { LOG . warn ( "could not parse article " , e ) ; return null ; } finally { IOUtils . closeQuietly ( is ) ; } |
public class ProcessEngineConfigurationImpl { /** * deployers / / / / / */
protected void initDeployers ( ) { } } | if ( this . deployers == null ) { this . deployers = new ArrayList < Deployer > ( ) ; if ( customPreDeployers != null ) { this . deployers . addAll ( customPreDeployers ) ; } this . deployers . addAll ( getDefaultDeployers ( ) ) ; if ( customPostDeployers != null ) { this . deployers . addAll ( customPostDeployers ) ; } } if ( deploymentCache == null ) { List < Deployer > deployers = new ArrayList < Deployer > ( ) ; if ( customPreDeployers != null ) { deployers . addAll ( customPreDeployers ) ; } deployers . addAll ( getDefaultDeployers ( ) ) ; if ( customPostDeployers != null ) { deployers . addAll ( customPostDeployers ) ; } initCacheFactory ( ) ; deploymentCache = new DeploymentCache ( cacheFactory , cacheCapacity ) ; deploymentCache . setDeployers ( deployers ) ; } |
public class LinkedProperties { /** * { @ inheritDoc } */
@ Override public Set < String > stringPropertyNames ( ) { } } | return linkMap . keySet ( ) . stream ( ) . map ( key -> ( String ) key ) . collect ( Collectors . toCollection ( LinkedHashSet :: new ) ) ; |
public class Counter { /** * Set counter ' s value to expected .
* @ param value
* @ throws MemcachedException
* @ throws InterruptedException
* @ throws TimeoutException */
public void set ( long value ) throws MemcachedException , InterruptedException , TimeoutException { } } | this . memcachedClient . set ( this . key , 0 , String . valueOf ( value ) ) ; |
public class DataMaskingRulesInner { /** * Creates or updates a database data masking rule .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ param databaseName The name of the database .
* @ param dataMaskingRuleName The name of the data masking rule .
* @ param parameters The required parameters for creating or updating a data masking rule .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the DataMaskingRuleInner object if successful . */
public DataMaskingRuleInner createOrUpdate ( String resourceGroupName , String serverName , String databaseName , String dataMaskingRuleName , DataMaskingRuleInner parameters ) { } } | return createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , databaseName , dataMaskingRuleName , parameters ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class CreateSyntheticOverheadViewPL { /** * Computes overhead view of input image . All pixels in input image are assumed to be on the ground plane .
* @ param input ( Input ) Camera image .
* @ param output ( Output ) Image containing overhead view . */
public void process ( Planar < T > input , Planar < T > output ) { } } | int N = input . getNumBands ( ) ; for ( int i = 0 ; i < N ; i ++ ) { this . output [ i ] = FactoryGImageGray . wrap ( output . getBand ( i ) , this . output [ i ] ) ; interp [ i ] . setImage ( input . getBand ( i ) ) ; } int indexMap = 0 ; for ( int i = 0 ; i < output . height ; i ++ ) { int indexOut = output . startIndex + i * output . stride ; for ( int j = 0 ; j < output . width ; j ++ , indexOut ++ , indexMap ++ ) { Point2D_F32 p = mapPixels [ indexMap ] ; if ( p != null ) { for ( int k = 0 ; k < N ; k ++ ) { this . output [ k ] . set ( indexOut , interp [ k ] . get ( p . x , p . y ) ) ; } } } } |
public class RebootCacheClusterRequest { /** * A list of cache node IDs to reboot . A node ID is a numeric identifier ( 0001 , 0002 , etc . ) . To reboot an entire
* cluster , specify all of the cache node IDs .
* @ param cacheNodeIdsToReboot
* A list of cache node IDs to reboot . A node ID is a numeric identifier ( 0001 , 0002 , etc . ) . To reboot an
* entire cluster , specify all of the cache node IDs . */
public void setCacheNodeIdsToReboot ( java . util . Collection < String > cacheNodeIdsToReboot ) { } } | if ( cacheNodeIdsToReboot == null ) { this . cacheNodeIdsToReboot = null ; return ; } this . cacheNodeIdsToReboot = new com . amazonaws . internal . SdkInternalList < String > ( cacheNodeIdsToReboot ) ; |
public class CustomerServiceLocator { /** * For the given interface , get the stub implementation .
* If this service has no port for the given interface ,
* then ServiceException is thrown . */
public java . rmi . Remote getPort ( Class serviceEndpointInterface ) throws javax . xml . rpc . ServiceException { } } | try { if ( com . google . api . ads . adwords . axis . v201809 . mcm . CustomerServiceInterface . class . isAssignableFrom ( serviceEndpointInterface ) ) { com . google . api . ads . adwords . axis . v201809 . mcm . CustomerServiceSoapBindingStub _stub = new com . google . api . ads . adwords . axis . v201809 . mcm . CustomerServiceSoapBindingStub ( new java . net . URL ( CustomerServiceInterfacePort_address ) , this ) ; _stub . setPortName ( getCustomerServiceInterfacePortWSDDServiceName ( ) ) ; return _stub ; } } catch ( java . lang . Throwable t ) { throw new javax . xml . rpc . ServiceException ( t ) ; } throw new javax . xml . rpc . ServiceException ( "There is no stub implementation for the interface: " + ( serviceEndpointInterface == null ? "null" : serviceEndpointInterface . getName ( ) ) ) ; |
public class CollectionMapperFactory { /** * Creates a new Mapper for the given field .
* @ param field
* the field
* @ param indexed
* whether or not the field is to be indexed
* @ return the Mapper */
private Mapper createMapper ( Field field , boolean indexed ) { } } | lock . lock ( ) ; try { Mapper mapper ; Class < ? > fieldType = field . getType ( ) ; Type genericType = field . getGenericType ( ) ; String cacheKey = computeCacheKey ( genericType , indexed ) ; mapper = cache . get ( cacheKey ) ; if ( mapper != null ) { return mapper ; } if ( List . class . isAssignableFrom ( fieldType ) ) { mapper = new ListMapper ( genericType , indexed ) ; } else if ( Set . class . isAssignableFrom ( fieldType ) ) { mapper = new SetMapper ( genericType , indexed ) ; } else { // we shouldn ' t be getting here
throw new IllegalArgumentException ( String . format ( "Field type must be List or Set, found %s" , fieldType ) ) ; } cache . put ( cacheKey , mapper ) ; return mapper ; } finally { lock . unlock ( ) ; } |
public class AmazonAutoScalingClient { /** * Describes one or more scaling activities for the specified Auto Scaling group .
* @ param describeScalingActivitiesRequest
* @ return Result of the DescribeScalingActivities operation returned by the service .
* @ throws InvalidNextTokenException
* The < code > NextToken < / code > value is not valid .
* @ throws ResourceContentionException
* You already have a pending update to an Amazon EC2 Auto Scaling resource ( for example , an Auto Scaling
* group , instance , or load balancer ) .
* @ sample AmazonAutoScaling . DescribeScalingActivities
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / autoscaling - 2011-01-01 / DescribeScalingActivities "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribeScalingActivitiesResult describeScalingActivities ( DescribeScalingActivitiesRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeScalingActivities ( request ) ; |
public class UpdateCampaignRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( UpdateCampaignRequest updateCampaignRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( updateCampaignRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateCampaignRequest . getApplicationId ( ) , APPLICATIONID_BINDING ) ; protocolMarshaller . marshall ( updateCampaignRequest . getCampaignId ( ) , CAMPAIGNID_BINDING ) ; protocolMarshaller . marshall ( updateCampaignRequest . getWriteCampaignRequest ( ) , WRITECAMPAIGNREQUEST_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class MouseStationaryHelper { /** * Sets up the code to fire a { @ code BEGIN } event when the mouse becomes stationary over the node and has not
* moved for the given amount of time ( { @ code delay } ) , and to fire a { @ code END } event when the stationary
* mouse moves again . Note : any previously installed delays will be removed without creating memory leaks . */
public void install ( Duration delay ) { } } | if ( installed != null ) { installed . unsubscribe ( ) ; } installed = events ( delay ) . < Event > map ( either -> either . unify ( pos -> MouseStationaryEvent . beginAt ( node . localToScreen ( pos ) ) , stop -> MouseStationaryEvent . end ( ) ) ) . subscribe ( evt -> Event . fireEvent ( node , evt ) ) ; |
public class Validate { /** * Service to validate an XML document . Input must be given for either " xmlDocument " or for " xmlLocation " .
* The " ' xsdLocation " input is optional , but if specified then the XML document will be validated against the XSD schema .
* @ param xmlDocument XML string to test
* @ param xmlDocumentSource The source type of the xml document .
* Valid values : xmlString , xmlPath , xmlUrl
* Default value : xmlString
* @ param xsdDocument optional - XSD to test given XML against
* @ param xsdDocumentSource The source type of the xsd document .
* Valid values : xsdString , xsdPath
* Default value : xsdString
* @ param username The username used to connect to the remote machine .
* @ param password The password used to connect to the remote machine .
* @ param proxyHost The proxy server used to access the remote host .
* @ param proxyPort The proxy server port .
* @ param proxyUsername The username used when connecting to the proxy .
* @ param proxyPassword The password used when connecting to the proxy .
* @ param trustAllRoots Specifies whether to enable weak security over SSL / TSL . A certificate is trusted even if no trusted certification authority issued it .
* Default value is ' false ' .
* Valid values are ' true ' and ' false ' .
* @ param x509HostnameVerifier Specifies the way the server hostname must match a domain name in the subject ' s Common Name ( CN ) or subjectAltName field of the
* X . 509 certificate . The hostname verification system prevents communication with other hosts other than the ones you intended .
* This is done by checking that the hostname is in the subject alternative name extension of the certificate . This system is
* designed to ensure that , if an attacker ( Man In The Middle ) redirects traffic to his machine , the client will not accept the
* connection . If you set this input to " allow _ all " , this verification is ignored and you become vulnerable to security attacks .
* For the value " browser _ compatible " the hostname verifier works the same way as Curl and Firefox . The hostname must match
* either the first CN , or any of the subject - alts . A wildcard can occur in the CN , and in any of the subject - alts . The only
* difference between " browser _ compatible " and " strict " is that a wildcard ( such as " * . foo . com " ) with " browser _ compatible " matches
* all subdomains , including " a . b . foo . com " . From the security perspective , to provide protection against possible Man - In - The - Middle
* attacks , we strongly recommend to use " strict " option .
* Valid values are ' strict ' , ' browser _ compatible ' , ' allow _ all ' .
* Default value is ' strict ' .
* @ param trustKeystore The pathname of the Java TrustStore file . This contains certificates from other parties that you expect to communicate with , or from
* Certificate Authorities that you trust to identify other parties . If the protocol selected is not ' https ' or if trustAllRoots
* is ' true ' this input is ignored .
* Format of the keystore is Java KeyStore ( JKS ) .
* @ param trustPassword The password associated with the TrustStore file . If trustAllRoots is false and trustKeystore is empty , trustPassword default will be supplied .
* Default value is ' changeit ' .
* @ param keystore The pathname of the Java KeyStore file . You only need this if the server requires client authentication . If the protocol selected is not
* ' https ' or if trustAllRoots is ' true ' this input is ignored .
* Format of the keystore is Java KeyStore ( JKS ) .
* @ param keystorePassword The password associated with the KeyStore file . If trustAllRoots is false and keystore is empty , keystorePassword default will be supplied .
* Default value is ' changeit ' .
* @ param secureProcessing optional - whether to use secure processing
* @ return map of results containing success or failure text and a result message */
@ Action ( name = "Validate" , outputs = { } } | @ Output ( RETURN_CODE ) , @ Output ( RESULT_TEXT ) , @ Output ( RETURN_RESULT ) , @ Output ( ERROR_MESSAGE ) } , responses = { @ Response ( text = ResponseNames . SUCCESS , field = RETURN_CODE , value = SUCCESS , matchType = COMPARE_EQUAL ) , @ Response ( text = ResponseNames . FAILURE , field = RETURN_CODE , value = FAILURE , matchType = COMPARE_EQUAL , isDefault = true , isOnFail = true ) } ) public Map < String , String > execute ( @ Param ( value = XML_DOCUMENT , required = true ) String xmlDocument , @ Param ( value = XML_DOCUMENT_SOURCE ) String xmlDocumentSource , @ Param ( value = XSD_DOCUMENT ) String xsdDocument , @ Param ( value = XSD_DOCUMENT_SOURCE ) String xsdDocumentSource , @ Param ( value = USERNAME ) String username , @ Param ( value = PASSWORD , encrypted = true ) String password , @ Param ( value = TRUST_ALL_ROOTS ) String trustAllRoots , @ Param ( value = KEYSTORE ) String keystore , @ Param ( value = KEYSTORE_PASSWORD , encrypted = true ) String keystorePassword , @ Param ( value = TRUST_KEYSTORE ) String trustKeystore , @ Param ( value = TRUST_PASSWORD , encrypted = true ) String trustPassword , @ Param ( value = X_509_HOSTNAME_VERIFIER ) String x509HostnameVerifier , @ Param ( value = PROXY_HOST ) String proxyHost , @ Param ( value = PROXY_PORT ) String proxyPort , @ Param ( value = PROXY_USERNAME ) String proxyUsername , @ Param ( value = PROXY_PASSWORD , encrypted = true ) String proxyPassword , @ Param ( value = SECURE_PROCESSING ) String secureProcessing ) { final CommonInputs inputs = new CommonInputs . CommonInputsBuilder ( ) . withXmlDocument ( xmlDocument ) . withXmlDocumentSource ( xmlDocumentSource ) . withUsername ( username ) . withPassword ( password ) . withTrustAllRoots ( trustAllRoots ) . withKeystore ( keystore ) . withKeystorePassword ( keystorePassword ) . withTrustKeystore ( trustKeystore ) . withTrustPassword ( trustPassword ) . 
withX509HostnameVerifier ( x509HostnameVerifier ) . withProxyHost ( proxyHost ) . withProxyPort ( proxyPort ) . withProxyUsername ( proxyUsername ) . withProxyPassword ( proxyPassword ) . withSecureProcessing ( secureProcessing ) . build ( ) ; final CustomInputs customInputs = new CustomInputs . CustomInputsBuilder ( ) . withXsdDocument ( xsdDocument ) . withXsdDocumentSource ( xsdDocumentSource ) . build ( ) ; return new ValidateService ( ) . execute ( inputs , customInputs ) ; |
public class CmsSearchConfiguration { /** * Sets the generated search manager . < p >
* @ param manager the search manager to set */
public void setSearchManager ( CmsSearchManager manager ) { } } | m_searchManager = manager ; if ( CmsLog . INIT . isInfoEnabled ( ) ) { CmsLog . INIT . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_SEARCH_MANAGER_FINISHED_0 ) ) ; } |
public class PropertyUtility { /** * Setter .
* @ param beanClass
* the bean class
* @ param property
* the property
* @ param value
* the value
* @ return the string */
private static String setter ( ModelProperty property , String value ) { } } | if ( property . getParent ( ) != null && ( ( ModelClass < ? > ) property . getParent ( ) ) . isImmutablePojo ( ) ) { return ImmutableUtility . IMMUTABLE_PREFIX + property . getName ( ) + "=" + value ; } else { if ( property . isPublicField ( ) ) return property . getName ( ) + "=" + value ; else if ( property . isFieldWithSetter ( ) ) { return "set" + converterField2Method . convert ( property . getName ( ) ) + "(" + value + ")" ; } else { throw new PropertyVisibilityException ( String . format ( "property '%s' of class '%s' can not be modify" , property . getName ( ) , property . getParent ( ) . getElement ( ) . asType ( ) ) ) ; } } |
public class UrlResource { /** * Save a copy in the local cache - in case remote source is not available in future . */
private void cacheStream ( ) { } } | try { File fi = getTemproralCacheFile ( ) ; if ( fi . exists ( ) ) { if ( ! fi . delete ( ) ) { throw new IllegalStateException ( "Cannot delete file " + fi . getAbsolutePath ( ) + "!" ) ; } } FileOutputStream fout = new FileOutputStream ( fi ) ; InputStream in = grabStream ( ) ; byte [ ] buffer = new byte [ DEFAULT_BUFFER_SIZE ] ; int n ; while ( - 1 != ( n = in . read ( buffer ) ) ) { fout . write ( buffer , 0 , n ) ; } fout . flush ( ) ; fout . close ( ) ; in . close ( ) ; File cacheFile = getCacheFile ( ) ; if ( ! fi . renameTo ( cacheFile ) ) { throw new IllegalStateException ( "Cannot rename file \"" + fi . getAbsolutePath ( ) + "\" to \"" + cacheFile . getAbsolutePath ( ) + "\"!" ) ; } } catch ( Exception e ) { e . printStackTrace ( ) ; } |
public class LakeBTCAdapters { /** * Adapts a LakeBTCAccount to an AccountInfo
* @ param lakeBTCAccount
* @ return Wallet */
public static AccountInfo adaptAccountInfo ( LakeBTCAccount lakeBTCAccount ) { } } | // Adapt to XChange DTOs
LakeBTCProfile profile = lakeBTCAccount . getProfile ( ) ; LakeBTCBalance balance = lakeBTCAccount . getBalance ( ) ; Balance usdBalance = new Balance ( Currency . USD , balance . getUSD ( ) ) ; Balance cnyWBalance = new Balance ( Currency . CNY , balance . getCNY ( ) ) ; Balance btcBalance = new Balance ( Currency . BTC , balance . getBTC ( ) ) ; return new AccountInfo ( profile . getId ( ) , new Wallet ( usdBalance , btcBalance , cnyWBalance ) ) ; |
public class RestClientUtil { /** * 获取文档MapSearchHit对象 , 封装了索引文档的所有属性数据
* @ param indexName
* @ param indexType
* @ param documentId
* @ param options
* @ return
* @ throws ElasticSearchException */
public MapSearchHit getDocumentHit ( String indexName , String indexType , String documentId , Map < String , Object > options ) throws ElasticSearchException { } } | try { MapSearchHit searchResult = this . client . executeRequest ( BuildTool . buildGetDocumentRequest ( indexName , indexType , documentId , options ) , null , new GetDocumentHitResponseHandler ( ) , ClientUtil . HTTP_GET ) ; return searchResult ; } catch ( ElasticSearchException e ) { return ResultUtil . hand404HttpRuntimeException ( e , MapSearchHit . class , ResultUtil . OPERTYPE_getDocument ) ; } |
public class HtmlMessages { /** * < p > Return the value of the < code > errorStyle < / code > property . < / p >
* < p > Contents : CSS style ( s ) to apply to any message
* with a severity class of " ERROR " . */
public java . lang . String getErrorStyle ( ) { } } | return ( java . lang . String ) getStateHelper ( ) . eval ( PropertyKeys . errorStyle ) ; |
public class OSGiScriptEngineManager { /** * Adds the JDK build - in JavaScript engine into the given list of scripting engine factories .
* @ param factoryCandidates List of scripting engine factories */
private void addJavaScriptEngine ( List < String > factoryCandidates ) { } } | // Add default script engine manager
factoryCandidates . add ( OSGiScriptEngineFactory . class . getName ( ) ) ; // Rhino is available in java < 8 , Nashorn is available in java > = 8
if ( ClassLoaderUtil . isClassDefined ( RHINO_SCRIPT_ENGINE_FACTORY ) ) { factoryCandidates . add ( RHINO_SCRIPT_ENGINE_FACTORY ) ; } else if ( ClassLoaderUtil . isClassDefined ( NASHORN_SCRIPT_ENGINE_FACTORY ) ) { factoryCandidates . add ( NASHORN_SCRIPT_ENGINE_FACTORY ) ; } else { logger . warning ( "No built-in JavaScript ScriptEngineFactory found." ) ; } |
public class ApiOvhDedicatedserver { /** * Create a new storage backup space associated to server
* REST : POST / dedicated / server / { serviceName } / features / backupCloud
* @ param projectDescription [ required ] Project description of the project to be created ( ignored when an existing project is already specified )
* @ param cloudProjectId [ required ] cloud project id
* @ param serviceName [ required ] The internal name of your dedicated server
* API beta */
public OvhBackupCloud serviceName_features_backupCloud_POST ( String serviceName , String cloudProjectId , String projectDescription ) throws IOException { } } | String qPath = "/dedicated/server/{serviceName}/features/backupCloud" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "cloudProjectId" , cloudProjectId ) ; addBody ( o , "projectDescription" , projectDescription ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhBackupCloud . class ) ; |
public class PriorityQueue { /** * Retrieves the element at the specified index . If such an element doesn ' t exist { @ code null } is returned .
* Iterating the queue by index is < em > not < / em > guaranteed to traverse the elements in any particular order .
* @ return the element at the specified index in this queue . */
@ SuppressWarnings ( "unchecked" ) public E get ( int index ) { } } | return index >= size ? null : ( E ) queue [ index ] ; |
public class ASrvOrm { /** * < p > Make TableSql from XML properties . < / p >
* @ param pTableSql tableSql
* @ param pClazz entity class
* @ throws Exception - any exception */
public final void makeTableSqlFromXml ( final TableSql pTableSql , final Class < ? > pClazz ) throws Exception { } } | pTableSql . setVersionAlgorithm ( Integer . parseInt ( this . mngSettings . lazClsSts ( pClazz ) . get ( "versionAlgorithm" ) ) ) ; pTableSql . setIdColumnsNames ( this . mngSettings . lazClsSts ( pClazz ) . get ( "idColumnsNames" ) . split ( "," ) ) ; pTableSql . setIdFieldName ( this . mngSettings . lazClsSts ( pClazz ) . get ( "idFieldName" ) ) ; if ( pTableSql . getIdFieldName ( ) == null ) { throw new ExceptionWithCode ( ExceptionWithCode . CONFIGURATION_MISTAKE , "Where is no field ID name for class: " + pClazz . getSimpleName ( ) ) ; } pTableSql . setOwnerFieldName ( this . mngSettings . lazClsSts ( pClazz ) . get ( "ownerFieldName" ) ) ; Field [ ] fields = getUtlReflection ( ) . retrieveFields ( pClazz ) ; for ( Field field : fields ) { Map < String , String > fldSts = this . mngSettings . lazFldSts ( pClazz , field . getName ( ) ) ; if ( fldSts != null ) { FieldSql fieldSql = new FieldSql ( ) ; String definition = fldSts . get ( "definition" ) ; if ( definition != null ) { String isNullableStr = fldSts . get ( "isNullable" ) ; if ( ! Boolean . valueOf ( isNullableStr ) && ! definition . contains ( "not null" ) ) { definition += " not null" ; } } fieldSql . setDefinition ( definition ) ; pTableSql . getFieldsMap ( ) . put ( field . getName ( ) , fieldSql ) ; } } |
public class CombinedChannelDuplexHandler { /** * Initialized this handler with the specified handlers .
* @ throws IllegalStateException if this handler was not constructed via the default constructor or
* if this handler does not implement all required handler interfaces
* @ throws IllegalArgumentException if the specified handlers cannot be combined into one due to a conflict
* in the type hierarchy */
protected final void init ( I inboundHandler , O outboundHandler ) { } } | validate ( inboundHandler , outboundHandler ) ; this . inboundHandler = inboundHandler ; this . outboundHandler = outboundHandler ; |
public class MetaClass { /** * Utility method for cast
* @ param type The type to cast into a class
* @ return The class corresponding to the passed in type */
private static Class < ? > type2class ( Type type ) { } } | if ( type instanceof Class < ? > ) { return ( Class < ? > ) type ; // base case
} else if ( type instanceof ParameterizedType ) { return type2class ( ( ( ParameterizedType ) type ) . getRawType ( ) ) ; } else if ( type instanceof TypeVariable < ? > ) { return type2class ( ( ( TypeVariable < ? > ) type ) . getBounds ( ) [ 0 ] ) ; } else if ( type instanceof WildcardType ) { return type2class ( ( ( WildcardType ) type ) . getUpperBounds ( ) [ 0 ] ) ; } else { throw new IllegalArgumentException ( "Cannot convert type to class: " + type ) ; } |
public class MessageAPI { /** * 预览接口
* @ param access _ token access _ token
* @ param preview preview
* @ return MessageSendResult
* @ since 2.6.3 */
public static MessageSendResult messageMassPreview ( String access_token , Preview preview ) { } } | String previewJson = JsonUtil . toJSONString ( preview ) ; HttpUriRequest httpUriRequest = RequestBuilder . post ( ) . setHeader ( jsonHeader ) . setUri ( BASE_URI + "/cgi-bin/message/mass/preview" ) . addParameter ( PARAM_ACCESS_TOKEN , API . accessToken ( access_token ) ) . setEntity ( new StringEntity ( previewJson , Charset . forName ( "utf-8" ) ) ) . build ( ) ; return LocalHttpClient . executeJsonResult ( httpUriRequest , MessageSendResult . class ) ; |
public class DefaultClusterManager { /** * Undeploys a module . */
private void doInternalUndeployModule ( final Message < JsonObject > message ) { } } | final String deploymentID = message . body ( ) . getString ( "id" ) ; if ( deploymentID == null ) { message . reply ( new JsonObject ( ) . putString ( "status" , "error" ) . putString ( "message" , "No deployment ID specified." ) ) ; } else { removeDeployment ( deploymentID , new Handler < AsyncResult < String > > ( ) { @ Override public void handle ( AsyncResult < String > result ) { platform . undeployModule ( result . succeeded ( ) && result . result ( ) != null ? result . result ( ) : deploymentID , createUndeployHandler ( message ) ) ; } } ) ; } |
public class ManagedInstanceKeysInner { /** * Creates or updates a managed instance key .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param managedInstanceName The name of the managed instance .
* @ param keyName The name of the managed instance key to be operated on ( updated or created ) .
* @ param parameters The requested managed instance key resource state .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the ManagedInstanceKeyInner object if successful . */
public ManagedInstanceKeyInner createOrUpdate ( String resourceGroupName , String managedInstanceName , String keyName , ManagedInstanceKeyInner parameters ) { } } | return createOrUpdateWithServiceResponseAsync ( resourceGroupName , managedInstanceName , keyName , parameters ) . toBlocking ( ) . last ( ) . body ( ) ; |
public class SuffixDictionary { /** * 获取最长的后缀
* @ param word
* @ return */
public int getLongestSuffixLength ( String word ) { } } | word = reverse ( word ) ; LinkedList < Map . Entry < String , Integer > > suffixList = trie . commonPrefixSearchWithValue ( word ) ; if ( suffixList . size ( ) == 0 ) return 0 ; return suffixList . getLast ( ) . getValue ( ) ; |
public class CFG { /** * Get a Collection of basic blocks which contain the bytecode instruction
* with given offset .
* @ param offset
* the bytecode offset of an instruction
* @ return Collection of BasicBlock objects which contain the instruction
* with that offset */
public Collection < BasicBlock > getBlocksContainingInstructionWithOffset ( int offset ) { } } | LinkedList < BasicBlock > result = new LinkedList < > ( ) ; for ( Iterator < BasicBlock > i = blockIterator ( ) ; i . hasNext ( ) ; ) { BasicBlock block = i . next ( ) ; if ( block . containsInstructionWithOffset ( offset ) ) { result . add ( block ) ; } } return result ; |
public class ParameterizedHeaderWithValue {
    /**
     * <p>Converts headers that are values followed by optional parameters into a list of values with parameters.</p>
     * <p>Null or blank strings return an empty list.</p>
     *
     * Implemented as a character-level state machine with three states:
     * VALUE (reading the main value), PARAM_NAME and PARAM_VALUE (reading
     * {@code name=value} pairs after a {@code ;}). A {@code ,} terminates one
     * header entry and starts the next.
     *
     * @param input The value to parse
     * @return A list of ParameterizedHeaderWithValue objects
     * @throws IllegalArgumentException The value cannot be parsed
     */
    public static List<ParameterizedHeaderWithValue> fromString(String input) {
        if (input == null || input.trim().isEmpty()) {
            return emptyList();
        }
        StringBuilder buffer = new StringBuilder();
        List<ParameterizedHeaderWithValue> results = new ArrayList<>();
        int i = 0;
        // Outer loop: one iteration per comma-separated header entry.
        while (i < input.length()) {
            String value = null;
            LinkedHashMap<String, String> parameters = null;
            State state = State.VALUE;
            String paramName = null;
            boolean isQuotedString = false;
            headerValueLoop:
            for (; i < input.length(); i++) {
                char c = input.charAt(i);
                if (state == State.VALUE) {
                    if (c == ';') {
                        // End of the main value; parameters follow.
                        value = buffer.toString().trim();
                        buffer.setLength(0);
                        state = State.PARAM_NAME;
                    } else if (c == ',') {
                        // End of this header entry.
                        i++;
                        break headerValueLoop;
                    } else if (ParseUtils.isVChar(c) || ParseUtils.isOWS(c)) {
                        buffer.append(c);
                    } else {
                        throw new IllegalArgumentException("Got ascii " + ((int) c) + " while in " + state + " at position " + i);
                    }
                } else if (state == State.PARAM_NAME) {
                    if (c == ',' && buffer.length() == 0) {
                        i++;
                        // a semi-colon without an parameter, like "something;"
                        break headerValueLoop;
                    } else if (c == '=') {
                        paramName = buffer.toString();
                        buffer.setLength(0);
                        state = State.PARAM_VALUE;
                    } else if (ParseUtils.isTChar(c)) {
                        buffer.append(c);
                    } else if (ParseUtils.isOWS(c)) {
                        // Whitespace is only legal before the name starts.
                        if (buffer.length() > 0) {
                            throw new IllegalArgumentException("Got whitespace in parameter name while in " + state + " - header was " + buffer);
                        }
                    } else {
                        throw new IllegalArgumentException("Got ascii " + ((int) c) + " while in " + state);
                    }
                } else {
                    // State.PARAM_VALUE: plain token or quoted string.
                    boolean isFirst = !isQuotedString && buffer.length() == 0;
                    if (isFirst && ParseUtils.isOWS(c)) {
                        // ignore it
                    } else if (isFirst && c == '"') {
                        isQuotedString = true;
                    } else {
                        if (isQuotedString) {
                            char lastChar = input.charAt(i - 1);
                            if (c == '\\') {
                                // don't append (escape character itself is dropped)
                            } else if (lastChar == '\\') {
                                // Previous char was the escape; append this one literally.
                                buffer.append(c);
                            } else if (c == '"') {
                                // this is the end, but we'll update on the next go
                                isQuotedString = false;
                            } else {
                                buffer.append(c);
                            }
                        } else {
                            if (ParseUtils.isTChar(c)) {
                                buffer.append(c);
                            } else if (c == ';') {
                                if (parameters == null) {
                                    parameters = new LinkedHashMap<>(); // keeps insertion order
                                }
                                parameters.put(paramName, buffer.toString());
                                buffer.setLength(0);
                                paramName = null;
                                state = State.PARAM_NAME;
                            } else if (ParseUtils.isOWS(c)) {
                                // ignore it
                            } else if (c == ',') {
                                i++;
                                break headerValueLoop;
                            } else {
                                throw new IllegalArgumentException("Got character code " + ((int) c) + " (" + c + ") while parsing parameter value");
                            }
                        }
                    }
                }
            }
            // Flush whatever the buffer held when the entry (or the input) ended.
            switch (state) {
                case VALUE:
                    value = buffer.toString().trim();
                    buffer.setLength(0);
                    break;
                case PARAM_VALUE:
                    if (parameters == null) {
                        parameters = new LinkedHashMap<>(); // keeps insertion order
                    }
                    parameters.put(paramName, buffer.toString());
                    buffer.setLength(0);
                    break;
                default:
                    if (buffer.length() > 0) {
                        throw new IllegalArgumentException("Unexpected ending point at state " + state + " for " + input);
                    }
            }
            results.add(new ParameterizedHeaderWithValue(value, parameters == null ? Collections.emptyMap() : parameters));
        }
        return results;
    }
}
public class ProblemSummary { /** * Adds a file with the provided description . */
public void addFile ( String description , FileModel fileModel ) { } } | Map < FileModel , ProblemFileSummary > files = addDescription ( description ) ; if ( files . containsKey ( fileModel ) ) { files . get ( fileModel ) . addOccurrence ( ) ; } else { files . put ( fileModel , new ProblemFileSummary ( fileModel , 1 ) ) ; } |
public class MessageScreen { /** * PrintData Method . */
public boolean printData ( PrintWriter out , int iPrintOptions ) { } } | this . addHiddenParam ( out , TrxMessageHeader . LOG_TRX_ID , this . getProperty ( TrxMessageHeader . LOG_TRX_ID ) ) ; return super . printData ( out , iPrintOptions ) ; // Don ' t print |
public class AbstractMatrix { /** * Returns new AbstractMatrix initialized to values .
* < p > E . g . 2x2 matrix has A00 , A01 , A10 , A11
* @ param rows Number of rows
* @ param values Values row by row
* @ return */
public static AbstractMatrix getInstance ( int rows , Object values , Class < ? > cls ) { } } | int length = Array . getLength ( values ) ; if ( length % rows != 0 ) { throw new IllegalArgumentException ( "not full rows" ) ; } return new AbstractMatrix ( rows , copyOf ( values , cls ) ) ; |
public class PDBPolymer { /** * Adds the IPDBAtom oAtom to a specified Monomer of a specified Strand .
* Additionally , it keeps record of the iCode .
* @ param oAtom The IPDBAtom to add
* @ param oMonomer The monomer the atom belongs to */
@ Override public void addAtom ( IPDBAtom oAtom , IMonomer oMonomer , IStrand oStrand ) { } } | super . addAtom ( oAtom , oMonomer , oStrand ) ; if ( ! sequentialListOfMonomers . contains ( oMonomer . getMonomerName ( ) ) ) sequentialListOfMonomers . add ( oMonomer . getMonomerName ( ) ) ; |
public class Recorder { /** * 获取请求来源 */
public SourceBody getSource ( Unit unit , UnitRequest msg ) { } } | SourceBody body = new SourceBody ( ) ; body . setSource ( msg . getContext ( ) . getUri ( ) ) ; if ( body . getSource ( ) == null || body . getSource ( ) . trim ( ) . length ( ) <= 0 ) { body . setSource ( EnvUtil . getApplication ( ) ) ; } return body ; |
public class WebFacesConfigDescriptorImpl { /** * Returns all < code > behavior < / code > elements
* @ return list of < code > behavior < / code > */
public List < FacesConfigBehaviorType < WebFacesConfigDescriptor > > getAllBehavior ( ) { } } | List < FacesConfigBehaviorType < WebFacesConfigDescriptor > > list = new ArrayList < FacesConfigBehaviorType < WebFacesConfigDescriptor > > ( ) ; List < Node > nodeList = model . get ( "behavior" ) ; for ( Node node : nodeList ) { FacesConfigBehaviorType < WebFacesConfigDescriptor > type = new FacesConfigBehaviorTypeImpl < WebFacesConfigDescriptor > ( this , "behavior" , model , node ) ; list . add ( type ) ; } return list ; |
public class RandomUtils { /** * Returns a random double within the specified range .
* @ param startInclusive
* the smallest value that can be returned , must be non - negative
* @ param endInclusive
* the upper bound ( included )
* @ throws IllegalArgumentException
* if { @ code startInclusive > endInclusive } or if
* { @ code startInclusive } is negative
* @ return the random double */
public static double nextDouble ( final double startInclusive , final double endInclusive ) { } } | Validate . isTrue ( endInclusive >= startInclusive , "Start value must be smaller or equal to end value." ) ; Validate . isTrue ( startInclusive >= 0 , "Both range values must be non-negative." ) ; if ( startInclusive == endInclusive ) { return startInclusive ; } return startInclusive + ( ( endInclusive - startInclusive ) * RANDOM . nextDouble ( ) ) ; |
public class Clusterer {
    /**
     * Returns true when the two points lie within the given squared tolerance of
     * each other, comparing squared Euclidean distance to avoid a sqrt.
     * Makes sure Simplified shape is more robust to transformations.
     */
    static boolean isClusterCandidate_(double x_1, double y1, double x2, double y2, double sqr_tolerance) {
        double deltaX = x_1 - x2;
        double deltaY = y1 - y2;
        double squaredDistance = deltaX * deltaX + deltaY * deltaY;
        return squaredDistance <= sqr_tolerance;
    }
}
public class MessageFactory {
    /**
     * The received buffer may not have necessary bytes to decode a message. Instance of this factory keeps data locally till
     * next set of data is received and a message can be successfully decoded.
     *
     * Stateful incremental decoder: {@code header}, {@code pos}, {@code length},
     * {@code params} and {@code isHeaderReady} carry progress between calls, so
     * this instance is not safe for concurrent use — NOTE(review): confirm
     * single-threaded usage by callers.
     *
     * @param buffer the next chunk of received bytes; consumed up to the end of one message
     * @return the decoded Message once complete, or {@code null} if more data is needed
     */
    public Message createMessage(ByteBuffer buffer) {
        if (!isHeaderReady) {
            // Copy as many header bytes as are available.
            int len = Math.min(MESSAGE_HEADER_SIZE - pos, buffer.remaining());
            buffer.get(header, pos, len);
            // update cursor postion in the header's buffer
            pos += len;
            // header completed?
            isHeaderReady = pos == header.length;
            if (!isHeaderReady) {
                // no more data available
                return null;
            }
            // obtain remaining length of the message and prepare buffer
            // (big-endian 32-bit length in the first four header bytes)
            length = ((header[0] & 0xff) << 24);
            length += ((header[1] & 0xff) << 16);
            length += ((header[2] & 0xff) << 8);
            length += (header[3] & 0xff);
            length -= MESSAGE_HEADER_SIZE;
            params = new byte[length];
            // finally switch cursor position
            pos = 0;
            message = new Message();
        }
        // at this point we must recheck remainder of the input buffer
        // because possible case when input buffer fits exactly to the header
        if (length > 0 && !buffer.hasRemaining()) {
            return null;
        }
        // again, reading all parameters before parsing
        // compute available or required data
        int len = Math.min((params.length - pos), buffer.remaining());
        buffer.get(params, pos, len);
        // update cursor position
        pos += len;
        // end of message not reached
        if (pos < params.length) {
            return null;
        }
        // end of message reached and most probably some data remains in buffer
        // do not touch remainder of the input buffer, next call to this method
        // will proceed remainder
        // parsing params of this message
        message.decode(params);
        // switch factory for receiving new message
        this.isHeaderReady = false;
        this.pos = 0;
        // return
        return message;
    }
}
public class PoiWriter { /** * Post - process . */
private void postProcess ( ) throws SQLException { } } | LOGGER . info ( "Post-processing..." ) ; this . conn = DriverManager . getConnection ( "jdbc:sqlite:" + this . configuration . getOutputFile ( ) . getAbsolutePath ( ) ) ; this . conn . createStatement ( ) . execute ( DbConstants . DROP_NODES_STATEMENT ) ; this . conn . createStatement ( ) . execute ( DbConstants . DROP_WAYNODES_STATEMENT ) ; this . conn . close ( ) ; this . conn = DriverManager . getConnection ( "jdbc:sqlite:" + this . configuration . getOutputFile ( ) . getAbsolutePath ( ) ) ; this . conn . createStatement ( ) . execute ( "VACUUM;" ) ; this . conn . close ( ) ; |
public class ThreadContextClassLoader {
    /**
     * Cleans up the TCCL instance. Once called, this TCCL is effectively disabled.
     * It's associated gateway bundle will have been removed.
     *
     * Best-effort teardown: all uninstall failures are deliberately swallowed so
     * cleanup never propagates an exception.
     */
    private void cleanup() {
        final String methodName = "cleanup(): ";
        try {
            // getAndSet(null) makes cleanup idempotent: only the first caller sees the bundle.
            final Bundle b = bundle.getAndSet(null);
            if (b != null) {
                if (tc.isDebugEnabled()) {
                    Tr.debug(tc, methodName + "Uninstalling bundle location: " + b.getLocation() + ", bundle id: " + b.getBundleId());
                }
                SecurityManager sm = System.getSecurityManager();
                if (sm != null) {
                    // Under a SecurityManager the uninstall must run privileged.
                    AccessController.doPrivileged(new PrivilegedAction<Void>() {
                        @Override
                        public Void run() {
                            try {
                                b.uninstall();
                                return null;
                            } catch (BundleException ignored) {
                                // best-effort: a failed uninstall is not fatal here
                                return null;
                            }
                        }
                    });
                } else {
                    b.uninstall();
                }
            }
        } catch (BundleException ignored) {
            // best-effort cleanup: ignore uninstall failures
        } catch (IllegalStateException ignored) {
            // bundle may already be uninstalled/invalid; ignore
        }
    }
}
public class InternalPureXbaseLexer {
    /**
     * $ANTLR start "T__33"
     *
     * ANTLR-generated lexer rule matching the literal token {@code <>}.
     * Do not hand-edit beyond comments; regenerate from the grammar instead.
     */
    public final void mT__33() throws RecognitionException {
        try {
            int _type = T__33;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // InternalPureXbase.g:31:7: ( '<>' )
            // InternalPureXbase.g:31:9: '<>'
            {
                match("<>");
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // generated finally block intentionally empty
        }
    }
}
public class V1InstanceCreator { /** * Create a new Goal with a name .
* @ param name The initial name of the entity .
* @ param project The Project this Goal will be in .
* @ param attributes additional attributes for the Goal .
* @ return A newly minted Goal that exists in the VersionOne system . */
public Goal goal ( String name , Project project , Map < String , Object > attributes ) { } } | Goal goal = new Goal ( instance ) ; goal . setName ( name ) ; goal . setProject ( project ) ; addAttributes ( goal , attributes ) ; goal . save ( ) ; return goal ; |
public class FileUtilities { /** * This method converts a size in bytes into a string which can be used to be
* put into UI .
* For example : 1024 will be converted into ' 1kB ' , 1024*1024 bytes into ' 1MB '
* and so forth .
* @ param size
* is the size of the file in Byte to be converted into a
* { @ link String } .
* @ return A { @ link String } is returned . */
public static String createHumanReadableSizeString ( long size ) { } } | DecimalFormat format = new DecimalFormat ( "#.##" ) ; BinaryPrefix prefix = BinaryPrefix . getSuitablePrefix ( size ) ; double doubleSize = size / prefix . getBinaryFactor ( ) . doubleValue ( ) ; return format . format ( doubleSize ) + prefix . getUnit ( ) + "B" ; |
public class NonnullAnnotationVerifier { /** * Checks whether the given field is marked with an Nonnull annotation ,
* whether directly , or through some default annotation mechanism .
* @ param field The field to be checked .
* @ param annotationCache To provide access to the annotations on the field
* and the field ' s class
* @ return True if the field is to be treated as Nonnull . */
public static boolean fieldIsNonnull ( Field field , AnnotationCache annotationCache ) { } } | Class < ? > type = field . getDeclaringClass ( ) ; if ( annotationCache . hasFieldAnnotation ( type , field . getName ( ) , NONNULL ) ) { return true ; } if ( annotationCache . hasFieldAnnotation ( type , field . getName ( ) , NULLABLE ) ) { return false ; } return annotationCache . hasClassAnnotation ( type , FINDBUGS1X_DEFAULT_ANNOTATION_NONNULL ) || annotationCache . hasClassAnnotation ( type , JSR305_DEFAULT_ANNOTATION_NONNULL ) || annotationCache . hasClassAnnotation ( type , ECLIPSE_DEFAULT_ANNOTATION_NONNULL ) ; |
public class DefaultXMLWriter { /** * / * ( non - Javadoc )
* @ see tuwien . auto . calimero . xml . XMLWriter # writeEmptyElement
* ( java . lang . String , java . util . List ) */
public void writeEmptyElement ( String name , List att ) throws KNXMLException { } } | try { final Tag tag = new Tag ( name , att , null , true ) ; tag . endTag ( ) ; } catch ( final IOException e ) { throw new KNXMLException ( e . getMessage ( ) ) ; } |
public class Person { /** * Get the unix timestamp of the first photo uploaded by the user .
* You can use { @ link JinxUtils # parseTimestampToDate ( String ) } to convert this value to a Date object .
* @ return unix timestamp of the first photo uploaded by the user , or null if the value was not returned . */
public String getPhotosFirstDate ( ) { } } | return ( person == null || person . photos == null || person . photos . firstDate == null ) ? null : person . photos . firstDate . _content ; |
public class EnumBuilder {
    /**
     * Sets this Pojo's name and creates (or reuses) the backing enum class in the
     * code model. May be called at most once per builder instance.
     *
     * @param pojoPackage The Package used to create POJO
     * @param className Class to be created (normalised to a valid class name)
     * @return This instance
     * @throws IllegalStateException if this builder already owns an enum
     */
    public EnumBuilder withName(String pojoPackage, String className) {
        className = NamingHelper.convertToClassName(className);
        final String fullyQualifiedClassName = pojoPackage + "." + className;
        // Initiate package if necessary
        if (this.pojoPackage == null) {
            withPackage(pojoPackage);
        }
        // Builders should only have 1 active pojo under their responsibility
        if (this.pojo != null) {
            throw new IllegalStateException("Enum already created");
        }
        try {
            // create the class
            logger.debug("Creating Enum " + fullyQualifiedClassName);
            this.pojo = this.pojoModel._class(fullyQualifiedClassName, ClassType.ENUM);
            // Handle Serialization
            // Do enums need to be serializable?
            // implementsSerializable();
        } catch (JClassAlreadyExistsException e) {
            // class already exists - reuse it!
            logger.debug("Enum {} already exists. Reusing it!", fullyQualifiedClassName);
            this.pojo = this.pojoModel._getClass(fullyQualifiedClassName);
        }
        // Add to shortcuts
        this.codeModels.put(fullyQualifiedClassName, this.pojo);
        return this;
    }
}
public class MindMapUtils { /** * Remove duplications and successors for presented topics in array .
* @ param topics array to be processed
* @ return resulted array
* @ since 1.3.1 */
@ Nonnull @ MustNotContainNull public static Topic [ ] removeSuccessorsAndDuplications ( @ Nonnull @ MustNotContainNull final Topic ... topics ) { } } | final List < Topic > result = new ArrayList < Topic > ( ) ; for ( final Topic t : topics ) { final Iterator < Topic > iterator = result . iterator ( ) ; while ( iterator . hasNext ( ) ) { final Topic listed = iterator . next ( ) ; if ( listed == t || listed . hasAncestor ( t ) ) { iterator . remove ( ) ; } } result . add ( t ) ; } return result . toArray ( new Topic [ result . size ( ) ] ) ; |
public class CreateCloudFormationStackRequest { /** * An array of parameters that will be used to create the new Amazon EC2 instance . You can only pass one instance
* entry at a time in this array . You will get an invalid parameter error if you pass more than one instance entry
* in this array .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setInstances ( java . util . Collection ) } or { @ link # withInstances ( java . util . Collection ) } if you want to
* override the existing values .
* @ param instances
* An array of parameters that will be used to create the new Amazon EC2 instance . You can only pass one
* instance entry at a time in this array . You will get an invalid parameter error if you pass more than one
* instance entry in this array .
* @ return Returns a reference to this object so that method calls can be chained together . */
public CreateCloudFormationStackRequest withInstances ( InstanceEntry ... instances ) { } } | if ( this . instances == null ) { setInstances ( new java . util . ArrayList < InstanceEntry > ( instances . length ) ) ; } for ( InstanceEntry ele : instances ) { this . instances . add ( ele ) ; } return this ; |
public class SafeHtmlUtil { /** * Sanitize user inputs .
* @ param raw the input string to be sanitized
* @ return the sanitized string */
public static String sanitize ( String raw ) { } } | return ( raw == null || raw . length ( ) == 0 ) ? raw : HTMLEntityEncode ( canonicalize ( raw ) ) ; |
public class JobEnvironmentCache {
    /**
     * Creates the runtime environment for a job and loads the corresponding jar
     * resources. Uses double-checked locking on {@code jobBeanFactoryMap} so the
     * environment for each jar path is initialised exactly once.
     *
     * @param jarFilePath local path of the jar file
     * @param packagesToScan packages to scan for job definitions
     * @param isSpring whether this is a Spring environment
     * @throws Exception if an unchecked failure occurs during loading
     */
    public void loadJobEnvironment(String jarFilePath, String packagesToScan, boolean isSpring) throws Exception {
        // Fast path: environment already built for this jar.
        JobBeanFactory jobBeanFactory = jobBeanFactoryMap.get(jarFilePath);
        if (jobBeanFactory != null) {
            return;
        }
        synchronized (jobBeanFactoryMap) {
            // Re-check under the lock (double-checked locking).
            jobBeanFactory = jobBeanFactoryMap.get(jarFilePath);
            if (jobBeanFactory != null) {
                return;
            }
            jobBeanFactory = createJobBeanFactory(jarFilePath, isSpring);
            jobBeanFactoryMap.put(jarFilePath, jobBeanFactory);
            // Scan the jar with a dedicated class loader and cache the discovered jobs.
            ClassLoader classLoader = ApplicationClassLoaderFactory.getJarApplicationClassLoader(jarFilePath);
            JobScanner jobScanner = JobScannerFactory.createJarFileJobScanner(classLoader, packagesToScan, jarFilePath);
            jobDescriptorListMap.put(jarFilePath, jobScanner.getJobDescriptorList());
        }
    }
}
public class CollectionUtils { /** * Takes some collections and creates a map from their elements to which collections contain them .
* The Collections must be disjoint or an { @ link java . lang . IllegalArgumentException } will be
* thrown . */
public static < T , C extends Collection < T > > Map < T , C > makeElementsToContainersMap ( final Iterable < C > collections ) { } } | final ImmutableMap . Builder < T , C > ret = ImmutableMap . builder ( ) ; for ( final C collection : collections ) { for ( final T item : collection ) { ret . put ( item , collection ) ; } } return ret . build ( ) ; |
public class PowerOfTwoFileAllocator { /** * Find an item in the tree .
* @ param x
* the item to search for .
* @ return the matching item of null if not found . */
private Region find ( Region x ) { } } | Region current = this . root ; while ( current != NULL_NODE ) { long res = x . orderRelativeTo ( current ) ; if ( res < 0 ) { current = current . left ; } else if ( res > 0 ) { current = current . right ; } else { return current ; } } return null ; |
public class ICUHumanize {
    /**
     * Gets the ICU based DecimalFormat instance for the current thread with the
     * given pattern and uses it to format the given arguments.
     *
     * @param pattern Format pattern that follows the conventions of
     *        {@link com.ibm.icu.text.MessageFormat MessageFormat}
     * @param args Arguments
     * @return The formatted String
     */
    public static String format(final String pattern, final Object... args) {
        // Delegates to the per-thread cached formatter built for this pattern.
        return messageFormatInstance(pattern).render(args);
    }
}
public class QueryPlannerImpl {
    /**
     * Returns the column relations between the two table aliases for which BOTH
     * sides are backed by an existing index; matched relations are removed from
     * the storage so they are not planned twice.
     * Later this can be relaxed e.g. for inner joins.
     *
     * @param tableAlias alias of the left table
     * @param foreignTableAlias alias of the right table
     * @param tableAliases alias-to-table-name mapping used to build index signatures
     * @param columnRelations storage the matched relations are read from and removed from
     * @return the relations whose columns are all indexed on both sides
     */
    private List<ColumnRelation> getIndexedColumnRelations(String tableAlias, String foreignTableAlias, Map<String, String> tableAliases, ColumnRelationsStorage columnRelations) {
        List<ColumnRelation> ret = new ArrayList<>();
        List<ColumnRelation> matchedRelations = columnRelations.getRelations(tableAlias, foreignTableAlias);
        boolean hasColumnRelations = !matchedRelations.isEmpty();
        if (hasColumnRelations) {
            matchedRelations.forEach(matchedColumnRelation -> {
                for (ColumnRelation columnRelation : matchedColumnRelation.toList()) {
                    String leftTableName = tableAliases.get(columnRelation.getLeftTableAlias());
                    IndexSignature leftIndexSignature = new IndexSignature(leftTableName, Collections.singleton(columnRelation.getLeftColumn()));
                    String rightTableName = tableAliases.get(columnRelation.getRightTableAlias());
                    IndexSignature rightIndexSignature = new IndexSignature(rightTableName, Collections.singleton(columnRelation.getRightColumn()));
                    if (!indexStorage.getSignatures().contains(leftIndexSignature) || !indexStorage.getSignatures().contains(rightIndexSignature)) {
                        // NB: this 'return' only exits the lambda for the current
                        // relation (acts like 'continue' of the outer forEach).
                        return;
                    }
                }
                ret.add(matchedColumnRelation);
                // Consume the relation so subsequent planning passes skip it.
                columnRelations.removeRelation(tableAlias, foreignTableAlias, matchedColumnRelation);
            });
        }
        return ret;
    }
}
public class SecureAction {
    /**
     * Returns a Class.
     * Tries to load a class from the System ClassLoader or if that doesn't exist tries the boot ClassLoader.
     *
     * @param name the name of the class.
     * @return a Class
     * @throws ClassNotFoundException if neither loader can resolve the class
     */
    public Class<?> loadSystemClass(final String name) throws ClassNotFoundException {
        if (System.getSecurityManager() == null) {
            // No security manager: load directly without a privileged block.
            ClassLoader systemClassLoader = ClassLoader.getSystemClassLoader();
            return (systemClassLoader != null) ? systemClassLoader.loadClass(name) : bootClassLoader.loadClass(name);
        }
        try {
            // Same lookup, but executed under this action's access-control context.
            return AccessController.doPrivileged(new PrivilegedExceptionAction<Class<?>>() {
                @Override
                public Class<?> run() throws Exception {
                    ClassLoader systemClassLoader = ClassLoader.getSystemClassLoader();
                    return (systemClassLoader != null) ? systemClassLoader.loadClass(name) : bootClassLoader.loadClass(name);
                }
            }, controlContext);
        } catch (PrivilegedActionException e) {
            // Unwrap the checked exception so callers see the declared type.
            if (e.getException() instanceof ClassNotFoundException)
                throw (ClassNotFoundException) e.getException();
            throw (RuntimeException) e.getException();
        }
    }
}
public class CmsImportVersion10 {
    /**
     * Fills the unset fields for an imported resource with default values.<p>
     *
     * Defaults: fresh UUIDs for structure/resource ids (folders always get a new
     * resource id), current time for unspecified dates, and the current request
     * user for missing user fields.
     *
     * @throws CmsImportExportException if something goes wrong
     */
    protected void setDefaultsForEmptyResourceFields() throws CmsImportExportException {
        // get UUID for the structure
        if (m_resourceBuilder.getStructureId() == null) {
            // if null generate a new structure id
            m_resourceBuilder.setStructureId(new CmsUUID());
        }
        // get UUIDs for the resource
        if ((m_resourceBuilder.getResourceId() == null) || (m_resourceBuilder.getType().isFolder())) {
            // folders get always a new resource UUID
            m_resourceBuilder.setResourceId(new CmsUUID());
            m_resourceIdWasNull = true;
        }
        // read date last modified from the resource, default to currentTime for folders
        if (m_resourceBuilder.getDateLastModified() == DATE_LAST_MODIFICATION_FILETIME) {
            if (null != m_source) {
                m_resourceBuilder.setDateLastModified(m_helper.getFileModification(m_source));
            } else {
                m_resourceBuilder.setDateLastModified(System.currentTimeMillis());
            }
        }
        if (m_resourceBuilder.getDateLastModified() == DATE_LAST_MODIFICATION_UNSPECIFIED) {
            m_resourceBuilder.setDateLastModified(System.currentTimeMillis());
        }
        if (null == m_resourceBuilder.getUserLastModified()) {
            m_resourceBuilder.setUserLastModified(m_cms.getRequestContext().getCurrentUser().getId());
        }
        if (m_resourceBuilder.getDateCreated() == DATE_CREATED_UNSPECIFIED) {
            m_resourceBuilder.setDateCreated(System.currentTimeMillis());
        }
        if (m_resourceBuilder.getUserCreated().isNullUUID()) {
            m_resourceBuilder.setUserCreated(getRequestContext().getCurrentUser().getId());
        }
        // Guarantee a non-null property map for later processing.
        if (m_properties == null) {
            m_properties = new HashMap<String, CmsProperty>();
        }
    }
}
public class Parser {
    /**
     * Xml attribute expression:<p>
     * {@code @attr}, {@code @ns::attr}, {@code @ns::*}, {@code @ns::*},
     * {@code @*}, {@code @*::attr}, {@code @*::*}, {@code @ns::[expr]},
     * {@code @*::[expr]}, {@code @[expr]}<p>
     * Called if we peeked an '@' token.
     *
     * @return the parsed attribute-access AST node, or an error node after reporting
     */
    private AstNode attributeAccess() throws IOException {
        int tt = nextToken(), atPos = ts.tokenBeg;
        switch (tt) {
            // handles: @name, @ns::name, @ns::*, @ns::[expr]
            case Token.NAME:
                return propertyName(atPos, ts.getString(), 0);
            // handles: @*, @*::name, @*::*, @*::[expr]
            case Token.MUL:
                saveNameTokenData(ts.tokenBeg, "*", ts.lineno);
                return propertyName(atPos, "*", 0);
            // handles @[expr]
            case Token.LB:
                return xmlElemRef(atPos, null, -1);
            default:
                // Anything else after '@' is a syntax error; recover with an error node.
                reportError("msg.no.name.after.xmlAttr");
                return makeErrorNode();
        }
    }
}
public class CharacterTypeConverter { /** * { @ inheritDoc } */
@ Override public Character convert ( final String value ) { } } | checkArgument ( value != null , "Value to convert must not be null" ) ; checkArgument ( ! value . isEmpty ( ) , "Value to convert must not be empty" ) ; return value . charAt ( 0 ) ; |
public class ReadOnlyStyledDocument { /** * Creates a { @ link ReadOnlyStyledDocument } from the given segment .
* @ param segment the only segment in the only paragraph in the document
* @ param paragraphStyle the paragraph style to use for each paragraph in the returned document
* @ param style the style to use for each segment in the document
* @ param segmentOps the operations object that can create a segment froma given text
* @ param < PS > The type of the paragraph style .
* @ param < SEG > The type of the segments in the paragraph ( e . g . { @ link String } ) .
* @ param < S > The type of the style of individual segments . */
public static < PS , SEG , S > ReadOnlyStyledDocument < PS , SEG , S > fromSegment ( SEG segment , PS paragraphStyle , S style , SegmentOps < SEG , S > segmentOps ) { } } | Paragraph < PS , SEG , S > content = new Paragraph < PS , SEG , S > ( paragraphStyle , segmentOps , segment , style ) ; List < Paragraph < PS , SEG , S > > res = Collections . singletonList ( content ) ; return new ReadOnlyStyledDocument < > ( res ) ; |
public class WikipediaTemplateInfo { /** * Returns the names of all templates contained in the specified page .
* @ param pageTitle
* the title of the page for which the templates should be
* retrieved
* @ return A List with the names of the templates contained in the specified
* page
* @ throws WikiApiException
* If there was any error retrieving the page object ( most
* likely if the templates are corrupted ) */
public List < String > getTemplateNamesFromPage ( String pageTitle ) throws WikiApiException { } } | Page p = null ; try { p = wiki . getPage ( pageTitle ) ; } catch ( WikiApiException e ) { return new ArrayList < String > ( ) ; } return getTemplateNamesFromPage ( p ) ; |
public class Point2D { /** * returns signed distance of point from infinite line represented by
* pt _ 1 . . . pt _ 2 . The returned distance is positive if this point lies on the
* right - hand side of the line , negative otherwise . If the two input points
* are equal , the ( positive ) distance of this point to p _ 1 is returned . */
double offset ( /* const */
Point2D pt1 , /* const */
Point2D pt2 ) { } } | double newDistance = distance ( pt1 , pt2 ) ; Point2D p = construct ( x , y ) ; if ( newDistance == 0.0 ) return distance ( p , pt1 ) ; // get vectors relative to pt _ 1
Point2D p2 = new Point2D ( ) ; p2 . setCoords ( pt2 ) ; p2 . sub ( pt1 ) ; p . sub ( pt1 ) ; double cross = p . crossProduct ( p2 ) ; return cross / newDistance ; |
public class URI {
    /**
     * Initialize the path for this URI from a URI string spec. Also extracts
     * the query string (after {@code ?}) and fragment (after {@code #}) while
     * validating escape sequences and character classes in each part.
     *
     * @param p_uriSpec the URI specification (cannot be null)
     * @throws MalformedURIException if p_uriSpec violates syntax rules
     */
    private void initializePath(String p_uriSpec) throws MalformedURIException {
        if (p_uriSpec == null) {
            throw new MalformedURIException("Cannot initialize path from null string!");
        }
        int index = 0;
        int start = 0;
        int end = p_uriSpec.length();
        char testChar = '\0';
        // path - everything up to query string or fragment
        while (index < end) {
            testChar = p_uriSpec.charAt(index);
            if (testChar == '?' || testChar == '#') {
                break;
            }
            // check for valid escape sequence ('%' followed by two hex digits)
            if (testChar == '%') {
                if (index + 2 >= end || !isHex(p_uriSpec.charAt(index + 1)) || !isHex(p_uriSpec.charAt(index + 2))) {
                    throw new MalformedURIException(XMLMessages.createXMLMessage(XMLErrorResources.ER_PATH_CONTAINS_INVALID_ESCAPE_SEQUENCE, null)); // "Path contains invalid escape sequence!");
                }
            } else if (!isReservedCharacter(testChar) && !isUnreservedCharacter(testChar)) {
                // Backslash is tolerated in paths; everything else invalid is rejected.
                if ('\\' != testChar)
                    throw new MalformedURIException(XMLMessages.createXMLMessage(XMLErrorResources.ER_PATH_INVALID_CHAR, new Object[]{String.valueOf(testChar)})); // "Path contains invalid character: "
                    // + testChar);
            }
            index++;
        }
        m_path = p_uriSpec.substring(start, index);
        // query - starts with ? and up to fragment or end
        if (testChar == '?') {
            index++;
            start = index;
            while (index < end) {
                testChar = p_uriSpec.charAt(index);
                if (testChar == '#') {
                    break;
                }
                if (testChar == '%') {
                    if (index + 2 >= end || !isHex(p_uriSpec.charAt(index + 1)) || !isHex(p_uriSpec.charAt(index + 2))) {
                        throw new MalformedURIException("Query string contains invalid escape sequence!");
                    }
                } else if (!isReservedCharacter(testChar) && !isUnreservedCharacter(testChar)) {
                    throw new MalformedURIException("Query string contains invalid character:" + testChar);
                }
                index++;
            }
            m_queryString = p_uriSpec.substring(start, index);
        }
        // fragment - starts with #
        if (testChar == '#') {
            index++;
            start = index;
            while (index < end) {
                testChar = p_uriSpec.charAt(index);
                if (testChar == '%') {
                    if (index + 2 >= end || !isHex(p_uriSpec.charAt(index + 1)) || !isHex(p_uriSpec.charAt(index + 2))) {
                        throw new MalformedURIException("Fragment contains invalid escape sequence!");
                    }
                } else if (!isReservedCharacter(testChar) && !isUnreservedCharacter(testChar)) {
                    throw new MalformedURIException("Fragment contains invalid character:" + testChar);
                }
                index++;
            }
            m_fragment = p_uriSpec.substring(start, index);
        }
    }
}
public class ValidationStampFilterJdbcRepository {
    /**
     * Verifies that no other filter with the same name exists in the same scope
     * (global, project, or branch). Note: in H2, null columns are not taken into
     * account in the index, hence this explicit check instead of relying on a
     * unique constraint.
     *
     * @param filter the filter to validate before insert/update
     * @throws IllegalStateException if the filter targets both a project and a branch
     * @throws ValidationStampFilterNameAlreadyDefinedException if a same-scope filter
     *         with the same name already exists
     */
    private void checkUnicity(ValidationStampFilter filter) {
        // Check project vs branch
        if (filter.getProject() != null && filter.getBranch() != null) {
            throw new IllegalStateException("Filter cannot be associated with both a project and a branch.");
        }
        // Gets the existing filter for the name in this scope
        MapSqlParameterSource params = params("name", filter.getName())
                .addValue("project", filter.getProject() != null ? filter.getProject().id() : null)
                .addValue("branch", filter.getBranch() != null ? filter.getBranch().id() : null)
                .addValue("id", filter.getId() != null && filter.getId().isSet() ? filter.id() : null);
        String sql;
        // Branch scope
        if (filter.getBranch() != null) {
            sql = "SELECT ID FROM VALIDATION_STAMP_FILTERS WHERE PROJECT IS NULL AND BRANCH = :branch AND NAME = :name";
        }
        // Project scope
        else if (filter.getProject() != null) {
            sql = "SELECT ID FROM VALIDATION_STAMP_FILTERS WHERE PROJECT = :project AND BRANCH IS NULL AND NAME = :name";
        }
        // Global scope
        else {
            sql = "SELECT ID FROM VALIDATION_STAMP_FILTERS WHERE PROJECT IS NULL AND BRANCH IS NULL AND NAME = :name";
        }
        // Exclude the filter itself when it already has an ID (update case).
        if (filter.getId() != null && filter.getId().isSet()) {
            sql += " AND ID <> :id";
        }
        // Check
        Optional<Integer> o = getOptional(sql, params, Integer.class);
        if (o.isPresent()) {
            throw new ValidationStampFilterNameAlreadyDefinedException(filter.getName());
        }
    }
}
public class ExpressionFactory { /** * Adds more functions to the map that depend on an instantiated TSDB object .
* Only call this once please .
* @ param tsdb The TSDB object to initialize with */
public static void addTSDBFunctions ( final TSDB tsdb ) { } } | available_functions . put ( "divideSeries" , new DivideSeries ( tsdb ) ) ; available_functions . put ( "divide" , new DivideSeries ( tsdb ) ) ; available_functions . put ( "sumSeries" , new SumSeries ( tsdb ) ) ; available_functions . put ( "sum" , new SumSeries ( tsdb ) ) ; available_functions . put ( "diffSeries" , new DiffSeries ( tsdb ) ) ; available_functions . put ( "difference" , new DiffSeries ( tsdb ) ) ; available_functions . put ( "multiplySeries" , new MultiplySeries ( tsdb ) ) ; available_functions . put ( "multiply" , new MultiplySeries ( tsdb ) ) ; |
public class ReplicationInternal {
    /**
     * Persists the current replication checkpoint (the last processed sequence)
     * to the remote's {@code _local} checkpoint document, then mirrors it locally.
     * Port of CBL_Replicator.m {@code -(void) saveLastSequence}.
     *
     * <p>Re-entrancy: if a save is already in flight, this call only flags
     * {@code overdueForCheckpointSave}; the in-flight request's completion block
     * re-invokes this method.
     *
     * @exclude
     */
    @InterfaceAudience.Private
    public void saveLastSequence() {
        // Nothing to do if the sequence has not moved since the last save.
        if (!lastSequenceChanged) {
            return;
        }
        if (savingCheckpoint) {
            // If a save is already in progress, don't do anything. (The completion block will trigger
            // another save after the first one finishes.)
            overdueForCheckpointSave = true;
            return;
        }
        // Claim this change; reset flags before issuing the request.
        lastSequenceChanged = false;
        overdueForCheckpointSave = false;
        Log.d(Log.TAG_SYNC, "%s: saveLastSequence() called. lastSequence: %s remoteCheckpoint: %s", this, lastSequence, remoteCheckpoint);
        // Body starts from the previously known checkpoint doc (to keep its _rev etc.)
        // and overwrites lastSequence.
        final Map<String, Object> body = new HashMap<String, Object>();
        if (remoteCheckpoint != null) {
            body.putAll(remoteCheckpoint);
        }
        body.put("lastSequence", lastSequence);
        savingCheckpoint = true;
        final String remoteCheckpointDocID = remoteCheckpointDocID();
        if (remoteCheckpointDocID == null) {
            // NOTE(review): savingCheckpoint was set to true just above and is NOT
            // reset on this early return, so later calls will only ever mark
            // overdueForCheckpointSave and bail — verify this path cannot wedge saves.
            Log.w(Log.TAG_SYNC, "%s: remoteCheckpointDocID is null, aborting saveLastSequence()", this);
            return;
        }
        final String checkpointID = remoteCheckpointDocID;
        Log.d(Log.TAG_SYNC, "%s: start put remote _local document. checkpointID: %s body: %s", this, checkpointID, body);
        Future future = sendAsyncRequest("PUT", "_local/" + checkpointID, false, body, new RemoteRequestCompletion() {
            @Override
            public void onCompletion(RemoteRequest remoteRequest, Response httpResponse, Object result, Throwable e) {
                Log.d(Log.TAG_SYNC, "%s: put remote _local document request finished. checkpointID: %s body: %s", this, checkpointID, body);
                try {
                    if (e != null) {
                        // Failed to save checkpoint:
                        switch (Utils.getStatusFromError(e)) {
                            case Status.NOT_FOUND:
                                Log.i(Log.TAG_SYNC, "%s: could not save remote checkpoint: 404 NOT FOUND", this);
                                remoteCheckpoint = null; // doc deleted or db reset
                                overdueForCheckpointSave = true; // try saving again
                                break;
                            case Status.CONFLICT:
                                Log.i(Log.TAG_SYNC, "%s: could not save remote checkpoint: 409 CONFLICT", this);
                                // Someone else updated the checkpoint doc; re-read it before retrying.
                                refreshRemoteCheckpointDoc();
                                break;
                            default:
                                Log.i(Log.TAG_SYNC, "%s: could not save remote checkpoint: %s", this, e);
                                // TODO: On 401 or 403, and this is a pull, remember that remote
                                // TODO: is read-only & don't attempt to read its checkpoint next time.
                                break;
                        }
                    } else {
                        // Saved checkpoint: remember the new _rev and update the local copy.
                        Map<String, Object> response = (Map<String, Object>) result;
                        body.put("_rev", response.get("rev"));
                        remoteCheckpoint = body;
                        if (db != null && db.isOpen()) {
                            Log.d(Log.TAG_SYNC, "%s: saved remote checkpoint, updating local checkpoint. RemoteCheckpoint: %s", this, remoteCheckpoint);
                            setLastSequenceFromWorkExecutor(lastSequence, checkpointID);
                        } else {
                            Log.w(Log.TAG_SYNC, "%s: Database is null or closed, not calling db.setLastSequence() ", this);
                        }
                    }
                } finally {
                    // Always release the in-flight flag, and run any save that
                    // became due while this request was pending.
                    savingCheckpoint = false;
                    if (overdueForCheckpointSave) {
                        Log.i(Log.TAG_SYNC, "%s: overdueForCheckpointSave == true, calling saveLastSequence()", this);
                        overdueForCheckpointSave = false;
                        saveLastSequence();
                    }
                }
            }
        });
        pendingFutures.add(future);
    }
}
public class CacheImpl { /** * { @ inheritDoc } */
@ Override public Collection < Object > getKeys ( String scope ) throws InvalidScopeException , InternalCacheEngineException { } } | try { return cacheStoreAdapter . getKeys ( scope ) ; } catch ( InvalidScopeException ex ) { throw ex ; } catch ( Exception ex ) { throw new InternalCacheEngineException ( "Exception while getting keys of items within scope " + scope , ex ) ; } |
public class CheckDelayStat { /** * 超时的最大时间 , 单位分钟 */
private static Map < Long , Long > parseAlert ( String alert ) { } } | if ( alert == null ) { return null ; } Map < Long , Long > alertMap = new HashMap < Long , Long > ( ) ; String [ ] alerts = alert . split ( "," ) ; for ( int i = 0 ; i < alerts . length ; i ++ ) { String [ ] ncidAlert = alerts [ i ] . split ( "-" ) ; alertMap . put ( NumberUtils . toLong ( ncidAlert [ 0 ] , 0 ) , NumberUtils . toLong ( ncidAlert [ 1 ] , 0 ) ) ; if ( logger . isInfoEnabled ( ) ) { logger . info ( ncidAlert [ 0 ] + " : " + ncidAlert [ 1 ] ) ; } } return alertMap ; |
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertMFCMFCFlgsToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class CalculateUtils { /** * 将long整数与小数相乘 , 计算结四舍五入保留整数位 。
* @ param num1 数字1
* @ param num2 数字2
* @ return 数字相乘计算结果 */
public static int multiply ( long num1 , double num2 ) { } } | double num1D = ( ( Long ) num1 ) . doubleValue ( ) ; return multiply ( num1D , num2 ) ; |
public class IcuSyntaxUtils {
    /**
     * Private helper for {@code convertMsgPartsToEmbeddedIcuSyntax()} to convert msg parts.
     * Walks the original parts in order, accumulating raw text into
     * {@code currRawTextSb} and flushing it into {@code newMsgPartsBuilder} when
     * a placeholder ends the current text run.
     *
     * @param newMsgPartsBuilder The new msg parts being built.
     * @param currRawTextSb The collector for the current raw text, which hasn't yet been turned into
     *     a SoyMsgRawTextPart and added to newMsgPartsBuilder because it might not be complete.
     * @param origMsgParts The msg parts to convert.
     * @param isInPlrselPart Whether we're currently within a plural/select part's subtree
     *     (raw text inside plural/select needs ICU escaping).
     */
    private static void convertMsgPartsHelper(ImmutableList.Builder<SoyMsgPart> newMsgPartsBuilder, StringBuilder currRawTextSb, List<SoyMsgPart> origMsgParts, boolean isInPlrselPart) {
        for (SoyMsgPart origMsgPart : origMsgParts) {
            if (origMsgPart instanceof SoyMsgRawTextPart) {
                String rawText = ((SoyMsgRawTextPart) origMsgPart).getRawText();
                if (isInPlrselPart) {
                    rawText = icuEscape(rawText);
                }
                currRawTextSb.append(rawText);
            } else if (origMsgPart instanceof SoyMsgPlaceholderPart) {
                // A placeholder ends the curr raw text, so if the collected curr raw text is nonempty, add
                // a msg part for it and clear the collector.
                if (currRawTextSb.length() > 0) {
                    newMsgPartsBuilder.add(SoyMsgRawTextPart.of(currRawTextSb.toString()));
                    currRawTextSb.setLength(0);
                }
                // Reuse the msg part for the placeholder since it's immutable.
                newMsgPartsBuilder.add(origMsgPart);
            } else if (origMsgPart instanceof SoyMsgPluralRemainderPart) {
                // Remainder is rendered as literal text in ICU syntax.
                currRawTextSb.append(getPluralRemainderString());
            } else if (origMsgPart instanceof SoyMsgPluralPart) {
                // Recurse into plural subtree via the dedicated helper.
                convertPluralPartHelper(newMsgPartsBuilder, currRawTextSb, (SoyMsgPluralPart) origMsgPart);
            } else if (origMsgPart instanceof SoyMsgSelectPart) {
                // Recurse into select subtree via the dedicated helper.
                convertSelectPartHelper(newMsgPartsBuilder, currRawTextSb, (SoyMsgSelectPart) origMsgPart);
            }
        }
    }
}
public class Metadata { /** * Returns all the metadata entries named ' name ' , in the order they were received , parsed as T , or
* null if there are none . The iterator is not guaranteed to be " live . " It may or may not be
* accurate if Metadata is mutated . */
@ Nullable public < T > Iterable < T > getAll ( final Key < T > key ) { } } | for ( int i = 0 ; i < size ; i ++ ) { if ( bytesEqual ( key . asciiName ( ) , name ( i ) ) ) { return new IterableAt < > ( key , i ) ; } } return null ; |
public class GuavaUtils { /** * To fix semantic difference of Longs . tryParse ( ) from Long . parseLong ( Longs . tryParse ( ) returns null for ' + ' started value ) */
@ Nullable public static Long tryParseLong ( @ Nullable String string ) { } } | return Strings . isNullOrEmpty ( string ) ? null : Longs . tryParse ( string . charAt ( 0 ) == '+' ? string . substring ( 1 ) : string ) ; |
public class AdviceActivity { /** * Gets the after advice result .
* @ param < T > the generic type
* @ param aspectId the aspect id
* @ return the after advice result */
@ SuppressWarnings ( "unchecked" ) public < T > T getAfterAdviceResult ( String aspectId ) { } } | return ( aspectAdviceResult != null ? ( T ) aspectAdviceResult . getAfterAdviceResult ( aspectId ) : null ) ; |
public class URLUtil { /** * 编码字符为URL中查询语句 < br >
* 将需要转换的内容 ( ASCII码形式之外的内容 ) , 用十六进制表示法转换出来 , 并在之前加上 % 开头 。 < br >
* 此方法用于POST请求中的请求体自动编码 , 转义大部分特殊字符
* @ param url 被编码内容
* @ param charset 编码
* @ return 编码后的字符
* @ since 4.4.1 */
public static String encodeQuery ( String url , Charset charset ) { } } | if ( StrUtil . isEmpty ( url ) ) { return url ; } if ( null == charset ) { charset = CharsetUtil . defaultCharset ( ) ; } return URLEncoder . QUERY . encode ( url , charset ) ; |
public class CmsLoginUserAgreement { /** * Returns the content value of the given path as String . < p >
* @ param path the path to get the content value for
* @ return the content value of the given path as String */
public String getConfigurationContentStringValue ( String path ) { } } | if ( getConfigurationContent ( ) != null ) { return getConfigurationContent ( ) . getStringValue ( getCms ( ) , path , getLocale ( ) ) ; } return "" ; |
public class ConnectionFactory {
    /**
     * Initializes the connection factory. Ensuring that the appropriate drivers
     * are loaded and that a connection can be made successfully. The test
     * connection opened here is always closed; only the configuration
     * (driver, credentials, connection string) is retained.
     *
     * @throws DatabaseException thrown if we are unable to connect to the
     * database
     */
    public synchronized void initialize() throws DatabaseException {
        // this only needs to be called once; a non-null connectionString marks prior initialization.
        if (connectionString != null) {
            return;
        }
        Connection conn = null;
        try {
            // load the driver if necessary
            final String driverName = settings.getString(Settings.KEYS.DB_DRIVER_NAME, "");
            final String driverPath = settings.getString(Settings.KEYS.DB_DRIVER_PATH, "");
            LOGGER.debug("Loading driver '{}'", driverName);
            try {
                if (!driverPath.isEmpty()) {
                    // explicit driver path configured — load from that location
                    LOGGER.debug("Loading driver from: {}", driverPath);
                    driver = DriverLoader.load(driverName, driverPath);
                } else {
                    driver = DriverLoader.load(driverName);
                }
            } catch (DriverLoadException ex) {
                LOGGER.debug("Unable to load database driver", ex);
                throw new DatabaseException("Unable to load database driver", ex);
            }
            userName = settings.getString(Settings.KEYS.DB_USER, "dcuser");
            // yes, yes - hard-coded password - only if there isn't one in the properties file.
            password = settings.getString(Settings.KEYS.DB_PASSWORD, "DC-Pass1337!");
            try {
                connectionString = settings.getConnectionString(Settings.KEYS.DB_CONNECTION_STRING, Settings.KEYS.DB_FILE_NAME);
            } catch (IOException ex) {
                LOGGER.debug("Unable to retrieve the database connection string", ex);
                throw new DatabaseException("Unable to retrieve the database connection string", ex);
            }
            boolean shouldCreateSchema = false;
            try {
                // H2 file-based database: schema must be created when the data file is absent.
                if (connectionString.startsWith("jdbc:h2:file:")) { // H2
                    shouldCreateSchema = !h2DataFileExists();
                    LOGGER.debug("Need to create DB Structure: {}", shouldCreateSchema);
                }
            } catch (IOException ioex) {
                LOGGER.debug("Unable to verify database exists", ioex);
                throw new DatabaseException("Unable to verify database exists", ioex);
            }
            LOGGER.debug("Loading database connection");
            LOGGER.debug("Connection String: {}", connectionString);
            LOGGER.debug("Database User: {}", userName);
            try {
                // Integrated/trusted auth connection strings must not pass explicit credentials.
                if (connectionString.toLowerCase().contains("integrated security=true")
                        || connectionString.toLowerCase().contains("trusted_connection=true")) {
                    conn = DriverManager.getConnection(connectionString);
                } else {
                    conn = DriverManager.getConnection(connectionString, userName, password);
                }
            } catch (SQLException ex) {
                // H2 AUTO_SERVER mode can fail on hosts that cannot resolve their own name;
                // retry once in single-user mode and persist the corrected connection string.
                if (ex.getMessage().contains("java.net.UnknownHostException") && connectionString.contains("AUTO_SERVER=TRUE;")) {
                    connectionString = connectionString.replace("AUTO_SERVER=TRUE;", "");
                    try {
                        conn = DriverManager.getConnection(connectionString, userName, password);
                        settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
                        LOGGER.debug("Unable to start the database in server mode; reverting to single user mode");
                    } catch (SQLException sqlex) {
                        // NOTE(review): the retry failure (sqlex) is discarded here and the
                        // ORIGINAL exception (ex) is logged and wrapped — confirm this is intended.
                        LOGGER.debug("Unable to connect to the database", ex);
                        throw new DatabaseException("Unable to connect to the database", ex);
                    }
                } else {
                    LOGGER.debug("Unable to connect to the database", ex);
                    throw new DatabaseException("Unable to connect to the database", ex);
                }
            }
            if (shouldCreateSchema) {
                try {
                    createTables(conn);
                } catch (DatabaseException dex) {
                    LOGGER.debug("", dex);
                    throw new DatabaseException("Unable to create the database structure", dex);
                }
            }
            try {
                ensureSchemaVersion(conn);
            } catch (DatabaseException dex) {
                LOGGER.debug("", dex);
                throw new DatabaseException("Database schema does not match this version of dependency-check", dex);
            }
        } finally {
            // The connection opened here was only for validation/bootstrap; always close it.
            if (conn != null) {
                try {
                    conn.close();
                } catch (SQLException ex) {
                    LOGGER.debug("An error occurred closing the connection", ex);
                }
            }
        }
    }
}
public class RunMarshaller {
    /**
     * Marshall the given parameter object. Each getter of {@code Run} is bound
     * to its protocol marshalling descriptor; null-field handling is delegated
     * to the {@code protocolMarshaller}.
     *
     * @param run the object to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller to write the fields to
     * @throws SdkClientException if {@code run} is {@code null} or any field fails to marshall
     */
    public void marshall(Run run, ProtocolMarshaller protocolMarshaller) {
        if (run == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Purely mechanical: one marshall call per Run attribute/binding pair.
            protocolMarshaller.marshall(run.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(run.getName(), NAME_BINDING);
            protocolMarshaller.marshall(run.getType(), TYPE_BINDING);
            protocolMarshaller.marshall(run.getPlatform(), PLATFORM_BINDING);
            protocolMarshaller.marshall(run.getCreated(), CREATED_BINDING);
            protocolMarshaller.marshall(run.getStatus(), STATUS_BINDING);
            protocolMarshaller.marshall(run.getResult(), RESULT_BINDING);
            protocolMarshaller.marshall(run.getStarted(), STARTED_BINDING);
            protocolMarshaller.marshall(run.getStopped(), STOPPED_BINDING);
            protocolMarshaller.marshall(run.getCounters(), COUNTERS_BINDING);
            protocolMarshaller.marshall(run.getMessage(), MESSAGE_BINDING);
            protocolMarshaller.marshall(run.getTotalJobs(), TOTALJOBS_BINDING);
            protocolMarshaller.marshall(run.getCompletedJobs(), COMPLETEDJOBS_BINDING);
            protocolMarshaller.marshall(run.getBillingMethod(), BILLINGMETHOD_BINDING);
            protocolMarshaller.marshall(run.getDeviceMinutes(), DEVICEMINUTES_BINDING);
            protocolMarshaller.marshall(run.getNetworkProfile(), NETWORKPROFILE_BINDING);
            protocolMarshaller.marshall(run.getParsingResultUrl(), PARSINGRESULTURL_BINDING);
            protocolMarshaller.marshall(run.getResultCode(), RESULTCODE_BINDING);
            protocolMarshaller.marshall(run.getSeed(), SEED_BINDING);
            protocolMarshaller.marshall(run.getAppUpload(), APPUPLOAD_BINDING);
            protocolMarshaller.marshall(run.getEventCount(), EVENTCOUNT_BINDING);
            protocolMarshaller.marshall(run.getJobTimeoutMinutes(), JOBTIMEOUTMINUTES_BINDING);
            protocolMarshaller.marshall(run.getDevicePoolArn(), DEVICEPOOLARN_BINDING);
            protocolMarshaller.marshall(run.getLocale(), LOCALE_BINDING);
            protocolMarshaller.marshall(run.getRadios(), RADIOS_BINDING);
            protocolMarshaller.marshall(run.getLocation(), LOCATION_BINDING);
            protocolMarshaller.marshall(run.getCustomerArtifactPaths(), CUSTOMERARTIFACTPATHS_BINDING);
            protocolMarshaller.marshall(run.getWebUrl(), WEBURL_BINDING);
            protocolMarshaller.marshall(run.getSkipAppResign(), SKIPAPPRESIGN_BINDING);
            protocolMarshaller.marshall(run.getTestSpecArn(), TESTSPECARN_BINDING);
            protocolMarshaller.marshall(run.getDeviceSelectionResult(), DEVICESELECTIONRESULT_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure with the original as cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ReflectionUtils { /** * Find and replace the generic parameters with the real types .
* @ param targetType the type for which to resolve the parameters
* @ param rootType an extending class as Type
* @ return the Type with resolved parameters */
public static Type resolveType ( Type targetType , Type rootType ) { } } | Type resolvedType ; if ( targetType instanceof ParameterizedType && rootType instanceof ParameterizedType ) { ParameterizedType parameterizedType = ( ParameterizedType ) targetType ; resolvedType = resolveType ( ( Class < ? > ) parameterizedType . getRawType ( ) , parameterizedType . getActualTypeArguments ( ) , getTypeClass ( rootType ) ) ; } else { resolvedType = resolveType ( getTypeClass ( rootType ) , null , getTypeClass ( targetType ) ) ; } return resolvedType ; |
public class Scriptable { /** * Provides objects to the JavaScript engine under various names . */
public void setObjects ( Map < String , Object > objects ) { } } | for ( Map . Entry < String , Object > entry : objects . entrySet ( ) ) { js . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } |
public class MetricRegistry { /** * Return the { @ link Counter } registered under this name ; or create and register
* a new { @ link Counter } using the provided MetricSupplier if none is registered .
* @ param name the name of the metric
* @ param supplier a MetricSupplier that can be used to manufacture a counter .
* @ return a new or pre - existing { @ link Counter } */
public Counter counter ( String name , final MetricSupplier < Counter > supplier ) { } } | return getOrAdd ( name , new MetricBuilder < Counter > ( ) { @ Override public Counter newMetric ( ) { return supplier . newMetric ( ) ; } @ Override public boolean isInstance ( Metric metric ) { return Counter . class . isInstance ( metric ) ; } } ) ; |
public class DeprecatedListWriter { /** * Add the anchor .
* @ param builder the deprecated list builder
* @ param type the type of list being documented
* @ param htmlTree the content tree to which the anchor will be added */
private void addAnchor ( DeprecatedAPIListBuilder builder , DeprElementKind kind , Content htmlTree ) { } } | if ( builder . hasDocumentation ( kind ) ) { htmlTree . addContent ( getMarkerAnchor ( getAnchorName ( kind ) ) ) ; } |
public class JavaScriptUtils { /** * Creates and returns a JavaScript line for setting a cookie with the specified name , value , and cookie properties . Note : The
* properties will be HTML - encoded but the name and value will not be . */
public String getUnencodedHtmlCookieString ( String name , String value , Map < String , String > cookieProperties ) { } } | return createHtmlCookieString ( name , value , cookieProperties , false ) ; |
public class ExifGpsWriter { /** * Private method - Reads the exif metadata for an image
* @ param app1EXIFNode app1 Node of the image ( where the exif data is stored )
* @ return the exif metadata */
private ArrayList < IIOMetadata > readExif ( IIOMetadataNode app1EXIFNode ) { } } | // Set up input skipping EXIF ID 6 - byte sequence .
byte [ ] app1Params = ( byte [ ] ) app1EXIFNode . getUserObject ( ) ; MemoryCacheImageInputStream app1EXIFInput = new MemoryCacheImageInputStream ( new ByteArrayInputStream ( app1Params , 6 , app1Params . length - 6 ) ) ; // only the tiff reader knows how to interpret the exif metadata
ImageReader tiffReader = null ; Iterator < ImageReader > readers = ImageIO . getImageReadersByFormatName ( "tiff" ) ; while ( readers . hasNext ( ) ) { tiffReader = ( ImageReader ) readers . next ( ) ; if ( tiffReader . getClass ( ) . getName ( ) . startsWith ( "com.sun.media" ) ) { // Break on finding the core provider .
break ; } } if ( tiffReader == null ) { throw new RuntimeException ( "Cannot find core TIFF reader!" ) ; } ArrayList < IIOMetadata > out = new ArrayList < IIOMetadata > ( 1 ) ; tiffReader . setInput ( app1EXIFInput ) ; IIOMetadata tiffMetadata = null ; try { tiffMetadata = tiffReader . getImageMetadata ( 0 ) ; // IIOMetadata meta = tiffReader . getImageMetadata ( 0 ) ;
TIFFImageReadParam rParam = ( TIFFImageReadParam ) tiffReader . getDefaultReadParam ( ) ; rParam . setTIFFDecompressor ( null ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } ; tiffReader . dispose ( ) ; out . add ( 0 , tiffMetadata ) ; return out ; |
public class ConstraintContextProperties { /** * - - - - - Property setting - - - - - */
private void setProperty ( ConstraintContextProperty contextProperty , Object value ) { } } | props . setProperty ( contextProperty . toString ( ) , value . toString ( ) ) ; |
public class CPOptionPersistenceImpl { /** * Returns the first cp option in the ordered set where uuid = & # 63 ; .
* @ param uuid the uuid
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp option
* @ throws NoSuchCPOptionException if a matching cp option could not be found */
@ Override public CPOption findByUuid_First ( String uuid , OrderByComparator < CPOption > orderByComparator ) throws NoSuchCPOptionException { } } | CPOption cpOption = fetchByUuid_First ( uuid , orderByComparator ) ; if ( cpOption != null ) { return cpOption ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( "}" ) ; throw new NoSuchCPOptionException ( msg . toString ( ) ) ; |
public class AbstrCFMLExprTransformer {
    /**
     * Consumes a multi-line comment ("slash-star ... star-slash") from the source.
     * If the comment is a doc comment (second '*'), its text is run through the
     * doc-comment transformer and stored on {@code data.docComment}.
     *
     * @return whether a comment was removed
     * @throws TemplateException if the comment is not closed
     */
    private boolean multiLineComment(Data data) throws TemplateException {
        SourceCode cfml = data.srcCode;
        // Not at a comment start: consume nothing.
        if (!cfml.forwardIfCurrent("/*")) return false;
        // pos is just AFTER the opening "/*".
        int pos = cfml.getPos();
        // A third '*' right after "/*" marks a doc comment ("/**").
        boolean isDocComment = cfml.isCurrent('*');
        // Advance to the closing "*/" (or end of input).
        while (cfml.isValidIndex()) {
            if (cfml.isCurrent("*/")) break;
            cfml.next();
        }
        if (!cfml.forwardIfCurrent("*/")) {
            // Unterminated: rewind so the error position points at the comment.
            cfml.setPos(pos);
            throw new TemplateException(cfml, "comment is not closed");
        }
        if (isDocComment) {
            // NOTE(review): this assumes SourceCode.substring(start, count) takes a
            // (start, length) pair — confirm against the SourceCode API.
            String comment = cfml.substring(pos - 2, cfml.getPos() - pos);
            data.docComment = docCommentTransformer.transform(data.factory, comment);
        }
        return true;
    }
}
public class Gson { /** * This method serializes the specified object into its equivalent Json representation .
* This method should be used when the specified object is not a generic type . This method uses
* { @ link Class # getClass ( ) } to get the type for the specified object , but the
* { @ code getClass ( ) } loses the generic type information because of the Type Erasure feature
* of Java . Note that this method works fine if the any of the object fields are of generic type ,
* just the object itself should not be of a generic type . If the object is of generic type , use
* { @ link # toJson ( Object , Type , Appendable ) } instead .
* @ param src the object for which Json representation is to be created setting for Gson
* @ param writer Writer to which the Json representation needs to be written
* @ throws JsonIOException if there was a problem writing to the writer
* @ since 1.2 */
public void toJson ( Object src , Appendable writer ) throws JsonIOException { } } | if ( src != null ) { toJson ( src , src . getClass ( ) , writer ) ; } else { toJson ( JsonNull . INSTANCE , writer ) ; } |
public class GenMapAndTopicListModule {
    /**
     * Update uplevels if needed. If the parameter contains a {@link org.dita.dost.util.Constants#STICK STICK}, it and
     * anything following it is removed. The uplevel count is derived from the
     * file's path relative to the root file.
     *
     * @param file file path (must be absolute)
     */
    private void updateUplevels(final URI file) {
        assert file.isAbsolute();
        if (file.getPath() != null) {
            // Strip the STICK marker and everything after it, if present.
            final URI f = file.toString().contains(STICK)
                    ? toURI(file.toString().substring(0, file.toString().indexOf(STICK)))
                    : file;
            final URI relative = getRelativePath(rootFile, f).normalize();
            final int lastIndex = relative.getPath().lastIndexOf(".." + URI_SEPARATOR);
            if (lastIndex != -1) {
                // NOTE(review): "../" is 3 characters, so lastIndex/3 + 1 counts the
                // leading "../" segments — this assumes the normalized relative path
                // starts with a contiguous run of "../"; confirm that invariant.
                final int newUplevels = lastIndex / 3 + 1;
                // Keep the maximum uplevel depth seen so far.
                uplevels = Math.max(newUplevels, uplevels);
            }
        }
    }
}
public class DefaultTransformationManager { /** * { @ inheritDoc }
* @ deprecated Replaced by { @ link # performTransformations ( Block , TransformationContext ) } */
@ Override @ Deprecated public void performTransformations ( XDOM dom , Syntax syntax ) throws TransformationException { } } | performTransformations ( dom , new TransformationContext ( dom , syntax ) ) ; |
public class DocxModuleAppSetupFixture { private Order create ( final String number , final String customerName , final LocalDate date , final String preferences , final ExecutionContext executionContext ) { } } | return executionContext . add ( this , orders . create ( number , customerName , date , preferences ) ) ; |
public class SampleState { /** * See push ( ) for an explanation .
* @ param model the model to pop this SampleState from */
public void pop ( GraphicalModel model ) { } } | assert ( model . factors . contains ( addedFactor ) ) ; model . factors . remove ( addedFactor ) ; model . getVariableMetaDataByReference ( variable ) . remove ( CliqueTree . VARIABLE_OBSERVED_VALUE ) ; |
public class ProcessCommunicatorImpl { /** * / * ( non - Javadoc )
* @ see tuwien . auto . calimero . process . ProcessCommunicator # write
* ( tuwien . auto . calimero . datapoint . Datapoint , java . lang . String ) */
public void write ( Datapoint dp , String value ) throws KNXException { } } | final DPTXlator t = TranslatorTypes . createTranslator ( dp . getMainNumber ( ) , dp . getDPT ( ) ) ; t . setValue ( value ) ; write ( dp . getMainAddress ( ) , dp . getPriority ( ) , t ) ; |
public class PersistentSettings {
    /**
     * Insert property into database if value is not {@code null}, else delete property from
     * database. Session is not committed but {@link org.sonar.api.config.GlobalPropertyChangeHandler}
     * are executed.
     *
     * @param dbSession open database session (caller commits)
     * @param key property key
     * @param value new value, or {@code null} to delete the property
     * @return this, for chaining
     */
    public PersistentSettings saveProperty(DbSession dbSession, String key, @Nullable String value) {
        // Persist first, then notify change handlers — order matters.
        savePropertyImpl(dbSession, key, value);
        changeNotifier.onGlobalPropertyChange(key, value);
        return this;
    }
}
public class DriverController { /** * Displays the result ( manual check ) . */
@ Route ( method = HttpMethod . POST , uri = "samples/manual-validation" ) public Result check ( @ Body Car car ) throws IOException { } } | Set < ConstraintViolation < Car > > violations = validator . validate ( car ) ; if ( ! violations . isEmpty ( ) ) { return badRequest ( violations ) . json ( ) ; } else { return ok ( ) ; } |
public class CPDAvailabilityEstimatePersistenceImpl { /** * Clears the cache for the cpd availability estimate .
* The { @ link EntityCache } and { @ link FinderCache } are both cleared by this method . */
@ Override public void clearCache ( CPDAvailabilityEstimate cpdAvailabilityEstimate ) { } } | entityCache . removeResult ( CPDAvailabilityEstimateModelImpl . ENTITY_CACHE_ENABLED , CPDAvailabilityEstimateImpl . class , cpdAvailabilityEstimate . getPrimaryKey ( ) ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITH_PAGINATION ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION ) ; clearUniqueFindersCache ( ( CPDAvailabilityEstimateModelImpl ) cpdAvailabilityEstimate , true ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.