signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class BeanMapOutputHandler { /** * Converts query output into Map of Beans .
* @ param params query output row
* @ return Map of Beans converted from query output
* @ throws org . midao . jdbc . core . exception . MjdbcException */
@ Override protected V createRow ( QueryParameters params ) throws MjdbcException { } } | return this . outputProcessor . toBean ( params , type ) ; |
public class ConstantPool { /** * Returns a utf - 8 constant as a string */
public String getUtf8AsString ( int index ) { } } | Utf8Constant utf8 = ( Utf8Constant ) _entries . get ( index ) ; if ( utf8 == null ) return null ; else return utf8 . getValue ( ) ; |
public class ToStringStyle { /** * < p > Sets the start text to output when an < code > Object < / code > is
* output in summary mode . < / p >
* < p > This is output before the size value . < / p >
* < p > < code > null < / code > is accepted , but will be converted to
* an empty String . < / p >
* @ param summaryObjectStartText the new start of summary text */
protected void setSummaryObjectStartText ( String summaryObjectStartText ) { } } | if ( summaryObjectStartText == null ) { summaryObjectStartText = StringUtils . EMPTY ; } this . summaryObjectStartText = summaryObjectStartText ; |
public class XAttributeUtils { /** * For the given attribute , returns its type , i . e . , the most high - level ,
* typed interface this attribute implements .
* @ param attribute
* Attribute to analyze .
* @ return High - level type interface of this attribute . */
public static Class < ? extends XAttribute > getType ( XAttribute attribute ) { } } | if ( attribute instanceof XAttributeList ) { return XAttributeList . class ; } else if ( attribute instanceof XAttributeContainer ) { return XAttributeContainer . class ; } else if ( attribute instanceof XAttributeLiteral ) { return XAttributeLiteral . class ; } else if ( attribute instanceof XAttributeBoolean ) { return XAttributeBoolean . class ; } else if ( attribute instanceof XAttributeContinuous ) { return XAttributeContinuous . class ; } else if ( attribute instanceof XAttributeDiscrete ) { return XAttributeDiscrete . class ; } else if ( attribute instanceof XAttributeTimestamp ) { return XAttributeTimestamp . class ; } else if ( attribute instanceof XAttributeID ) { return XAttributeID . class ; } else { throw new AssertionError ( "Unexpected attribute type!" ) ; } |
public class AmazonRdsReadReplicaAwareDataSourceFactoryBean { /** * Constructs a
* { @ link org . springframework . cloud . aws . jdbc . datasource . ReadOnlyRoutingDataSource }
* data source that contains the regular data source as a default , and all
* read - replicas as additional data source . The
* { @ link org . springframework . cloud . aws . jdbc . datasource . ReadOnlyRoutingDataSource } is
* additionally wrapped with a
* { @ link org . springframework . jdbc . datasource . LazyConnectionDataSourceProxy } , because
* the read - only flag is only available after the transactional context has been
* established . This is only the case if the physical connection is requested after
* the transaction start and not while starting a transaction .
* @ return a ReadOnlyRoutingDataSource that is wrapped with a
* LazyConnectionDataSourceProxy
* @ throws Exception if the underlying data source setup throws any exception */
@ Override protected DataSource createInstance ( ) throws Exception { } } | DBInstance dbInstance = getDbInstance ( getDbInstanceIdentifier ( ) ) ; // If there is no read replica available , delegate to super class
if ( dbInstance . getReadReplicaDBInstanceIdentifiers ( ) . isEmpty ( ) ) { return super . createInstance ( ) ; } HashMap < Object , Object > replicaMap = new HashMap < > ( dbInstance . getReadReplicaDBInstanceIdentifiers ( ) . size ( ) ) ; for ( String replicaName : dbInstance . getReadReplicaDBInstanceIdentifiers ( ) ) { replicaMap . put ( replicaName , createDataSourceInstance ( replicaName ) ) ; } // Create the data source
ReadOnlyRoutingDataSource dataSource = new ReadOnlyRoutingDataSource ( ) ; dataSource . setTargetDataSources ( replicaMap ) ; dataSource . setDefaultTargetDataSource ( createDataSourceInstance ( getDbInstanceIdentifier ( ) ) ) ; // Initialize the class
dataSource . afterPropertiesSet ( ) ; return new LazyConnectionDataSourceProxy ( dataSource ) ; |
public class JMRegex { /** * Gets matched list by group .
* @ param targetString the target string
* @ return the matched list by group */
public List < String > getMatchedListByGroup ( String targetString ) { } } | return getMatcherAsOpt ( targetString ) . map ( matcher -> rangeClosed ( 1 , matcher . groupCount ( ) ) . mapToObj ( matcher :: group ) . map ( Object :: toString ) . collect ( toList ( ) ) ) . orElseGet ( Collections :: emptyList ) ; |
public class ResourceObjectIncludeImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setYobjOset ( Integer newYobjOset ) { } } | Integer oldYobjOset = yobjOset ; yobjOset = newYobjOset ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . RESOURCE_OBJECT_INCLUDE__YOBJ_OSET , oldYobjOset , yobjOset ) ) ; |
public class DelegatedClientFactory { /**
 * Configures and registers SAML2 delegated clients.
 * Builds one {@code SAML2Client} per configured pac4j SAML entry that has a
 * keystore path, IdP metadata path, SP entity id, and SP metadata path set;
 * applies all optional settings (bindings, keystore alias, NameID policy,
 * authn context, requested and mapped attributes, logout handler), assigns a
 * generated name when none is configured, and adds the client to the given
 * collection.
 * @ param properties the collection the created clients are added to */
// NOTE(review): SAML2ServiceProvicerRequestedAttribute is the actual (misspelled) pac4j class name — do not "fix" it.
protected void configureSamlClient ( final Collection < BaseClient > properties ) { } } | val index = new AtomicInteger ( ) ; pac4jProperties . getSaml ( ) . stream ( ) . filter ( saml -> StringUtils . isNotBlank ( saml . getKeystorePath ( ) ) && StringUtils . isNotBlank ( saml . getIdentityProviderMetadataPath ( ) ) && StringUtils . isNotBlank ( saml . getServiceProviderEntityId ( ) ) && StringUtils . isNotBlank ( saml . getServiceProviderMetadataPath ( ) ) ) . forEach ( saml -> { val cfg = new SAML2Configuration ( saml . getKeystorePath ( ) , saml . getKeystorePassword ( ) , saml . getPrivateKeyPassword ( ) , saml . getIdentityProviderMetadataPath ( ) ) ; cfg . setMaximumAuthenticationLifetime ( saml . getMaximumAuthenticationLifetime ( ) ) ; cfg . setServiceProviderEntityId ( saml . getServiceProviderEntityId ( ) ) ; cfg . setServiceProviderMetadataPath ( saml . getServiceProviderMetadataPath ( ) ) ; cfg . setAuthnRequestBindingType ( saml . getDestinationBinding ( ) ) ; cfg . setForceAuth ( saml . isForceAuth ( ) ) ; cfg . setPassive ( saml . isPassive ( ) ) ; cfg . setSignMetadata ( saml . isSignServiceProviderMetadata ( ) ) ; if ( StringUtils . isNotBlank ( saml . getPrincipalIdAttribute ( ) ) ) { cfg . setAttributeAsId ( saml . getPrincipalIdAttribute ( ) ) ; } cfg . setWantsAssertionsSigned ( saml . isWantsAssertionsSigned ( ) ) ; cfg . setLogoutHandler ( casServerSpecificLogoutHandler ) ; cfg . setUseNameQualifier ( saml . isUseNameQualifier ( ) ) ; cfg . setAttributeConsumingServiceIndex ( saml . getAttributeConsumingServiceIndex ( ) ) ; if ( saml . getAssertionConsumerServiceIndex ( ) >= 0 ) { cfg . setAssertionConsumerServiceIndex ( saml . getAssertionConsumerServiceIndex ( ) ) ; } if ( StringUtils . isNotBlank ( saml . getAuthnContextClassRef ( ) ) ) { cfg . setComparisonType ( saml . getAuthnContextComparisonType ( ) . toUpperCase ( ) ) ; cfg . setAuthnContextClassRef ( saml . getAuthnContextClassRef ( ) ) ; } if ( StringUtils . isNotBlank ( saml . 
getKeystoreAlias ( ) ) ) { cfg . setKeystoreAlias ( saml . getKeystoreAlias ( ) ) ; } if ( StringUtils . isNotBlank ( saml . getNameIdPolicyFormat ( ) ) ) { cfg . setNameIdPolicyFormat ( saml . getNameIdPolicyFormat ( ) ) ; } if ( ! saml . getRequestedAttributes ( ) . isEmpty ( ) ) { saml . getRequestedAttributes ( ) . stream ( ) . map ( attribute -> new SAML2ServiceProvicerRequestedAttribute ( attribute . getName ( ) , attribute . getFriendlyName ( ) , attribute . getNameFormat ( ) , attribute . isRequired ( ) ) ) . forEach ( attribute -> cfg . getRequestedServiceProviderAttributes ( ) . add ( attribute ) ) ; } val mappedAttributes = saml . getMappedAttributes ( ) ; if ( ! mappedAttributes . isEmpty ( ) ) { val results = mappedAttributes . stream ( ) . collect ( Collectors . toMap ( Pac4jSamlClientProperties . ServiceProviderMappedAttribute :: getName , Pac4jSamlClientProperties . ServiceProviderMappedAttribute :: getMappedTo ) ) ; cfg . setMappedAttributes ( results ) ; } val client = new SAML2Client ( cfg ) ; val count = index . intValue ( ) ; if ( StringUtils . isBlank ( saml . getClientName ( ) ) ) { client . setName ( client . getClass ( ) . getSimpleName ( ) + count ) ; } configureClient ( client , saml ) ; index . incrementAndGet ( ) ; LOGGER . debug ( "Created delegated client [{}]" , client ) ; properties . add ( client ) ; } ) ; |
public class RemoveUnusedCode { /** * Handle a class that is not the RHS child of an assignment or a variable declaration
* initializer .
* < p > For
* @ param classNode
* @ param scope */
private void traverseClass ( Node classNode , Scope scope ) { } } | checkArgument ( classNode . isClass ( ) ) ; if ( NodeUtil . isClassDeclaration ( classNode ) ) { traverseClassDeclaration ( classNode , scope ) ; } else { traverseClassExpression ( classNode , scope ) ; } |
public class ViewManager {
    /**
     * Checks if a given string is a template path or template content.
     * If the string contains any of the following characters then we assume it
     * is content, otherwise it is a path:
     * <ul>
     *   <li>space characters (including line breaks)</li>
     *   <li>non alphanumeric characters, except dot "." and dollar "$"
     *       (and common path separators, which are not blacklisted)</li>
     * </ul>
     *
     * @param string the string to be tested
     * @return {@code true} if the string is a template path, {@code false} if
     *         it is template content
     */
    public static boolean isTemplatePath(String string) {
        final int length = string.length();
        if (length == 0) {
            return true;
        }
        for (int i = 0; i < length; ++i) {
            switch (string.charAt(i)) {
                // Bug fix: '\n' and '\r' are space characters per the contract
                // above but were previously not checked, so multi-line template
                // content without other markers was misclassified as a path.
                case '\n':
                case '\r':
                case ' ':
                case '\t':
                case '\b':
                case '<':
                case '>':
                case '(':
                case ')':
                case '[':
                case ']':
                case '{':
                case '}':
                case '!':
                case '@':
                case '#':
                case '*':
                case '?':
                case '%':
                case '|':
                case ',':
                case ':':
                case ';':
                case '^':
                case '&':
                    return false;
                default:
                    break;
            }
        }
        return true;
    }
}
public class TcpConnecter { /**
 * Internal function to start the actual connection establishment.
 * Opens the connecting socket and either completes immediately (synchronous
 * connect), registers the fd for connect-completion polling (asynchronous
 * connect), or falls back to a reconnect timer on failure. */
private void startConnecting ( ) { } } | // Open the connecting socket .
try { boolean rc = open ( ) ; // Connect may succeed in synchronous manner .
if ( rc ) { handle = ioObject . addFd ( fd ) ; connectEvent ( ) ; } // Connection establishment may be delayed . Poll for its completion .
else { handle = ioObject . addFd ( fd ) ; ioObject . setPollConnect ( handle ) ; socket . eventConnectDelayed ( addr . toString ( ) , - 1 ) ; } } catch ( RuntimeException | IOException e ) { // Handle any other error condition by eventual reconnect .
// NOTE(review): fd may already be set when open() throws; close() is only called in that case — confirm close() resets fd so the retry starts clean.
if ( fd != null ) { close ( ) ; } addReconnectTimer ( ) ; }
public class ClustersInner { /**
 * Configures the gateway settings on the specified cluster.
 * Validates all required client state and arguments up front, then issues the
 * long-running "begin update gateway settings" request asynchronously.
 * @ param resourceGroupName The name of the resource group .
 * @ param clusterName The name of the cluster .
 * @ param parameters The cluster configurations .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ return the { @ link ServiceResponse } object if successful . */
// Null-checks cover client-held values (subscriptionId, apiVersion) as well as arguments; Validator.validate walks the parameter object's annotated constraints.
public Observable < ServiceResponse < Void > > beginUpdateGatewaySettingsWithServiceResponseAsync ( String resourceGroupName , String clusterName , UpdateGatewaySettingsParameters parameters ) { } } | if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( clusterName == null ) { throw new IllegalArgumentException ( "Parameter clusterName is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } if ( parameters == null ) { throw new IllegalArgumentException ( "Parameter parameters is required and cannot be null." ) ; } Validator . validate ( parameters ) ; return service . beginUpdateGatewaySettings ( this . client . subscriptionId ( ) , resourceGroupName , clusterName , this . client . apiVersion ( ) , parameters , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < Void > > > ( ) { @ Override public Observable < ServiceResponse < Void > > call ( Response < ResponseBody > response ) { try { ServiceResponse < Void > clientResponse = beginUpdateGatewaySettingsDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ; |
public class _Private_IonTextAppender { /** * Print an Ion triple - quoted string
* @ param text
* @ throws IOException */
public final void printLongString ( CharSequence text ) throws IOException { } } | if ( text == null ) { appendAscii ( "null.string" ) ; } else { appendAscii ( TRIPLE_QUOTES ) ; printCodePoints ( text , LONG_STRING_ESCAPE_CODES ) ; appendAscii ( TRIPLE_QUOTES ) ; } |
public class Util { /** * Given a base URL ( like ' http : / / localhost : 8080 ' ) this will append the given context string to it and will return
* the URL with a forward - slash as its last character .
* This returns a StringBuilder so the caller can continue building its desired URL by appending to it additional
* context paths , query strings , and the like .
* @ param baseUrl base URL to append the given context to
* @ param context the context to add to the given base URL
* @ return the base URL with the context appended to it
* @ throws MalformedURLException if URL cannot be built */
public static StringBuilder getContextUrlString ( String baseUrl , String context ) throws MalformedURLException { } } | StringBuilder urlStr = new StringBuilder ( baseUrl ) ; ensureEndsWithSlash ( urlStr ) ; if ( context != null && context . length ( ) > 0 ) { if ( context . startsWith ( "/" ) ) { urlStr . append ( context . substring ( 1 ) ) ; } else { urlStr . append ( context ) ; } ensureEndsWithSlash ( urlStr ) ; } return urlStr ; |
public class RendererFactory { /** * Returns the CSS Bundle renderer
* @ param bundler
* the bundler
* @ param useRandomParam
* the flag indicating if we use the random flag
* @ param media
* the media
* @ param alternate
* the alternate flag
* @ param displayAlternateStyles
* the flag indicating if the alternate styles must be displayed
* @ param title
* the title
* @ return the Css Bundle renderer */
public final static CssBundleLinkRenderer getCssBundleRenderer ( ResourceBundlesHandler bundler , Boolean useRandomParam , String media , boolean alternate , boolean displayAlternateStyles , String title ) { } } | CssBundleLinkRenderer renderer = ( CssBundleLinkRenderer ) ClassLoaderResourceUtils . buildObjectInstance ( bundler . getConfig ( ) . getCssBundleLinkRenderClass ( ) ) ; renderer . init ( bundler , useRandomParam , media , alternate , displayAlternateStyles , title ) ; return renderer ; |
public class MySQLAuthenticationHandler { /** * Login .
* @ param username connection username
* @ param authResponse connection auth response
* @ return login success or failure */
public boolean login ( final String username , final byte [ ] authResponse ) { } } | Optional < ProxyUser > user = getUser ( username ) ; return user . isPresent ( ) && ( Strings . isNullOrEmpty ( user . get ( ) . getPassword ( ) ) || Arrays . equals ( getAuthCipherBytes ( user . get ( ) . getPassword ( ) ) , authResponse ) ) ; |
public class BasicDeviceFactory { /** * Remove a device
* @ param id The device id
* @ throws UnknownDeviceException
* @ throws NullIdException */
public void removeDevice ( String id ) throws UnknownDeviceException , NullIdException { } } | if ( ( id == null ) || ( id . trim ( ) . equals ( "" ) ) ) { throw new NullIdException ( ) ; } if ( this . devices . containsKey ( id ) ) { this . devices . remove ( id ) ; } else { throw new UnknownDeviceException ( ) ; } |
public class GetQualificationScoreRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetQualificationScoreRequest getQualificationScoreRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getQualificationScoreRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getQualificationScoreRequest . getQualificationTypeId ( ) , QUALIFICATIONTYPEID_BINDING ) ; protocolMarshaller . marshall ( getQualificationScoreRequest . getWorkerId ( ) , WORKERID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class StorageBatch { /** * Adds a request representing the " update blob " operation to this batch . The { @ code options } can
* be used in the same way as for { @ link Storage # update ( BlobInfo , BlobTargetOption . . . ) } . Calling
* { @ link StorageBatchResult # get ( ) } on the return value yields the updated { @ link Blob } if
* successful , or throws a { @ link StorageException } if the operation failed . */
public StorageBatchResult < Blob > update ( BlobInfo blobInfo , BlobTargetOption ... options ) { } } | StorageBatchResult < Blob > result = new StorageBatchResult < > ( ) ; RpcBatch . Callback < StorageObject > callback = createUpdateCallback ( this . options , result ) ; Map < StorageRpc . Option , ? > optionMap = StorageImpl . optionMap ( blobInfo , options ) ; batch . addPatch ( blobInfo . toPb ( ) , callback , optionMap ) ; return result ; |
public class JarUtils { /** * Gets a list of jar files in the classpath ( this includes jars in jars , one level deep ) .
* @ return An array of { @ link URL } s found in the classpath
* @ throws IOException If there is trouble reading the classpath */
public static URL [ ] getJarURLs ( ) throws IOException { } } | final List < URL > urlList = new LinkedList < > ( ) ; for ( final JarFile jarFile : ClasspathUtils . getJarFiles ( ) ) { final Manifest manifest = jarFile . getManifest ( ) ; final URL jarURL = new URL ( JAR_URL_PROTOCOL + jarFile . getName ( ) + "!/" ) ; urlList . add ( jarURL ) ; if ( manifest != null ) { final Attributes attributes = manifest . getMainAttributes ( ) ; if ( attributes != null ) { final String classpath = attributes . getValue ( "Class-Path" ) ; if ( classpath != null ) { final StringTokenizer tokenizer = new StringTokenizer ( classpath ) ; while ( tokenizer . hasMoreTokens ( ) ) { final String jarPath = tokenizer . nextToken ( ) ; if ( jarPath . endsWith ( ".jar" ) ) { urlList . add ( new URL ( jarURL . toExternalForm ( ) + jarPath ) ) ; } } } } } jarFile . close ( ) ; } return urlList . toArray ( new URL [ 0 ] ) ; |
public class CheckUpdateApi { /** * 应用自升级接口
* @ param handler 应用自升级结果回调 */
public void checkUpdate ( Activity activity , CheckUpdateHandler handler ) { } } | HMSAgentLog . i ( "checkUpdate:handler=" + StrUtils . objDesc ( handler ) ) ; this . handler = handler ; this . activity = activity ; connect ( ) ; |
public class SnapshotUtil { /** * Read hashinator snapshots into byte buffers .
* @ param path base snapshot path
* @ param nonce unique snapshot name
* @ param maxConfigs max number of good configs to return ( 0 for all )
* @ param logger log writer
* @ return byte buffers for each host
* @ throws IOException */
public static List < ByteBuffer > retrieveHashinatorConfigs ( String path , String nonce , int maxConfigs , VoltLogger logger ) throws IOException { } } | VoltFile directory = new VoltFile ( path ) ; ArrayList < ByteBuffer > configs = new ArrayList < ByteBuffer > ( ) ; if ( directory . listFiles ( ) == null ) { return configs ; } for ( File file : directory . listFiles ( ) ) { if ( file . getName ( ) . startsWith ( nonce + "-host_" ) && file . getName ( ) . endsWith ( HASH_EXTENSION ) ) { byte [ ] rawData = new byte [ ( int ) file . length ( ) ] ; FileInputStream fis = null ; DataInputStream dis = null ; try { fis = new FileInputStream ( file ) ; dis = new DataInputStream ( fis ) ; dis . readFully ( rawData ) ; configs . add ( ByteBuffer . wrap ( rawData ) ) ; } finally { if ( dis != null ) { dis . close ( ) ; } if ( fis != null ) { fis . close ( ) ; } } } } return configs ; |
public class Model { /**
 * Registers a date format for the specified attributes. This format will be
 * used to convert between Date -> String -> java.sql.Timestamp when using the
 * appropriate getters and setters.
 *
 * <p>For example:
 * <blockquote><pre>
 * public class Person extends Model {
 *     static {
 *         timestampFormat("MM/dd/yyyy hh:mm a", "birth_datetime");
 *     }
 * }
 * Person p = new Person();
 * // will convert String -&gt; java.sql.Timestamp
 * p.setTimestamp("birth_datetime", "02/29/2000 12:07 PM");
 * // will convert Date -&gt; String, if the model value is a Date or java.sql.Timestamp
 * String str = p.getString("birth_datetime");
 * // will convert Date -&gt; String
 * p.setString("birth_datetime", new Date());
 * // will convert String -&gt; java.sql.Timestamp, if the model value is a String
 * Timestamp ts = p.getTimestamp("birth_datetime");
 * </pre></blockquote>
 *
 * @ param pattern pattern to use for conversion
 * @ param attributeNames attribute names */
// Registration is delegated to the per-model-class registry keyed by modelClass().
protected static void timestampFormat ( String pattern , String ... attributeNames ) { } } | ModelDelegate . timestampFormat ( modelClass ( ) , pattern , attributeNames ) ; |
public class JmxClient {
    /**
     * Finds connection-lost type notifications: notifications lost, connection
     * closed, or connection failed.
     *
     * @param connectionNotification the JMX connection notification to inspect
     * @return {@code true} if the notification signals a lost connection
     */
    private boolean connectionLost(JMXConnectionNotification connectionNotification) {
        final String type = connectionNotification.getType();
        return type.equals(JMXConnectionNotification.NOTIFS_LOST)
                || type.equals(JMXConnectionNotification.CLOSED)
                || type.equals(JMXConnectionNotification.FAILED);
    }
}
public class WireFeedOutput { /** * Writes to an Writer the XML representation for the given WireFeed .
* If the feed encoding is not NULL , it will be used in the XML prolog encoding attribute . It is
* the responsibility of the developer to ensure the Writer instance is using the same charset
* encoding .
* NOTE : This method delages to the ' Document WireFeedOutput # outputJDom ( WireFeed ) ' .
* @ param feed Abstract feed to create XML representation from . The type of the WireFeed must
* match the type given to the FeedOuptut constructor .
* @ param writer Writer to write the XML representation for the given WireFeed .
* @ param prettyPrint pretty - print XML ( true ) oder collapsed
* @ throws IllegalArgumentException thrown if the feed type of the WireFeedOutput and WireFeed
* don ' t match .
* @ throws IOException thrown if there was some problem writing to the Writer .
* @ throws FeedException thrown if the XML representation for the feed could not be created . */
public void output ( final WireFeed feed , final Writer writer , final boolean prettyPrint ) throws IllegalArgumentException , IOException , FeedException { } } | final Document doc = outputJDom ( feed ) ; final String encoding = feed . getEncoding ( ) ; Format format ; if ( prettyPrint ) { format = Format . getPrettyFormat ( ) ; } else { format = Format . getCompactFormat ( ) ; } if ( encoding != null ) { format . setEncoding ( encoding ) ; } final XMLOutputter outputter = new XMLOutputter ( format ) ; outputter . output ( doc , writer ) ; |
public class Bpmn2JsonUnmarshaller { /**
 * Reconnects the sequence flows and the flow nodes.
 * Done after the initial pass so that we have all the target information:
 * walks the recorded outgoing-flow ids and wires source/target references
 * between SequenceFlows, FlowNodes and Associations via the id map. */
private void reconnectFlows ( ) { } } | // create the reverse id map :
// Each entry maps a source element to the ids of its outgoing connections;
// the referenced element is resolved through _idMap and wired by its type.
for ( Entry < Object , List < String > > entry : _outgoingFlows . entrySet ( ) ) { for ( String flowId : entry . getValue ( ) ) { if ( entry . getKey ( ) instanceof SequenceFlow ) { // if it is a sequence flow , we can tell its targets
if ( _idMap . get ( flowId ) instanceof FlowNode ) { ( ( SequenceFlow ) entry . getKey ( ) ) . setTargetRef ( ( FlowNode ) _idMap . get ( flowId ) ) ; } if ( _idMap . get ( flowId ) instanceof Association ) { ( ( Association ) _idMap . get ( flowId ) ) . setTargetRef ( ( SequenceFlow ) entry . getKey ( ) ) ; } } else if ( entry . getKey ( ) instanceof Association ) { ( ( Association ) entry . getKey ( ) ) . setTargetRef ( ( BaseElement ) _idMap . get ( flowId ) ) ; } else { // if it is a node , we can map it to its outgoing sequence flows
// NOTE(review): an unknown flowId (null from _idMap) is silently skipped here — presumably intentional for dangling references; confirm.
if ( _idMap . get ( flowId ) instanceof SequenceFlow ) { ( ( FlowNode ) entry . getKey ( ) ) . getOutgoing ( ) . add ( ( SequenceFlow ) _idMap . get ( flowId ) ) ; } else if ( _idMap . get ( flowId ) instanceof Association ) { ( ( Association ) _idMap . get ( flowId ) ) . setSourceRef ( ( BaseElement ) entry . getKey ( ) ) ; } } } } |
public class DNSResolver { /**
 * Gets a list of service (SRV) records for the specified service.
 * @ param name the symbolic DNS name of the service
 * @ param failedAddresses list that failed addresses are collected into
 * @ param dnssecMode security mode; validated for support before the lookup
 * @ return the list of SRV records mapped to the service name */
// Fails fast if DNSSEC was requested but is unsupported by this resolver,
// then delegates to the implementation-specific lookup.
public final List < SRVRecord > lookupSRVRecords ( DnsName name , List < HostAddress > failedAddresses , DnssecMode dnssecMode ) { } } | checkIfDnssecRequestedAndSupported ( dnssecMode ) ; return lookupSRVRecords0 ( name , failedAddresses , dnssecMode ) ; |
public class SingletonProvider { /** * Initialize with the default instance */
private static void initializeWithDefaultScope ( ) { } } | try { Class < ? > aClass = Class . forName ( DEFAULT_SCOPE_FACTORY ) ; INSTANCE = ( SingletonProvider ) aClass . newInstance ( ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } |
public class TableRef { /**
 * Decides the REST access type for the current filter set and, for
 * query-by-key access, extracts the primary-key value into {@code this.key}
 * (mutating {@code filters} as a side effect).
 * If this returns LISTITEMS the REST type is listItems; otherwise queryItems
 * (or getItem when no secondary key exists and the single filter is an
 * equality on the primary key). */
// Any operator unsupported by queryItems forces the listItems path up front.
private RestType _tryConstructKey ( TableMetadata tm ) { } } | for ( Filter f : filters ) if ( f . operator == StorageFilter . NOTEQUAL || f . operator == StorageFilter . NOTNULL || f . operator == StorageFilter . NULL || f . operator == StorageFilter . CONTAINS || f . operator == StorageFilter . NOTCONTAINS ) return RestType . LISTITEMS ; // because queryItems do not support notEqual , notNull , null , contains and notContains
// With a secondary key: a lone primary-key equality (or primary equality plus one secondary-key filter) becomes QUERYITEMS, consuming the primary filter into this.key. Without one: a lone primary-key equality is a direct GETITEM. Everything else falls through to LISTITEMS.
if ( tm . getSecondaryKeyName ( ) != null ) { if ( filters . size ( ) == 1 ) { Iterator < Filter > itr = filters . iterator ( ) ; Filter f = itr . next ( ) ; if ( f . itemName . equals ( tm . getPrimaryKeyName ( ) ) && f . operator == StorageFilter . EQUALS ) { this . key = new LinkedHashMap < String , Object > ( ) ; this . key . put ( "primary" , f . value ) ; filters . clear ( ) ; return RestType . QUERYITEMS ; } } else if ( filters . size ( ) == 2 ) { Object tValue = null ; Filter tFilter = null ; for ( Filter f : filters ) { if ( f . itemName . equals ( tm . getPrimaryKeyName ( ) ) && f . operator == StorageFilter . EQUALS ) { tValue = f . value ; } if ( f . itemName . equals ( tm . getSecondaryKeyName ( ) ) ) { tFilter = f ; } } if ( tValue != null && tFilter != null ) { filters . clear ( ) ; filters . add ( tFilter ) ; this . key = new LinkedHashMap < String , Object > ( ) ; this . key . put ( "primary" , tValue ) ; return RestType . QUERYITEMS ; } else { return RestType . LISTITEMS ; } } } else { if ( filters . size ( ) == 1 ) { Iterator < Filter > itr = filters . iterator ( ) ; Filter f = itr . next ( ) ; if ( f . itemName . equals ( tm . getPrimaryKeyName ( ) ) && f . operator == StorageFilter . EQUALS ) { return RestType . GETITEM ; } } } return RestType . LISTITEMS ; |
public class cacheselector { /** * Use this API to update cacheselector . */
public static base_response update ( nitro_service client , cacheselector resource ) throws Exception { } } | cacheselector updateresource = new cacheselector ( ) ; updateresource . selectorname = resource . selectorname ; updateresource . rule = resource . rule ; return updateresource . update_resource ( client ) ; |
public class IndexedSet { /**
 * A shortcut for {@code new IndexedSet<T>(itemToIndex, indexToItem, indices)}:
 * builds a new IndexedSet sharing this set's item-to-index and index-to-item
 * mappings but backed by the given index set.
 * @ param indx the indices the new set is backed by */
private IndexedSet < T > createFromIndices ( IntSet indx ) { } } | return new IndexedSet < T > ( itemToIndex , indexToItem , indx ) ; |
public class XmlUtil { /**
 * Reads an XML node via an XPath expression.
 * (Background article on the Java XPath API:
 * https://www.ibm.com/developerworks/cn/xml/x-javaxpathapi.html)
 * @ param expression the XPath expression to evaluate
 * @ param source the source to evaluate against; may be a Document, a Node, etc.
 * @ return the value matching the expression, returned as a {@code Node}
 * @ since 4.0.9 */
// Delegates to the generic XPath helper, requesting a NODE-typed result.
public static Node getNodeByXPath ( String expression , Object source ) { } } | return ( Node ) getByXPath ( expression , source , XPathConstants . NODE ) ; |
public class Computer { /**
 * Updates this computer's node by its XML definition.
 * (NOTE(review): the original javadoc said "Job", but the code replaces a
 * {@code Node} — presumably copied from a sibling method; confirmed by the
 * body below.)
 * @ param source stream containing the XML definition of the node
 * @ since 1.526 */
// Requires CONFIGURE permission; deserializes the node via XStream and swaps
// it into the Jenkins node list in place of the current node.
public void updateByXml ( final InputStream source ) throws IOException , ServletException { } } | checkPermission ( CONFIGURE ) ; Node result = ( Node ) Jenkins . XSTREAM2 . fromXML ( source ) ; Jenkins . getInstance ( ) . getNodesObject ( ) . replaceNode ( this . getNode ( ) , result ) ; |
public class ArrayUtil { /** * returns first index of < code > item < code > in given < code > array < / code > starting
* from < code > fromIndex < / code > ( inclusive )
* @ param array object array , can be null
* @ param item item to be searched , can be null
* @ param fromIndex index ( inclusive ) from which search happens .
* @ return - 1 if array is null , or the item is not found
* otherwize returns first index of item in array */
public static < T , S extends T > int indexOf ( T array [ ] , S item , int fromIndex ) { } } | if ( array == null ) return - 1 ; for ( int i = fromIndex ; i < array . length ; i ++ ) { if ( Util . equals ( array [ i ] , item ) ) return i ; } return - 1 ; |
public class ParserEvaluator { /** * TODO : Move this to a test case !
 * Ad-hoc driver: scores a deliberately perturbed parse tree against a gold
 * parse with {@code FMeasure} over constituency spans and prints the result. */
public static void main ( final String [ ] args ) { } } | final String goldParseString = "(TOP (S (NP (NNS Sales) (NNS executives)) (VP (VBD were) (VP (VBG examing) (NP (DT the) (NNS figures)) (PP (IN with) (NP (JJ great) (NN care))) )) (NP (NN yesterday)) (. .) ))" ; final Span [ ] goldConsts = getConstituencySpans ( Parse . parseParse ( goldParseString ) ) ; final String testParseString = "(TOP (S (NP (NNS Sales) (NNS executives)) (VP (VBD were) (VP (VBG examing) (NP (DT the) (NNS figures)) (PP (IN with) (NP (JJ great) (NN care) (NN yesterday))) )) (. .) ))" ; final Span [ ] testConsts = getConstituencySpans ( Parse . parseParse ( testParseString ) ) ; final FMeasure measure = new FMeasure ( ) ; measure . updateScores ( goldConsts , testConsts ) ; // Expected output :
// Precision : 0.42857142857142855
// Recall : 0.375
// F - Measure : 0.3999997
System . out . println ( measure . toString ( ) ) ; |
public class ClassReader { /** * Reads the bytecode of a method and makes the given visitor visit it .
 * The code is processed in two passes : a first pass over the bytecode creates
 * a Label for every branch / switch / exception target , and a second pass walks
 * the instructions again , emitting the visit events in order .
 * @ param mv
 * the visitor that must visit the method ' s code .
 * @ param context
 * information about the class being parsed .
 * @ param u
 * the start offset of the code attribute in the class file . */
private void readCode ( final MethodVisitor mv , final Context context , int u ) { } } |
// reads the header
byte [ ] b = this . b ;
char [ ] c = context . buffer ;
int maxStack = readUnsignedShort ( u ) ;
int maxLocals = readUnsignedShort ( u + 2 ) ;
int codeLength = readInt ( u + 4 ) ;
u += 8 ;
// reads the bytecode to find the labels ( first pass : creates labels only , no visit events )
int codeStart = u ;
int codeEnd = u + codeLength ;
Label [ ] labels = context . labels = new Label [ codeLength + 2 ] ;
createLabel ( codeLength + 1 , labels ) ;
while ( u < codeEnd ) {
    int offset = u - codeStart ;
    int opcode = b [ u ] & 0xFF ;
    // ClassWriter . TYPE classifies each opcode by its operand layout
    switch ( ClassWriter . TYPE [ opcode ] ) {
    case ClassWriter . NOARG_INSN :
    case ClassWriter . IMPLVAR_INSN :
        u += 1 ;
        break ;
    case ClassWriter . LABEL_INSN :
        createLabel ( offset + readShort ( u + 1 ) , labels ) ;
        u += 3 ;
        break ;
    case ClassWriter . ASM_LABEL_INSN :
        createLabel ( offset + readUnsignedShort ( u + 1 ) , labels ) ;
        u += 3 ;
        break ;
    case ClassWriter . LABELW_INSN :
    case ClassWriter . ASM_LABELW_INSN :
        createLabel ( offset + readInt ( u + 1 ) , labels ) ;
        u += 5 ;
        break ;
    case ClassWriter . WIDE_INSN :
        opcode = b [ u + 1 ] & 0xFF ;
        if ( opcode == Opcodes . IINC ) {
            u += 6 ;
        } else {
            u += 4 ;
        }
        break ;
    case ClassWriter . TABL_INSN :
        // skips 0 to 3 padding bytes
        u = u + 4 - ( offset & 3 ) ;
        // reads instruction
        createLabel ( offset + readInt ( u ) , labels ) ;
        for ( int i = readInt ( u + 8 ) - readInt ( u + 4 ) + 1 ; i > 0 ; -- i ) {
            createLabel ( offset + readInt ( u + 12 ) , labels ) ;
            u += 4 ;
        }
        u += 12 ;
        break ;
    case ClassWriter . LOOK_INSN :
        // skips 0 to 3 padding bytes
        u = u + 4 - ( offset & 3 ) ;
        // reads instruction
        createLabel ( offset + readInt ( u ) , labels ) ;
        for ( int i = readInt ( u + 4 ) ; i > 0 ; -- i ) {
            createLabel ( offset + readInt ( u + 12 ) , labels ) ;
            u += 8 ;
        }
        u += 8 ;
        break ;
    case ClassWriter . VAR_INSN :
    case ClassWriter . SBYTE_INSN :
    case ClassWriter . LDC_INSN :
        u += 2 ;
        break ;
    case ClassWriter . SHORT_INSN :
    case ClassWriter . LDCW_INSN :
    case ClassWriter . FIELDORMETH_INSN :
    case ClassWriter . TYPE_INSN :
    case ClassWriter . IINC_INSN :
        u += 3 ;
        break ;
    case ClassWriter . ITFMETH_INSN :
    case ClassWriter . INDYMETH_INSN :
        u += 5 ;
        break ;
    // case MANA _ INSN :
    default :
        u += 4 ;
        break ;
    }
}
// reads the try catch entries to find the labels , and also visits them
for ( int i = readUnsignedShort ( u ) ; i > 0 ; -- i ) {
    Label start = createLabel ( readUnsignedShort ( u + 2 ) , labels ) ;
    Label end = createLabel ( readUnsignedShort ( u + 4 ) , labels ) ;
    Label handler = createLabel ( readUnsignedShort ( u + 6 ) , labels ) ;
    String type = readUTF8 ( items [ readUnsignedShort ( u + 8 ) ] , c ) ;
    mv . visitTryCatchBlock ( start , end , handler , type ) ;
    u += 8 ;
}
u += 2 ;
// reads the code attributes
int [ ] tanns = null ; // start index of each visible type annotation
int [ ] itanns = null ; // start index of each invisible type annotation
int tann = 0 ; // current index in tanns array
int itann = 0 ; // current index in itanns array
int ntoff = - 1 ; // next visible type annotation code offset
int nitoff = - 1 ; // next invisible type annotation code offset
int varTable = 0 ;
int varTypeTable = 0 ;
boolean zip = true ;
boolean unzip = ( context . flags & EXPAND_FRAMES ) != 0 ;
int stackMap = 0 ;
int stackMapSize = 0 ;
int frameCount = 0 ;
Context frame = null ;
Attribute attributes = null ;
for ( int i = readUnsignedShort ( u ) ; i > 0 ; -- i ) {
    String attrName = readUTF8 ( u + 2 , c ) ;
    if ( "LocalVariableTable" . equals ( attrName ) ) {
        if ( ( context . flags & SKIP_DEBUG ) == 0 ) {
            varTable = u + 8 ;
            for ( int j = readUnsignedShort ( u + 8 ) , v = u ; j > 0 ; -- j ) {
                int label = readUnsignedShort ( v + 10 ) ;
                createDebugLabel ( label , labels ) ;
                label += readUnsignedShort ( v + 12 ) ;
                createDebugLabel ( label , labels ) ;
                v += 10 ;
            }
        }
    } else if ( "LocalVariableTypeTable" . equals ( attrName ) ) {
        varTypeTable = u + 8 ;
    } else if ( "LineNumberTable" . equals ( attrName ) ) {
        if ( ( context . flags & SKIP_DEBUG ) == 0 ) {
            for ( int j = readUnsignedShort ( u + 8 ) , v = u ; j > 0 ; -- j ) {
                int label = readUnsignedShort ( v + 10 ) ;
                createDebugLabel ( label , labels ) ;
                // a label can carry several line numbers ; extra ones are chained via l . next
                Label l = labels [ label ] ;
                while ( l . line > 0 ) {
                    if ( l . next == null ) {
                        l . next = new Label ( ) ;
                    }
                    l = l . next ;
                }
                l . line = readUnsignedShort ( v + 12 ) ;
                v += 4 ;
            }
        }
    } else if ( "RuntimeVisibleTypeAnnotations" . equals ( attrName ) ) {
        tanns = readTypeAnnotations ( mv , context , u + 8 , true ) ;
        ntoff = tanns . length == 0 || readByte ( tanns [ 0 ] ) < 0x43 ? - 1 : readUnsignedShort ( tanns [ 0 ] + 1 ) ;
    } else if ( "RuntimeInvisibleTypeAnnotations" . equals ( attrName ) ) {
        itanns = readTypeAnnotations ( mv , context , u + 8 , false ) ;
        nitoff = itanns . length == 0 || readByte ( itanns [ 0 ] ) < 0x43 ? - 1 : readUnsignedShort ( itanns [ 0 ] + 1 ) ;
    } else if ( "StackMapTable" . equals ( attrName ) ) {
        if ( ( context . flags & SKIP_FRAMES ) == 0 ) {
            stackMap = u + 10 ;
            stackMapSize = readInt ( u + 4 ) ;
            frameCount = readUnsignedShort ( u + 8 ) ;
        }
        /* here we do not extract the labels corresponding to the
         * attribute content . This would require a full parsing of the
         * attribute , which would need to be repeated in the second
         * phase ( see below ) . Instead the content of the attribute is
         * read one frame at a time ( i . e . after a frame has been
         * visited , the next frame is read ) , and the labels it contains
         * are also extracted one frame at a time . Thanks to the
         * ordering of frames , having only a " one frame lookahead " is
         * not a problem , i . e . it is not possible to see an offset
         * smaller than the offset of the current insn and for which no
         * Label exist . */
        /* This is not true for UNINITIALIZED type offsets . We solve
         * this by parsing the stack map table without a full decoding
         * ( see below ) . */
    } else if ( "StackMap" . equals ( attrName ) ) {
        if ( ( context . flags & SKIP_FRAMES ) == 0 ) {
            zip = false ;
            stackMap = u + 10 ;
            stackMapSize = readInt ( u + 4 ) ;
            frameCount = readUnsignedShort ( u + 8 ) ;
        }
        /* IMPORTANT ! here we assume that the frames are ordered , as in
         * the StackMapTable attribute , although this is not guaranteed
         * by the attribute format . */
    } else {
        // unknown attribute : delegate to the registered attribute prototypes
        for ( int j = 0 ; j < context . attrs . length ; ++ j ) {
            if ( context . attrs [ j ] . type . equals ( attrName ) ) {
                Attribute attr = context . attrs [ j ] . read ( this , u + 8 , readInt ( u + 4 ) , c , codeStart - 8 , labels ) ;
                if ( attr != null ) {
                    attr . next = attributes ;
                    attributes = attr ;
                }
            }
        }
    }
    u += 6 + readInt ( u + 4 ) ;
}
u += 2 ;
// generates the first ( implicit ) stack map frame
if ( stackMap != 0 ) {
    /* for the first explicit frame the offset is not offset _ delta + 1
     * but only offset _ delta ; setting the implicit frame offset to - 1
     * allow the use of the " offset _ delta + 1 " rule in all cases */
    frame = context ;
    frame . offset = - 1 ;
    frame . mode = 0 ;
    frame . localCount = 0 ;
    frame . localDiff = 0 ;
    frame . stackCount = 0 ;
    frame . local = new Object [ maxLocals ] ;
    frame . stack = new Object [ maxStack ] ;
    if ( unzip ) {
        getImplicitFrame ( context ) ;
    }
    /* Finds labels for UNINITIALIZED frame types . Instead of decoding
     * each element of the stack map table , we look for 3 consecutive
     * bytes that " look like " an UNINITIALIZED type ( tag 8 , offset
     * within code bounds , NEW instruction at this offset ) . We may find
     * false positives ( i . e . not real UNINITIALIZED types ) , but this
     * should be rare , and the only consequence will be the creation of
     * an unneeded label . This is better than creating a label for each
     * NEW instruction , and faster than fully decoding the whole stack
     * map table . */
    for ( int i = stackMap ; i < stackMap + stackMapSize - 2 ; ++ i ) {
        if ( b [ i ] == 8 ) { // UNINITIALIZED FRAME TYPE
            int v = readUnsignedShort ( i + 1 ) ;
            if ( v >= 0 && v < codeLength ) {
                if ( ( b [ codeStart + v ] & 0xFF ) == Opcodes . NEW ) {
                    createLabel ( v , labels ) ;
                }
            }
        }
    }
}
if ( ( context . flags & EXPAND_ASM_INSNS ) != 0 && ( context . flags & EXPAND_FRAMES ) != 0 ) {
    // Expanding the ASM pseudo instructions can introduce F _ INSERT
    // frames , even if the method does not currently have any frame .
    // Also these inserted frames must be computed by simulating the
    // effect of the bytecode instructions one by one , starting from the
    // first one and the last existing frame ( or the implicit first
    // one ) . Finally , due to the way MethodWriter computes this ( with
    // the compute = INSERTED _ FRAMES option ) , MethodWriter needs to know
    // maxLocals before the first instruction is visited . For all these
    // reasons we always visit the implicit first frame in this case
    // ( passing only maxLocals - the rest can be and is computed in
    // MethodWriter ) .
    mv . visitFrame ( Opcodes . F_NEW , maxLocals , null , 0 , null ) ;
}
// visits the instructions ( second pass : emits the visit events )
int opcodeDelta = ( context . flags & EXPAND_ASM_INSNS ) == 0 ? - 33 : 0 ;
boolean insertFrame = false ;
u = codeStart ;
while ( u < codeEnd ) {
    int offset = u - codeStart ;
    // visits the label and line number for this offset , if any
    Label l = labels [ offset ] ;
    if ( l != null ) {
        Label next = l . next ;
        l . next = null ;
        mv . visitLabel ( l ) ;
        if ( ( context . flags & SKIP_DEBUG ) == 0 && l . line > 0 ) {
            mv . visitLineNumber ( l . line , l ) ;
            while ( next != null ) {
                mv . visitLineNumber ( next . line , l ) ;
                next = next . next ;
            }
        }
    }
    // visits the frame for this offset , if any
    while ( frame != null && ( frame . offset == offset || frame . offset == - 1 ) ) {
        // if there is a frame for this offset , makes the visitor visit
        // it , and reads the next frame if there is one .
        if ( frame . offset != - 1 ) {
            if ( ! zip || unzip ) {
                mv . visitFrame ( Opcodes . F_NEW , frame . localCount , frame . local , frame . stackCount , frame . stack ) ;
            } else {
                mv . visitFrame ( frame . mode , frame . localDiff , frame . local , frame . stackCount , frame . stack ) ;
            }
            // if there is already a frame for this offset , there is no
            // need to insert a new one .
            insertFrame = false ;
        }
        if ( frameCount > 0 ) {
            stackMap = readFrame ( stackMap , zip , unzip , frame ) ;
            -- frameCount ;
        } else {
            frame = null ;
        }
    }
    // inserts a frame for this offset , if requested by setting
    // insertFrame to true during the previous iteration . The actual
    // frame content will be computed in MethodWriter .
    if ( insertFrame ) {
        mv . visitFrame ( ClassWriter . F_INSERT , 0 , null , 0 , null ) ;
        insertFrame = false ;
    }
    // visits the instruction at this offset
    int opcode = b [ u ] & 0xFF ;
    switch ( ClassWriter . TYPE [ opcode ] ) {
    case ClassWriter . NOARG_INSN :
        mv . visitInsn ( opcode ) ;
        u += 1 ;
        break ;
    case ClassWriter . IMPLVAR_INSN :
        if ( opcode > Opcodes . ISTORE ) {
            opcode -= 59 ; // ISTORE _ 0
            mv . visitVarInsn ( Opcodes . ISTORE + ( opcode >> 2 ) , opcode & 0x3 ) ;
        } else {
            opcode -= 26 ; // ILOAD _ 0
            mv . visitVarInsn ( Opcodes . ILOAD + ( opcode >> 2 ) , opcode & 0x3 ) ;
        }
        u += 1 ;
        break ;
    case ClassWriter . LABEL_INSN :
        mv . visitJumpInsn ( opcode , labels [ offset + readShort ( u + 1 ) ] ) ;
        u += 3 ;
        break ;
    case ClassWriter . LABELW_INSN :
        mv . visitJumpInsn ( opcode + opcodeDelta , labels [ offset + readInt ( u + 1 ) ] ) ;
        u += 5 ;
        break ;
    case ClassWriter . ASM_LABEL_INSN : {
        // changes temporary opcodes 202 to 217 ( inclusive ) , 218
        // and 219 to IFEQ . . . JSR ( inclusive ) , IFNULL and
        // IFNONNULL
        opcode = opcode < 218 ? opcode - 49 : opcode - 20 ;
        Label target = labels [ offset + readUnsignedShort ( u + 1 ) ] ;
        // replaces GOTO with GOTO _ W , JSR with JSR _ W and IFxxx
        // < l > with IFNOTxxx < L > GOTO _ W < l > L : . . . , where IFNOTxxx is
        // the " opposite " opcode of IFxxx ( i . e . , IFNE for IFEQ )
        // and where < L > designates the instruction just after
        // the GOTO _ W .
        if ( opcode == Opcodes . GOTO || opcode == Opcodes . JSR ) {
            mv . visitJumpInsn ( opcode + 33 , target ) ;
        } else {
            opcode = opcode <= 166 ? ( ( opcode + 1 ) ^ 1 ) - 1 : opcode ^ 1 ;
            Label endif = createLabel ( offset + 3 , labels ) ;
            mv . visitJumpInsn ( opcode , endif ) ;
            mv . visitJumpInsn ( 200 , target ) ; // GOTO _ W
            // endif designates the instruction just after GOTO _ W ,
            // and is visited as part of the next instruction . Since
            // it is a jump target , we need to insert a frame here .
            insertFrame = true ;
        }
        u += 3 ;
        break ;
    }
    case ClassWriter . ASM_LABELW_INSN : {
        // replaces the pseudo GOTO _ W instruction with a real one .
        mv . visitJumpInsn ( 200 , labels [ offset + readInt ( u + 1 ) ] ) ;
        // The instruction just after is a jump target ( because pseudo
        // GOTO _ W are used in patterns IFNOTxxx < L > GOTO _ W < l > L : . . . ,
        // see MethodWriter ) , so we need to insert a frame here .
        insertFrame = true ;
        u += 5 ;
        break ;
    }
    case ClassWriter . WIDE_INSN :
        opcode = b [ u + 1 ] & 0xFF ;
        if ( opcode == Opcodes . IINC ) {
            mv . visitIincInsn ( readUnsignedShort ( u + 2 ) , readShort ( u + 4 ) ) ;
            u += 6 ;
        } else {
            mv . visitVarInsn ( opcode , readUnsignedShort ( u + 2 ) ) ;
            u += 4 ;
        }
        break ;
    case ClassWriter . TABL_INSN : {
        // skips 0 to 3 padding bytes
        u = u + 4 - ( offset & 3 ) ;
        // reads instruction
        int label = offset + readInt ( u ) ;
        int min = readInt ( u + 4 ) ;
        int max = readInt ( u + 8 ) ;
        Label [ ] table = new Label [ max - min + 1 ] ;
        u += 12 ;
        for ( int i = 0 ; i < table . length ; ++ i ) {
            table [ i ] = labels [ offset + readInt ( u ) ] ;
            u += 4 ;
        }
        mv . visitTableSwitchInsn ( min , max , labels [ label ] , table ) ;
        break ;
    }
    case ClassWriter . LOOK_INSN : {
        // skips 0 to 3 padding bytes
        u = u + 4 - ( offset & 3 ) ;
        // reads instruction
        int label = offset + readInt ( u ) ;
        int len = readInt ( u + 4 ) ;
        int [ ] keys = new int [ len ] ;
        Label [ ] values = new Label [ len ] ;
        u += 8 ;
        for ( int i = 0 ; i < len ; ++ i ) {
            keys [ i ] = readInt ( u ) ;
            values [ i ] = labels [ offset + readInt ( u + 4 ) ] ;
            u += 8 ;
        }
        mv . visitLookupSwitchInsn ( labels [ label ] , keys , values ) ;
        break ;
    }
    case ClassWriter . VAR_INSN :
        mv . visitVarInsn ( opcode , b [ u + 1 ] & 0xFF ) ;
        u += 2 ;
        break ;
    case ClassWriter . SBYTE_INSN :
        mv . visitIntInsn ( opcode , b [ u + 1 ] ) ;
        u += 2 ;
        break ;
    case ClassWriter . SHORT_INSN :
        mv . visitIntInsn ( opcode , readShort ( u + 1 ) ) ;
        u += 3 ;
        break ;
    case ClassWriter . LDC_INSN :
        mv . visitLdcInsn ( readConst ( b [ u + 1 ] & 0xFF , c ) ) ;
        u += 2 ;
        break ;
    case ClassWriter . LDCW_INSN :
        mv . visitLdcInsn ( readConst ( readUnsignedShort ( u + 1 ) , c ) ) ;
        u += 3 ;
        break ;
    case ClassWriter . FIELDORMETH_INSN :
    case ClassWriter . ITFMETH_INSN : {
        int cpIndex = items [ readUnsignedShort ( u + 1 ) ] ;
        boolean itf = b [ cpIndex - 1 ] == ClassWriter . IMETH ;
        String iowner = readClass ( cpIndex , c ) ;
        cpIndex = items [ readUnsignedShort ( cpIndex + 2 ) ] ;
        String iname = readUTF8 ( cpIndex , c ) ;
        String idesc = readUTF8 ( cpIndex + 2 , c ) ;
        if ( opcode < Opcodes . INVOKEVIRTUAL ) {
            mv . visitFieldInsn ( opcode , iowner , iname , idesc ) ;
        } else {
            mv . visitMethodInsn ( opcode , iowner , iname , idesc , itf ) ;
        }
        if ( opcode == Opcodes . INVOKEINTERFACE ) {
            u += 5 ;
        } else {
            u += 3 ;
        }
        break ;
    }
    case ClassWriter . INDYMETH_INSN : {
        int cpIndex = items [ readUnsignedShort ( u + 1 ) ] ;
        int bsmIndex = context . bootstrapMethods [ readUnsignedShort ( cpIndex ) ] ;
        Handle bsm = ( Handle ) readConst ( readUnsignedShort ( bsmIndex ) , c ) ;
        int bsmArgCount = readUnsignedShort ( bsmIndex + 2 ) ;
        Object [ ] bsmArgs = new Object [ bsmArgCount ] ;
        bsmIndex += 4 ;
        for ( int i = 0 ; i < bsmArgCount ; i ++ ) {
            bsmArgs [ i ] = readConst ( readUnsignedShort ( bsmIndex ) , c ) ;
            bsmIndex += 2 ;
        }
        cpIndex = items [ readUnsignedShort ( cpIndex + 2 ) ] ;
        String iname = readUTF8 ( cpIndex , c ) ;
        String idesc = readUTF8 ( cpIndex + 2 , c ) ;
        mv . visitInvokeDynamicInsn ( iname , idesc , bsm , bsmArgs ) ;
        u += 5 ;
        break ;
    }
    case ClassWriter . TYPE_INSN :
        mv . visitTypeInsn ( opcode , readClass ( u + 1 , c ) ) ;
        u += 3 ;
        break ;
    case ClassWriter . IINC_INSN :
        mv . visitIincInsn ( b [ u + 1 ] & 0xFF , b [ u + 2 ] ) ;
        u += 3 ;
        break ;
    // case MANA _ INSN :
    default :
        mv . visitMultiANewArrayInsn ( readClass ( u + 1 , c ) , b [ u + 3 ] & 0xFF ) ;
        u += 4 ;
        break ;
    }
    // visit the instruction annotations , if any
    while ( tanns != null && tann < tanns . length && ntoff <= offset ) {
        if ( ntoff == offset ) {
            int v = readAnnotationTarget ( context , tanns [ tann ] ) ;
            readAnnotationValues ( v + 2 , c , true , mv . visitInsnAnnotation ( context . typeRef , context . typePath , readUTF8 ( v , c ) , true ) ) ;
        }
        ntoff = ++ tann >= tanns . length || readByte ( tanns [ tann ] ) < 0x43 ? - 1 : readUnsignedShort ( tanns [ tann ] + 1 ) ;
    }
    while ( itanns != null && itann < itanns . length && nitoff <= offset ) {
        if ( nitoff == offset ) {
            int v = readAnnotationTarget ( context , itanns [ itann ] ) ;
            readAnnotationValues ( v + 2 , c , true , mv . visitInsnAnnotation ( context . typeRef , context . typePath , readUTF8 ( v , c ) , false ) ) ;
        }
        nitoff = ++ itann >= itanns . length || readByte ( itanns [ itann ] ) < 0x43 ? - 1 : readUnsignedShort ( itanns [ itann ] + 1 ) ;
    }
}
if ( labels [ codeLength ] != null ) {
    mv . visitLabel ( labels [ codeLength ] ) ;
}
// visits the local variable tables
if ( ( context . flags & SKIP_DEBUG ) == 0 && varTable != 0 ) {
    int [ ] typeTable = null ;
    if ( varTypeTable != 0 ) {
        u = varTypeTable + 2 ;
        typeTable = new int [ readUnsignedShort ( varTypeTable ) * 3 ] ;
        for ( int i = typeTable . length ; i > 0 ; ) {
            typeTable [ -- i ] = u + 6 ; // signature
            typeTable [ -- i ] = readUnsignedShort ( u + 8 ) ; // index
            typeTable [ -- i ] = readUnsignedShort ( u ) ; // start
            u += 10 ;
        }
    }
    u = varTable + 2 ;
    for ( int i = readUnsignedShort ( varTable ) ; i > 0 ; -- i ) {
        int start = readUnsignedShort ( u ) ;
        int length = readUnsignedShort ( u + 2 ) ;
        int index = readUnsignedShort ( u + 8 ) ;
        String vsignature = null ;
        if ( typeTable != null ) {
            // matches the LocalVariableTypeTable entry with the same start and index
            for ( int j = 0 ; j < typeTable . length ; j += 3 ) {
                if ( typeTable [ j ] == start && typeTable [ j + 1 ] == index ) {
                    vsignature = readUTF8 ( typeTable [ j + 2 ] , c ) ;
                    break ;
                }
            }
        }
        mv . visitLocalVariable ( readUTF8 ( u + 4 , c ) , readUTF8 ( u + 6 , c ) , vsignature , labels [ start ] , labels [ start + length ] , index ) ;
        u += 10 ;
    }
}
// visits the local variables type annotations
if ( tanns != null ) {
    for ( int i = 0 ; i < tanns . length ; ++ i ) {
        if ( ( readByte ( tanns [ i ] ) >> 1 ) == ( 0x40 >> 1 ) ) {
            int v = readAnnotationTarget ( context , tanns [ i ] ) ;
            v = readAnnotationValues ( v + 2 , c , true , mv . visitLocalVariableAnnotation ( context . typeRef , context . typePath , context . start , context . end , context . index , readUTF8 ( v , c ) , true ) ) ;
        }
    }
}
if ( itanns != null ) {
    for ( int i = 0 ; i < itanns . length ; ++ i ) {
        if ( ( readByte ( itanns [ i ] ) >> 1 ) == ( 0x40 >> 1 ) ) {
            int v = readAnnotationTarget ( context , itanns [ i ] ) ;
            v = readAnnotationValues ( v + 2 , c , true , mv . visitLocalVariableAnnotation ( context . typeRef , context . typePath , context . start , context . end , context . index , readUTF8 ( v , c ) , false ) ) ;
        }
    }
}
// visits the code attributes
while ( attributes != null ) {
    Attribute attr = attributes . next ;
    attributes . next = null ;
    mv . visitAttribute ( attributes ) ;
    attributes = attr ;
}
// visits the max stack and max locals values
mv . visitMaxs ( maxStack , maxLocals ) ; |
public class RuleElementImpl { /** * Returns the value of the structural feature identified by featureID .
 * NOTE ( review ) : EMF - generated accessor - regenerate from the model rather than hand - editing .
 * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * @ generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } } |
// dispatch on the feature ID ; unknown features are delegated to the superclass
switch ( featureID ) { case SimpleAntlrPackage . RULE_ELEMENT__GUARD : return getGuard ( ) ; case SimpleAntlrPackage . RULE_ELEMENT__GUARDED : return getGuarded ( ) ; } return super . eGet ( featureID , resolve , coreType ) ; |
public class ByteRangeList { /** * convert this object to a String , in the format
* of argument of REST GridFTP command , for instance :
* " 0-29,32-89"
* The resulting String will preserve the features
* of ByteRangeList : ( 1 ) order and ( 2 ) separation */
public String toFtpCmdArgument ( ) { } } | char comma = ',' ; boolean first = true ; StringBuffer result = new StringBuffer ( ) ; for ( int i = 0 ; i < vector . size ( ) ; i ++ ) { if ( first ) { first = false ; } else { result . append ( comma ) ; } result . append ( vector . elementAt ( i ) . toString ( ) ) ; } return result . toString ( ) ; |
public class PuiModelSync { /** * Builds basically a JSON structure from the JSF model .
* @ param model
* the model to be built
* @ param key
* variable name
* @ param value
* variable value
* @ param cacheable
* if true , the value is only sent if it ' s different from the
* value of the same attribute in the previous response */
@ SuppressWarnings ( "unchecked" ) public static void addJSFAttrbituteToAngularModel ( Map < String , Object > model , String key , Object value , boolean cacheable ) { } } | Map < String , Object > sessionMap = FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getSessionMap ( ) ; Map < String , Object > cache = ( Map < String , Object > ) sessionMap . get ( JSF_ATTRIBUTES_SESSION_CACHE ) ; if ( cache . containsKey ( key ) ) { if ( cacheable ) if ( FacesContext . getCurrentInstance ( ) . isPostback ( ) ) { Object previousValue = cache . get ( key ) ; if ( null == value && previousValue == null ) { return ; } if ( null != value && value . equals ( previousValue ) ) { return ; } } cache . remove ( key ) ; } cache . put ( key , value ) ; String [ ] keys = key . split ( "\\." ) ; Map < String , Object > currentMap = model ; for ( int i = 0 ; i < keys . length - 1 ; i ++ ) { if ( ! currentMap . containsKey ( keys [ i ] ) ) { currentMap . put ( keys [ i ] , new HashMap < String , Object > ( ) ) ; } final Object object = currentMap . get ( keys [ i ] ) ; if ( ! ( object instanceof Map ) ) { // the parent object has already been stored
return ; } currentMap = ( Map < String , Object > ) object ; } currentMap . put ( keys [ keys . length - 1 ] , value ) ; |
public class HBaseDataHandler { /** * Join - table lookups are not supported by the HBase client ; this method always throws .
 * ( non - Javadoc )
 * @ see
 * com . impetus . client . hbase . admin . DataHandler # findParentEntityFromJoinTable
 * ( com . impetus . kundera . metadata . model . EntityMetadata , java . lang . String ,
 * java . lang . String , java . lang . String , java . lang . Object ) */
@ Override public List < Object > findParentEntityFromJoinTable ( EntityMetadata parentMetadata , String joinTableName , String joinColumnName , String inverseJoinColumnName , Object childId ) { } } |
// deliberately unsupported : HBase has no join - table concept
throw new PersistenceException ( "Not applicable for HBase" ) ; |
public class LocalConnection { /** * Handles a response error . */
private void handleResponseError ( long requestId , Throwable error ) { } } | ContextualFuture future = futures . remove ( requestId ) ; if ( future != null ) { future . context . execute ( ( ) -> future . completeExceptionally ( error ) ) ; } |
public class ApiOvhCloud { /** * Delete a volume snapshot
* REST : DELETE / cloud / project / { serviceName } / volume / snapshot / { snapshotId }
* @ param serviceName [ required ] Project id
* @ param snapshotId [ required ] Snapshot id */
public void project_serviceName_volume_snapshot_snapshotId_DELETE ( String serviceName , String snapshotId ) throws IOException { } } | String qPath = "/cloud/project/{serviceName}/volume/snapshot/{snapshotId}" ; StringBuilder sb = path ( qPath , serviceName , snapshotId ) ; exec ( qPath , "DELETE" , sb . toString ( ) , null ) ; |
public class Postcard { /** * Inserts a CharSequence value into the mapping of this Bundle , replacing
 * any existing value for the given key . Either key or value may be null .
 * @ param key a String , or null
 * @ param value a CharSequence , or null
 * @ return current Postcard instance , so calls can be chained fluently */
public Postcard withCharSequence ( @ Nullable String key , @ Nullable CharSequence value ) { } } |
// delegates straight to the backing Bundle and returns this for chaining
mBundle . putCharSequence ( key , value ) ; return this ; |
public class CardAPI { /** * Creates a QR code for claiming a single card / coupon by scanning ( original doc : 设置扫描二维码领取单张卡券 ) .
 * @ param accessToken accessToken
 * @ param action action payload describing the QR code to create
 * @ return result of the create call */
public static QrCodeCreateResult qrcodeCreate ( String accessToken , QrCodeCreate action ) { } } |
// serialize the action to JSON and delegate to the String - based overload
return qrcodeCreate ( accessToken , JsonUtil . toJSONString ( action ) ) ; |
public class CmsPermissionDialog { /** * Adds list of entries to layout . < p >
* @ param entries the ace list
* @ param layout layout
* @ param editable boolean
* @ param inheritedRes boolean */
private void addEntryTableToLayout ( List < CmsAccessControlEntry > entries , VerticalLayout layout , boolean editable , boolean inheritedRes ) { } } | final CmsPermissionViewTable table = new CmsPermissionViewTable ( m_cms , entries , editable , inheritedRes , m_parents , this ) ; HorizontalLayout hl = new HorizontalLayout ( ) ; Label label = new Label ( CmsVaadinUtils . getMessageText ( Messages . GUI_PERMISSION_COUNT_1 , new Integer ( table . getContainerDataSource ( ) . size ( ) ) ) ) ; label . addStyleName ( "o-report" ) ; hl . addComponent ( label ) ; TextField tableFilter = new TextField ( ) ; tableFilter . setIcon ( FontOpenCms . FILTER ) ; tableFilter . setInputPrompt ( CmsVaadinUtils . getMessageText ( org . opencms . ui . apps . Messages . GUI_EXPLORER_FILTER_0 ) ) ; tableFilter . addStyleName ( ValoTheme . TEXTFIELD_INLINE_ICON ) ; tableFilter . setWidth ( "200px" ) ; tableFilter . addTextChangeListener ( new TextChangeListener ( ) { private static final long serialVersionUID = 1L ; public void textChange ( TextChangeEvent event ) { table . filterTable ( event . getText ( ) ) ; } } ) ; hl . addComponent ( tableFilter ) ; hl . setWidth ( "100%" ) ; hl . setExpandRatio ( label , 1 ) ; hl . setMargin ( true ) ; hl . setComponentAlignment ( tableFilter , com . vaadin . ui . Alignment . MIDDLE_RIGHT ) ; if ( table . getContainerDataSource ( ) . size ( ) == 0 ) { layout . addComponent ( CmsVaadinUtils . getInfoLayout ( Messages . GUI_PERMISSION_EMPTY_0 ) ) ; } else { layout . addComponent ( hl ) ; layout . addComponent ( table ) ; CmsVaadinUtils . centerWindow ( this ) ; } |
public class PrimaveraPMFileWriter { /** * Attempts to locate the activity type value extracted from an existing P6 schedule .
* If necessary converts to the form which can be used in the PMXML file .
* Returns " Resource Dependent " as the default value .
* @ param task parent task
* @ return activity type */
private String extractAndConvertTaskType ( Task task ) { } } | String activityType = ( String ) task . getCachedValue ( m_activityTypeField ) ; if ( activityType == null ) { activityType = "Resource Dependent" ; } else { if ( ACTIVITY_TYPE_MAP . containsKey ( activityType ) ) { activityType = ACTIVITY_TYPE_MAP . get ( activityType ) ; } } return activityType ; |
public class InternalSimpleAntlrParser { /** * ANTLR - generated parser method for the ' Alternatives ' grammar rule ; regenerate from the grammar instead of editing by hand .
 * InternalSimpleAntlr . g : 511:1 : ruleAlternatives returns [ EObject current = null ] : ( this _ Group _ 0 = ruleGroup ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ groups _ 3_0 = ruleGroup ) ) ) + ) ? ) ; */
public final EObject ruleAlternatives ( ) throws RecognitionException { } } | EObject current = null ; Token otherlv_2 = null ; EObject this_Group_0 = null ; EObject lv_groups_3_0 = null ; enterRule ( ) ; try { // InternalSimpleAntlr . g : 514:28 : ( ( this _ Group _ 0 = ruleGroup ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ groups _ 3_0 = ruleGroup ) ) ) + ) ? ) )
// InternalSimpleAntlr . g : 515:1 : ( this _ Group _ 0 = ruleGroup ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ groups _ 3_0 = ruleGroup ) ) ) + ) ? )
{ // InternalSimpleAntlr . g : 515:1 : ( this _ Group _ 0 = ruleGroup ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ groups _ 3_0 = ruleGroup ) ) ) + ) ? )
// InternalSimpleAntlr . g : 516:2 : this _ Group _ 0 = ruleGroup ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ groups _ 3_0 = ruleGroup ) ) ) + ) ?
{ if ( state . backtracking == 0 ) { } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getAlternativesAccess ( ) . getGroupParserRuleCall_0 ( ) ) ; } pushFollow ( FOLLOW_15 ) ; this_Group_0 = ruleGroup ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = this_Group_0 ; afterParserOrEnumRuleCall ( ) ; } // InternalSimpleAntlr . g : 527:1 : ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ groups _ 3_0 = ruleGroup ) ) ) + ) ?
int alt9 = 2 ; int LA9_0 = input . LA ( 1 ) ; if ( ( LA9_0 == 24 ) ) { alt9 = 1 ; } switch ( alt9 ) { case 1 : // InternalSimpleAntlr . g : 527:2 : ( ) ( otherlv _ 2 = ' | ' ( ( lv _ groups _ 3_0 = ruleGroup ) ) ) +
{ // InternalSimpleAntlr . g : 527:2 : ( )
// InternalSimpleAntlr . g : 528:2:
{ if ( state . backtracking == 0 ) { } if ( state . backtracking == 0 ) { current = forceCreateModelElementAndAdd ( grammarAccess . getAlternativesAccess ( ) . getAlternativesGroupsAction_1_0 ( ) , current ) ; } } // InternalSimpleAntlr . g : 536:2 : ( otherlv _ 2 = ' | ' ( ( lv _ groups _ 3_0 = ruleGroup ) ) ) +
int cnt8 = 0 ; loop8 : do { int alt8 = 2 ; int LA8_0 = input . LA ( 1 ) ; if ( ( LA8_0 == 24 ) ) { alt8 = 1 ; } switch ( alt8 ) { case 1 : // InternalSimpleAntlr . g : 536:4 : otherlv _ 2 = ' | ' ( ( lv _ groups _ 3_0 = ruleGroup ) )
{ otherlv_2 = ( Token ) match ( input , 24 , FOLLOW_14 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_2 , grammarAccess . getAlternativesAccess ( ) . getVerticalLineKeyword_1_1_0 ( ) ) ; } // InternalSimpleAntlr . g : 540:1 : ( ( lv _ groups _ 3_0 = ruleGroup ) )
// InternalSimpleAntlr . g : 541:1 : ( lv _ groups _ 3_0 = ruleGroup )
{ // InternalSimpleAntlr . g : 541:1 : ( lv _ groups _ 3_0 = ruleGroup )
// InternalSimpleAntlr . g : 542:3 : lv _ groups _ 3_0 = ruleGroup
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getAlternativesAccess ( ) . getGroupsGroupParserRuleCall_1_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_15 ) ; lv_groups_3_0 = ruleGroup ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getAlternativesRule ( ) ) ; } add ( current , "groups" , lv_groups_3_0 , "org.eclipse.xtext.generator.parser.antlr.debug.SimpleAntlr.Group" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : if ( cnt8 >= 1 ) break loop8 ; if ( state . backtracking > 0 ) { state . failed = true ; return current ; } EarlyExitException eee = new EarlyExitException ( 8 , input ) ; throw eee ; } cnt8 ++ ; } while ( true ) ; } break ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ; |
public class PreferenceFragment { /** * Initializes the preference , which allows to change the app ' s theme . */
private void initializeThemePreference ( ) { } } | Preference themePreference = findPreference ( getString ( R . string . theme_preference_key ) ) ; themePreference . setOnPreferenceChangeListener ( createThemeChangeListener ( ) ) ; |
public class DescribeTableRestoreStatusResult { /** * A list of status details for one or more table restore requests .
* @ param tableRestoreStatusDetails
* A list of status details for one or more table restore requests . */
public void setTableRestoreStatusDetails ( java . util . Collection < TableRestoreStatus > tableRestoreStatusDetails ) { } } | if ( tableRestoreStatusDetails == null ) { this . tableRestoreStatusDetails = null ; return ; } this . tableRestoreStatusDetails = new com . amazonaws . internal . SdkInternalList < TableRestoreStatus > ( tableRestoreStatusDetails ) ; |
public class ThresholdLogReporter { /** * Reports the given span , but it doesn ' t have to be a potential slow .
* This method , based on its configuration , will figure out if the given
* span is indeed eligible for being part in the log .
* @ param span the span to report . */
public void report ( final ThresholdLogSpan span ) { } } | if ( isOverThreshold ( span ) ) { if ( ! overThresholdQueue . offer ( span ) ) { LOGGER . debug ( "Could not enqueue span {} for over threshold reporting, discarding." , span ) ; } } |
/**
 * {@inheritDoc}
 *
 * Validates the input date against a pivot date using the configured
 * operator. When either the input or the pivot value is absent the
 * validation passes (there is nothing to compare).
 */
@Override
protected boolean isValid() {
    // Get the date we are validating.
    WDateField dateField = (WDateField) this.getInputField();
    Date date = dateField.getDate();
    if (date == null) {
        // No date, so nothing to validate.
        return true;
    }
    // Determine the pivot date; a variable pivot (another field) takes
    // precedence over the fixed pivot.
    Date pivot = null;
    if (variablePivot != null) {
        pivot = variablePivot.getDate();
        if (pivot == null) {
            // No pivot value, so default to true.
            return true;
        }
    } else if (fixedPivot != null) {
        pivot = fixedPivot;
    }
    // We take a null pivot date as meaning "today".
    if (pivot == null) {
        pivot = new Date();
    }
    // Round the dates to nearest day so only the calendar date matters.
    pivot = DateUtilities.roundToDay(pivot);
    date = DateUtilities.roundToDay(date);
    // Perform the comparison with the pivot.
    switch (operator) {
        case BEFORE:
            return date.before(pivot);
        case BEFORE_OR_EQUAL:
            // date <= pivot, expressed as !(pivot < date)
            return !pivot.before(date);
        case EQUAL:
            return date.equals(pivot);
        case AFTER_OR_EQUAL:
            // date >= pivot, expressed as !(pivot > date)
            return !pivot.after(date);
        case AFTER:
            return date.after(pivot);
        default:
            throw new SystemException("Unknown operator. [" + operator + "]");
    }
}
/**
 * VoltDB added method to get a non-catalog-dependent representation of this
 * HSQLDB table: its columns, constraints, indexes and (optionally) its
 * time-to-live settings as a {@link VoltXMLElement} tree.
 *
 * @param session the current Session object, may be needed to resolve some names
 * @return XML, correctly indented, representing this object
 * @throws org.hsqldb_voltpatches.HSQLInterface.HSQLParseException
 */
VoltXMLElement voltGetTableXML(Session session) throws org.hsqldb_voltpatches.HSQLInterface.HSQLParseException {
    VoltXMLElement table = new VoltXMLElement("table");
    // add table metadata
    String tableName = getName().name;
    table.attributes.put("name", tableName);
    // read all the columns
    VoltXMLElement columns = new VoltXMLElement("columns");
    columns.attributes.put("name", "columns");
    table.children.add(columns);
    int[] columnIndices = getColumnMap();
    for (int i : columnIndices) {
        ColumnSchema column = getColumn(i);
        VoltXMLElement colChild = column.voltGetColumnXML(session);
        colChild.attributes.put("index", Integer.toString(i));
        columns.children.add(colChild);
        // NOTE(review): this assert runs after colChild was already dereferenced,
        // so it can never fire usefully.
        assert (colChild != null);
    }
    // read all the constraints; remember which ones are backed by an index so
    // the index elements can be cross-referenced below.
    VoltXMLElement constraints = new VoltXMLElement("constraints");
    constraints.attributes.put("name", "constraints");
    Map<String, VoltXMLElement> indexConstraintMap = new HashMap<>();
    for (Constraint constraint : getConstraints()) {
        VoltXMLElement constraintChild = constraint.voltGetConstraintXML();
        constraints.children.add(constraintChild);
        if (constraintChild.attributes.containsKey("index")) {
            indexConstraintMap.put(constraintChild.attributes.get("index"), constraintChild);
        }
    }
    // read all the indexes
    VoltXMLElement indexes = new VoltXMLElement("indexes");
    indexes.attributes.put("name", "indexes");
    for (Index index : indexList) {
        VoltXMLElement indexChild = index.voltGetIndexXML(session, tableName, indexConstraintMap);
        indexes.children.add(indexChild);
        // NOTE(review): as above, this assert runs after indexChild was used.
        assert (indexChild != null);
    }
    // Indexes must come before constraints when converting to the catalog.
    table.children.add(indexes);
    table.children.add(constraints);
    if (timeToLive != null) {
        // Emit the TTL configuration as its own child element.
        VoltXMLElement ttl = new VoltXMLElement(TimeToLiveVoltDB.TTL_NAME);
        ttl.attributes.put("name", TimeToLiveVoltDB.TTL_NAME);
        ttl.attributes.put("value", Integer.toString(timeToLive.ttlValue));
        ttl.attributes.put("unit", timeToLive.ttlUnit);
        ttl.attributes.put("column", timeToLive.ttlColumn.getNameString());
        ttl.attributes.put("maxFrequency", Integer.toString(timeToLive.maxFrequency));
        ttl.attributes.put("batchSize", Integer.toString(timeToLive.batchSize));
        ttl.attributes.put("migrationTarget", timeToLive.migrationTarget);
        table.children.add(ttl);
    }
    // Every index-backed constraint is expected to have been consumed by
    // Index.voltGetIndexXML above -- presumably it removes map entries;
    // TODO confirm against that method.
    assert (indexConstraintMap.isEmpty());
    return table;
}
public class EtcdClient { /** * Executes the given method on the given location using the given request
* data .
* @ param uri
* the location
* @ param method
* the HTTP method
* @ param requestData
* the request data
* @ return the etcd response
* @ throws EtcdException
* in case etcd returned an error */
private < T > T execute ( UriComponentsBuilder uriTemplate , HttpMethod method , MultiValueMap < String , String > requestData , Class < T > responseType ) throws EtcdException { } } | long startTimeMillis = System . currentTimeMillis ( ) ; int retry = - 1 ; ResourceAccessException lastException = null ; do { lastException = null ; URI uri = uriTemplate . buildAndExpand ( locations [ locationIndex ] ) . toUri ( ) ; RequestEntity < MultiValueMap < String , String > > requestEntity = new RequestEntity < > ( requestData , null , method , uri ) ; try { ResponseEntity < T > responseEntity = template . exchange ( requestEntity , responseType ) ; return responseEntity . getBody ( ) ; } catch ( HttpStatusCodeException e ) { EtcdError error = null ; try { error = responseConverter . getObjectMapper ( ) . readValue ( e . getResponseBodyAsByteArray ( ) , EtcdError . class ) ; } catch ( IOException ex ) { error = null ; } throw new EtcdException ( error , "Failed to execute " + requestEntity + "." , e ) ; } catch ( ResourceAccessException e ) { log . debug ( "Failed to execute " + requestEntity + ", retrying if possible." , e ) ; if ( locationIndex == locations . length - 1 ) { locationIndex = 0 ; } else { locationIndex ++ ; } lastException = e ; } } while ( retry <= retryCount && System . currentTimeMillis ( ) - startTimeMillis < retryDuration ) ; if ( lastException != null ) { throw lastException ; } else { return null ; } |
/**
 * Adds the stats to the reporter; when every expected policy has reported,
 * prints the accumulated results and stops the simulator.
 *
 * @param stats the statistics of one completed policy run
 * @throws IOException if printing the report fails
 */
private void reportStats(PolicyStats stats) throws IOException {
    reporter.add(stats);
    // Count down the outstanding policies; the last one triggers the report.
    if (--remaining == 0) {
        reporter.print();
        // Stop this actor -- presumably an Akka-style actor context; confirm
        // against the enclosing class.
        context().stop(self());
        System.out.println("Executed in " + stopwatch);
    }
}
public class AnalyzeSpark { /** * Get a list of unique values from the specified columns .
* For sequence data , use { @ link # getUniqueSequence ( String , Schema , JavaRDD ) }
* @ param columnNames Names of the column to get unique values from
* @ param schema Data schema
* @ param data Data to get unique values from
* @ return List of unique values , for each of the specified columns */
public static Map < String , List < Writable > > getUnique ( List < String > columnNames , Schema schema , JavaRDD < List < Writable > > data ) { } } | Map < String , Set < Writable > > m = data . aggregate ( null , new UniqueAddFunction ( columnNames , schema ) , new UniqueMergeFunction ( ) ) ; Map < String , List < Writable > > out = new HashMap < > ( ) ; for ( String s : m . keySet ( ) ) { out . put ( s , new ArrayList < > ( m . get ( s ) ) ) ; } return out ; |
public class Img { /** * 图像切割 ( 按指定起点坐标和宽高切割 )
* @ param rectangle 矩形对象 , 表示矩形区域的x , y , width , height
* @ return this */
public Img cut ( Rectangle rectangle ) { } } | final BufferedImage srcImage = getValidSrcImg ( ) ; rectangle = fixRectangle ( rectangle , srcImage . getWidth ( ) , srcImage . getHeight ( ) ) ; final ImageFilter cropFilter = new CropImageFilter ( rectangle . x , rectangle . y , rectangle . width , rectangle . height ) ; final Image image = Toolkit . getDefaultToolkit ( ) . createImage ( new FilteredImageSource ( srcImage . getSource ( ) , cropFilter ) ) ; this . targetImage = ImgUtil . toBufferedImage ( image ) ; return this ; |
public class CommonSF424BaseGenerator { /** * This method is to get division name using the OwnedByUnit and traversing through the parent units till the top level
* @ param pdDoc Proposal development document .
* @ return divisionName based on the OwnedByUnit . */
public String getDivisionName ( ProposalDevelopmentDocumentContract pdDoc ) { } } | String divisionName = null ; if ( pdDoc != null && pdDoc . getDevelopmentProposal ( ) . getOwnedByUnit ( ) != null ) { UnitContract ownedByUnit = pdDoc . getDevelopmentProposal ( ) . getOwnedByUnit ( ) ; // traverse through the parent units till the top level unit
while ( ownedByUnit . getParentUnit ( ) != null ) { ownedByUnit = ownedByUnit . getParentUnit ( ) ; } divisionName = ownedByUnit . getUnitName ( ) ; if ( divisionName . length ( ) > DIVISION_NAME_MAX_LENGTH ) { divisionName = divisionName . substring ( 0 , DIVISION_NAME_MAX_LENGTH ) ; } } return divisionName ; |
public class ScreenCapture { /** * Takes the screen capture of the designated area . If no dimensions are specified , it will take
* a screenshot of the full screen by default . */
public void take ( ) throws AWTException { } } | Rectangle area = new Rectangle ( dimensions ) ; Robot robot = new Robot ( ) ; BufferedImage image = robot . createScreenCapture ( area ) ; data = getByteArray ( image ) ; |
/**
 * Determines bounds for the underlying drawable and a matrix that should be
 * applied on it. When the drawable has no intrinsic dimensions, it is given
 * this drawable's bounds and the matrix is discarded.
 */
private void configureBounds() {
    Drawable underlyingDrawable = getCurrent();
    Rect bounds = getBounds();
    int underlyingWidth = mUnderlyingWidth = underlyingDrawable.getIntrinsicWidth();
    int underlyingHeight = mUnderlyingHeight = underlyingDrawable.getIntrinsicHeight();
    // In case the underlying drawable doesn't have intrinsic dimensions
    // (getIntrinsicWidth/Height return a non-positive value), we cannot set
    // meaningful bounds from them, so we use our own bounds and discard the
    // specified matrix. In the normal case we use the drawable's intrinsic
    // dimensions for its bounds and apply the specified matrix to it.
    if (underlyingWidth <= 0 || underlyingHeight <= 0) {
        underlyingDrawable.setBounds(bounds);
        mDrawMatrix = null;
    } else {
        underlyingDrawable.setBounds(0, 0, underlyingWidth, underlyingHeight);
        mDrawMatrix = mMatrix;
    }
}
public class OutputStreamWriter { /** * Writes the character { @ code oneChar } to this writer . The lowest two bytes
* of the integer { @ code oneChar } are immediately converted to bytes by the
* character converter and stored in a local buffer . If the buffer gets full
* by converting this character , this writer is flushed .
* @ param oneChar
* the character to write .
* @ throws IOException
* if this writer is closed or another I / O error occurs . */
@ Override public void write ( int oneChar ) throws IOException { } } | synchronized ( lock ) { checkStatus ( ) ; CharBuffer chars = CharBuffer . wrap ( new char [ ] { ( char ) oneChar } ) ; convert ( chars ) ; } |
/**
 * If running on self, just submit to queues and do locally. Installs a
 * completer on the task that marks this RPC done (normally or
 * exceptionally) and wakes any threads waiting on it.
 *
 * @return this RPC, for chaining
 */
private RPC<V> handleLocal() {
    assert _dt.getCompleter() == null;
    _dt.setCompleter(new H2O.H2OCallback<DTask>() {
        @Override
        public void callback(DTask dt) {
            synchronized (RPC.this) {
                _done = true;
                RPC.this.notifyAll();
            }
            doAllCompletions();
        }
        @Override
        public boolean onExceptionalCompletion(Throwable ex, CountedCompleter dt) {
            synchronized (RPC.this) {
                // Might be called several times
                if (_done) return true; // Filter down to 1st exceptional completion
                // must be the last set before notify call cause the waiting thread
                // can wake up at any moment independently on notify
                _dt.setException(ex);
                _done = true;
                RPC.this.notifyAll();
            }
            doAllCompletions();
            return true;
        }
    });
    H2O.submitTask(_dt);
    return this;
}
/**
 * Try to find a failover node within the same load balancing group.
 *
 * @param entry the resolved virtual host entry
 * @param domain the load balancing domain, if known
 * @param session the actual value of JSESSIONID/jsessionid cookie/parameter
 * @param jvmRoute the original jvmRoute; in case of multiple routes, the first one
 * @param forceStickySession whether sticky sessions are forced
 * @return the context, {@code null} if not found
 */
Context findFailoverNode(final VirtualHost.HostEntry entry, final String domain, final String session, final String jvmRoute, final boolean forceStickySession) {
    // If configured, deterministically choose the failover target by
    // calculating the hash of the session ID modulo the number of electable nodes.
    if (modCluster.isDeterministicFailover()) {
        // Collect healthy, non-standby nodes first.
        List<String> candidates = new ArrayList<>(entry.getNodes().size());
        for (String route : entry.getNodes()) {
            Node node = nodes.get(route);
            if (node != null && !node.isInErrorState() && !node.isHotStandby()) {
                candidates.add(route);
            }
        }
        // If there are no available regular nodes, all hot standby nodes become candidates.
        if (candidates.isEmpty()) {
            for (String route : entry.getNodes()) {
                Node node = nodes.get(route);
                if (node != null && !node.isInErrorState() && node.isHotStandby()) {
                    candidates.add(route);
                }
            }
        }
        if (candidates.isEmpty()) {
            return null;
        }
        // NOTE(review): assumes the session value contains a '.' separating the
        // session id from its route; if it does not, indexOf returns -1 and
        // substring throws -- confirm callers guarantee the separator.
        String sessionId = session.substring(0, session.indexOf('.'));
        int index = (int) (Math.abs((long) sessionId.hashCode()) % candidates.size());
        // Sort so the index maps to the same node on every member.
        Collections.sort(candidates);
        String electedRoute = candidates.get(index);
        UndertowLogger.ROOT_LOGGER.debugf("Using deterministic failover target: %s", electedRoute);
        return entry.getContextForNode(electedRoute);
    }
    // Non-deterministic path: resolve the failover domain, preferring the
    // explicit argument, then the node's configuration, then the recorded map.
    String failOverDomain = null;
    if (domain == null) {
        final Node node = nodes.get(jvmRoute);
        if (node != null) {
            failOverDomain = node.getNodeConfig().getDomain();
        }
        if (failOverDomain == null) {
            failOverDomain = failoverDomains.get(jvmRoute);
        }
    } else {
        failOverDomain = domain;
    }
    final Collection<Context> contexts = entry.getContexts();
    if (failOverDomain != null) {
        // First try to elect a node within the same domain.
        final Context context = electNode(contexts, true, failOverDomain);
        if (context != null) {
            return context;
        }
    }
    if (forceStickySession) {
        // Sticky sessions are forced: do not fail over outside the domain.
        return null;
    } else {
        return electNode(contexts, false, null);
    }
}
/**
 * Initializes the instance.
 *
 * @param channel the channel the write completed on; must not be {@code null}
 * @param writtenAmount the number of bytes written; must be positive
 * @throws NullPointerException if {@code channel} is {@code null}
 * @throws IllegalArgumentException if {@code writtenAmount} is not positive
 */
public void init(Channel channel, long writtenAmount) {
    if (channel == null) {
        throw new NullPointerException("channel");
    }
    if (writtenAmount <= 0) {
        throw new IllegalArgumentException("writtenAmount must be a positive integer: " + writtenAmount);
    }
    this.channel = channel;
    this.writtenAmount = writtenAmount;
}
/**
 * Updates a VirtualNetworkTap's tags asynchronously.
 *
 * @param resourceGroupName the name of the resource group
 * @param tapName the name of the tap
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<VirtualNetworkTapInner> updateTagsAsync(String resourceGroupName, String tapName, final ServiceCallback<VirtualNetworkTapInner> serviceCallback) {
    // Delegate to the Observable-based overload and adapt it to a ServiceFuture.
    return ServiceFuture.fromResponse(updateTagsWithServiceResponseAsync(resourceGroupName, tapName), serviceCallback);
}
/**
 * Shares a project with a group.
 *
 * @param accessLevel the permissions level to grant the group
 * @param expiration share expiration date in ISO 8601 format: 2016-09-26, or {@code null}
 * @param group the group to share with
 * @param project the project to be shared
 * @throws IOException on gitlab api call error
 */
public void shareProjectWithGroup(GitlabAccessLevel accessLevel, String expiration, GitlabGroup group, GitlabProject project) throws IOException {
    // expires_at is only appended when an expiration was supplied.
    Query query = new Query()
            .append("group_id", group.getId().toString())
            .append("group_access", String.valueOf(accessLevel.accessValue))
            .appendIf("expires_at", expiration);
    String tailUrl = GitlabProject.URL + "/" + project.getId() + "/share" + query.toString();
    dispatch().to(tailUrl, Void.class);
}
/**
 * Registers the given java type for the given table and column by resolving
 * it through the java type mapping and delegating to the type-based overload.
 *
 * @param table table
 * @param column column
 * @param javaType java type
 */
public void register(String table, String column, Class<?> javaType) {
    register(table, column, javaTypeMapping.getType(javaType));
}
public class DisClientSysConfig { /** * load config normal */
public synchronized void loadConfig ( String filePath ) throws Exception { } } | if ( isLoaded ) { return ; } String filePathInternal = filename ; if ( filePath != null ) { filePathInternal = filePath ; } DisconfAutowareConfig . autowareConfig ( INSTANCE , filePathInternal ) ; isLoaded = true ; |
public class CoverageDataCore { /** * Set the interpolation algorithm
* @ param algorithm
* algorithm type */
public void setAlgorithm ( CoverageDataAlgorithm algorithm ) { } } | if ( algorithm == null ) { algorithm = CoverageDataAlgorithm . NEAREST_NEIGHBOR ; } this . algorithm = algorithm ; |
/**
 * {@inheritDoc}
 *
 * Builds a new {@code CurrentFamilyNameTypeImpl} for the given XML name.
 */
@Override
public CurrentFamilyNameType buildObject(String namespaceURI, String localName, String namespacePrefix) {
    return new CurrentFamilyNameTypeImpl(namespaceURI, localName, namespacePrefix);
}
/**
 * The default estimates build on the principle of inclusion: the smaller
 * input key domain is included in the larger input key domain. We also
 * assume that every key from the larger input has one join partner in the
 * smaller input. The result cardinality is hence the larger one.
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    long card1 = getFirstPredecessorNode().getEstimatedNumRecords();
    long card2 = getSecondPredecessorNode().getEstimatedNumRecords();
    // A negative cardinality means "unknown"; propagate -1 in that case.
    this.estimatedNumRecords = (card1 < 0 || card2 < 0) ? -1 : Math.max(card1, card2);
    if (this.estimatedNumRecords >= 0) {
        // An output record is the concatenation of one record from each input,
        // so its width is the sum of the inputs' average widths.
        float width1 = getFirstPredecessorNode().getEstimatedAvgWidthPerOutputRecord();
        float width2 = getSecondPredecessorNode().getEstimatedAvgWidthPerOutputRecord();
        float width = (width1 <= 0 || width2 <= 0) ? -1 : width1 + width2;
        if (width > 0) {
            this.estimatedOutputSize = (long) (width * this.estimatedNumRecords);
        }
    }
}
/**
 * Closes {@link Action}s that are no longer necessary without sending a
 * hint to the controller: each active action for the given targets is
 * cancelled, deactivated, and a CANCELED status entry is recorded.
 *
 * @param targetsIds ids of the targets whose {@link Action}s are overridden
 * @return the ids of the targets whose actions were closed
 */
protected List<Long> closeObsoleteUpdateActions(final Collection<Long> targetsIds) {
    // Figure out if there are potential target/action combinations that
    // need to be considered for cancellation.
    final List<JpaAction> activeActions = actionRepository.findByActiveAndTargetIdInAndDistributionSetNotRequiredMigrationStep(targetsIds);
    return activeActions.stream().map(action -> {
        action.setStatus(Status.CANCELED);
        action.setActive(false);
        // document that the status has been retrieved
        actionStatusRepository.save(new JpaActionStatus(action, Status.CANCELED, System.currentTimeMillis(), RepositoryConstants.SERVER_MESSAGE_PREFIX + "close obsolete action due to new update"));
        actionRepository.save(action);
        return action.getTarget().getId();
    }).collect(Collectors.toList());
}
/**
 * Collects the terms into a {@link LongHashSet}, stopping once
 * {@code maxTerms} terms were gathered, and records whether the result was
 * pruned (either the hit stream or the term limit truncated it).
 *
 * @param hitStream the stream of hits to read terms from
 * @return the collected terms set; the caller owns (and must release) it
 * @throws IOException if reading from the index fails
 */
public NumericTermsSet collect(HitStream hitStream) throws IOException {
    hitStream.initialize(); // initialise the stream
    int nHits = hitStream.getHits();
    // Size the set from the expected term count when known, else from the hit count.
    NumericTermsSet terms = this.newTermsSet(this.expectedTerms != -1 ? this.expectedTerms : nHits, breaker);
    try {
        NumericTermStream reusableTermStream = NumericTermStream.get(context.searcher().getIndexReader(), indexFieldData);
        while (terms.size() < this.maxTerms && hitStream.hasNext()) {
            hitStream.next();
            reusableTermStream = hitStream.getTermStream(reusableTermStream);
            while (terms.size() < this.maxTerms && reusableTermStream.hasNext()) {
                terms.add(reusableTermStream.next());
            }
        }
        // Pruned if the stream saw fewer hits than exist, or the term cap kicked in.
        boolean isPruned = hitStream.getTotalHits() > hitStream.getHits();
        isPruned |= this.maxTerms < nHits;
        terms.setIsPruned(isPruned);
        return terms;
    } catch (Throwable t) {
        // If something happens during the term collection, release the terms
        // set and adjust the circuit breaker before rethrowing.
        terms.release();
        throw t;
    }
}
/**
 * Updates all modifiable fields from another case definition entity, but
 * only when both entities refer to the same definition (same key and
 * deployment); otherwise the mismatch is logged and nothing is changed.
 *
 * @param updatingCaseDefinition the entity to copy modifiable fields from
 */
@Override
public void updateModifiableFieldsFromEntity(CaseDefinitionEntity updatingCaseDefinition) {
    if (this.key.equals(updatingCaseDefinition.key) && this.deploymentId.equals(updatingCaseDefinition.deploymentId)) {
        this.revision = updatingCaseDefinition.revision;
        this.historyTimeToLive = updatingCaseDefinition.historyTimeToLive;
    } else {
        // Refusing to copy across unrelated definitions; record the attempt.
        LOG.logUpdateUnrelatedCaseDefinitionEntity(this.key, updatingCaseDefinition.key, this.deploymentId, updatingCaseDefinition.deploymentId);
    }
}
/**
 * Least squares solution of A*X = B.
 *
 * @param B a Matrix with as many rows as A and any number of columns
 * @return X that minimizes the two norm of Q*R*X - B
 * @exception IllegalArgumentException Matrix row dimensions must agree.
 * @exception RuntimeException Matrix is rank deficient.
 */
public Matrix solve(Matrix B) {
    if (B.getRowDimension() != m) {
        throw new IllegalArgumentException("Matrix row dimensions must agree.");
    }
    if (!this.isFullRank()) {
        throw new RuntimeException("Matrix is rank deficient.");
    }
    // Copy right hand side
    int nx = B.getColumnDimension();
    double[][] X = B.getArrayCopy();
    // Compute Y = transpose(Q)*B by applying the stored Householder
    // reflectors (columns of QR) to each column of X in place.
    for (int k = 0; k < n; k++) {
        for (int j = 0; j < nx; j++) {
            double s = 0.0;
            for (int i = k; i < m; i++) {
                s += QR[i][k] * X[i][j];
            }
            s = -s / QR[k][k];
            for (int i = k; i < m; i++) {
                X[i][j] += s * QR[i][k];
            }
        }
    }
    // Solve R*X = Y by back substitution using the stored diagonal Rdiag.
    for (int k = n - 1; k >= 0; k--) {
        for (int j = 0; j < nx; j++) {
            X[k][j] /= Rdiag[k];
        }
        for (int i = 0; i < k; i++) {
            for (int j = 0; j < nx; j++) {
                X[i][j] -= X[k][j] * QR[i][k];
            }
        }
    }
    // Only the first n rows of X hold the solution.
    return (new Matrix(X, n, nx).getMatrix(0, n - 1, 0, nx - 1));
}
/**
 * Resolves the value of the current Promise with the given InetAddress,
 * wrapped in a {@code Tree} value holder.
 *
 * @param value the value of the Promise
 */
public final void resolve(InetAddress value) {
    // The (Tree) null cast selects the parent-taking Tree constructor overload.
    future.complete(new Tree((Tree) null, null, value));
}
public class LoginHelper { /** * Creates the appropriate login buffer using username , password ,
* connectionkey and the local address to which the UDP channel is bound .
* @ param localUDPAddress
* < b > optional < / b > If passed in , then this address is passed on
* to jetserver , so that it can associate this address with its
* session .
* @ return Returns the ChannelBuffer representation of username , password ,
* connection key , udp local bind address etc .
* @ throws Exception */
public MessageBuffer < ChannelBuffer > getLoginBuffer ( InetSocketAddress localUDPAddress ) throws Exception { } } | ChannelBuffer loginBuffer ; ChannelBuffer credentials = NettyUtils . writeStrings ( username , password , connectionKey ) ; if ( null != localUDPAddress ) { ChannelBuffer udpAddressBuffer = NettyUtils . writeSocketAddress ( localUDPAddress ) ; loginBuffer = ChannelBuffers . wrappedBuffer ( credentials , udpAddressBuffer ) ; } else { loginBuffer = credentials ; } return new NettyMessageBuffer ( loginBuffer ) ; |
/**
 * Creates a new {@code CalendarPicker} for the islamic (Hijri) calendar,
 * using system-derived defaults for the locale and the current zonal time.
 *
 * <p>The following code selects the Umalqura variant of Saudi Arabia:</p>
 *
 * <pre>
 *     CalendarPicker&lt;HijriCalendar&gt; picker =
 *         CalendarPicker.hijriWithSystemDefaults(() -&gt; HijriCalendar.VARIANT_UMALQURA);
 * </pre>
 *
 * @param variantSource the variant of the underlying islamic calendar
 * @return CalendarPicker
 * @see Locale#getDefault(Locale.Category) Locale.getDefault(Locale.Category.FORMAT)
 * @see SystemClock#inLocalView()
 * @see ZonalClock#now(CalendarFamily, VariantSource, StartOfDay)
 * @see HijriCalendar#VARIANT_UMALQURA
 * @see net.time4j.calendar.HijriAlgorithm
 */
public static CalendarPicker<HijriCalendar> hijriWithSystemDefaults(VariantSource variantSource) {
    // Islamic calendar days start at sunset, hence StartOfDay.EVENING.
    return CalendarPicker.hijri(
            variantSource,
            Locale.getDefault(Locale.Category.FORMAT),
            () -> SystemClock.inLocalView().now(HijriCalendar.family(), variantSource, StartOfDay.EVENING).toDate());
}
/**
 * (non-Javadoc)
 *
 * @see net.finmath.stochastic.RandomVariable#getQuantile(double, net.finmath.stochastic.RandomVariable)
 */
@Override
public double getQuantile(double quantile, RandomVariable probabilities) {
    // Delegate to the plain (non-differentiable) values underlying this variable.
    return ((RandomVariableDifferentiableAADStochasticNonOptimized) getValues()).getValues().getQuantile(quantile, probabilities);
}
/**
 * Creates a new entity-based ElasticSearch job configuration object.
 *
 * @param entityClass the class instance of the entity class that will be used to map db objects to Java objects
 * @param <T> the generic type of the entity object implementing IDeepType
 * @return a new entity-based ElasticSearch job configuration object
 */
public static <T extends IDeepType> ESDeepJobConfig<T> createES(Class<T> entityClass) {
    return new ESDeepJobConfig<>(entityClass);
}
public class TTFSubSetFile { /** * Get the checksum as a long
* @ param start The start value
* @ param size The size of the values to checksum
* @ return The long checksum */
private long getLongCheckSum ( int start , int size ) { } } | // All the tables here are aligned on four byte boundaries
// Add remainder to size if it ' s not a multiple of 4
int remainder = size % 4 ; if ( remainder != 0 ) { size += remainder ; } long sum = 0 ; for ( int i = 0 ; i < size ; i += 4 ) { int l = ( output [ start + i ] << 24 ) ; l += ( output [ start + i + 1 ] << 16 ) ; l += ( output [ start + i + 2 ] << 16 ) ; l += ( output [ start + i + 3 ] << 16 ) ; sum += l ; if ( sum > 0xffffffff ) { sum = sum - 0xffffffff ; } } return sum ; |
/**
 * Builds the final SQL text from the base expression by expanding WHERE
 * clauses, ORDER BY clauses and native functions: each fragment either
 * replaces its named marker inside the SQL, or is appended when no marker
 * is present. Bind values produced while rendering WHERE clauses are
 * accumulated in {@code bindValues}.
 *
 * @param cpoClass the metadata class used to resolve attributes
 * @param sql the base SQL expression
 * @param wheres the WHERE fragments to expand, may be {@code null}
 * @param orderBy the ORDER BY fragments to expand, may be {@code null}
 * @param nativeQueries native function fragments to expand, may be {@code null}
 * @param bindValues out-parameter collecting the bind values in order
 * @return the fully expanded SQL text
 * @throws org.synchronoss.cpo.CpoException if a WHERE or ORDER BY fragment cannot be rendered
 */
protected <T> String buildSql(CpoClass cpoClass, String sql, Collection<CpoWhere> wheres, Collection<CpoOrderBy> orderBy, Collection<CpoNativeFunction> nativeQueries, List<BindAttribute> bindValues) throws CpoException {
    StringBuilder sqlText = new StringBuilder();
    sqlText.append(sql);
    if (wheres != null) {
        for (CpoWhere where : wheres) {
            BindableWhereBuilder<T> jwb = new BindableWhereBuilder<>(cpoClass);
            BindableCpoWhere jcw = (BindableCpoWhere) where;
            // Render the where clause via the visitor.
            try {
                jcw.acceptDFVisitor(jwb);
            } catch (Exception e) {
                throw new CpoException("Unable to build WHERE clause", e);
            }
            // No marker in the SQL: append; otherwise splice in at the marker.
            if (sqlText.indexOf(jcw.getName()) == -1) {
                sqlText.append(" ");
                sqlText.append(jwb.getWhereClause());
                bindValues.addAll(jwb.getBindValues());
            } else {
                sqlText = replaceMarker(sqlText, jcw.getName(), jwb, bindValues);
            }
        }
    }
    // Render the ORDER BY fragments, grouped per marker.
    if (orderBy != null) {
        HashMap<String, StringBuilder> mapOrderBy = new HashMap<>();
        try {
            for (CpoOrderBy ob : orderBy) {
                StringBuilder sb = mapOrderBy.get(ob.getMarker());
                if (sb == null) {
                    sb = new StringBuilder(" ORDER BY ");
                    mapOrderBy.put(ob.getMarker(), sb);
                } else {
                    sb.append(",");
                }
                sb.append(ob.toString(cpoClass));
            }
        } catch (CpoException ce) {
            throw new CpoException("Error Processing OrderBy Attribute<" + ExceptionHelper.getLocalizedMessage(ce) + "> not Found. JDBC Expression=<" + sqlText.toString() + ">");
        }
        Set<Entry<String, StringBuilder>> entries = mapOrderBy.entrySet();
        for (Entry<String, StringBuilder> entry : entries) {
            if (sqlText.indexOf(entry.getKey()) == -1) {
                sqlText.append(entry.getValue().toString());
            } else {
                sqlText = replaceMarker(sqlText, entry.getKey(), entry.getValue().toString());
            }
        }
    }
    // Expand native functions the same way: append when unmarked, else splice.
    if (nativeQueries != null) {
        for (CpoNativeFunction cnq : nativeQueries) {
            if (cnq.getMarker() == null || sqlText.indexOf(cnq.getMarker()) == -1) {
                if (cnq.getExpression() != null && cnq.getExpression().length() > 0) {
                    sqlText.append(" ");
                    sqlText.append(cnq.getExpression());
                }
            } else {
                sqlText = replaceMarker(sqlText, cnq.getMarker(), cnq.getExpression());
            }
        }
    }
    // Strip any leftover legacy markers (left for backwards compatibility).
    sqlText = replaceMarker(sqlText, WHERE_MARKER, "");
    sqlText = replaceMarker(sqlText, ORDERBY_MARKER, "");
    return sqlText.toString();
}
public class Filters { /** * Return an estimated selectivity for bitmaps given by an iterator .
* @ param bitmaps iterator of bitmaps
* @ param totalNumRows number of rows in the column associated with this bitmap index
* @ return estimated selectivity */
public static double estimateSelectivity ( final Iterator < ImmutableBitmap > bitmaps , final long totalNumRows ) { } } | long numMatchedRows = 0 ; while ( bitmaps . hasNext ( ) ) { final ImmutableBitmap bitmap = bitmaps . next ( ) ; numMatchedRows += bitmap . size ( ) ; } return Math . min ( 1. , ( double ) numMatchedRows / totalNumRows ) ; |
/**
 * Include Readable at current input. Readable is read as part of input.
 * When Readable ends, input continues using current input.
 *
 * <p>The included reader is closed at eof.</p>
 *
 * @param in the readable to include
 * @param source name of the included source (for diagnostics)
 * @throws IOException if releasing the current buffer fails
 */
@Override
public void include(Readable in, String source) throws IOException {
    // Flush any pending buffered content of the current level first.
    if (cursor != end) {
        release();
    }
    // Lazily create the stack of suspended include levels.
    if (includeStack == null) {
        includeStack = new ArrayDeque<>();
    }
    // Suspend the current level and switch input to the included readable.
    includeStack.push(includeLevel);
    includeLevel = new IncludeLevel(in, source);
}
/**
 * Attempts to fold an abstract relational comparison (ECMAScript
 * "Abstract Relational Comparison" algorithm,
 * http://www.ecma-international.org/ecma-262/6.0/#sec-abstract-relational-comparison)
 * between two nodes at compile time.
 *
 * @param left the left operand
 * @param right the right operand
 * @param willNegate whether the comparison result will be negated by the caller
 * @return TRUE/FALSE when the result is statically known, otherwise UNKNOWN
 */
private static TernaryValue tryAbstractRelationalComparison(Node left, Node right, boolean willNegate) {
    // First, try to evaluate based on the general type.
    ValueType leftValueType = NodeUtil.getKnownValueType(left);
    ValueType rightValueType = NodeUtil.getKnownValueType(right);
    if (leftValueType != ValueType.UNDETERMINED && rightValueType != ValueType.UNDETERMINED) {
        if (leftValueType == ValueType.STRING && rightValueType == ValueType.STRING) {
            String lv = NodeUtil.getStringValue(left);
            String rv = NodeUtil.getStringValue(right);
            if (lv != null && rv != null) {
                // In JS, browsers parse \v differently. So do not compare strings if one contains \v.
                if (lv.indexOf('\u000B') != -1 || rv.indexOf('\u000B') != -1) {
                    return TernaryValue.UNKNOWN;
                } else {
                    return TernaryValue.forBoolean(lv.compareTo(rv) < 0);
                }
            } else if (left.isTypeOf() && right.isTypeOf() && left.getFirstChild().isName() && right.getFirstChild().isName() && left.getFirstChild().getString().equals(right.getFirstChild().getString())) {
                // Special case: `typeof a < typeof a` is always false.
                return TernaryValue.FALSE;
            }
        }
    }
    // Then, try to evaluate based on the value of the node. Try comparing as numbers.
    Double lv = NodeUtil.getNumberValue(left);
    Double rv = NodeUtil.getNumberValue(right);
    if (lv == null || rv == null) {
        // Special case: `x < x` is always false.
        // TODO(moz): If we knew the named value wouldn't be NaN, it would be nice to handle
        // LE and GE. We should use type information if available here.
        if (!willNegate && left.isName() && right.isName()) {
            if (left.getString().equals(right.getString())) {
                return TernaryValue.FALSE;
            }
        }
        return TernaryValue.UNKNOWN;
    }
    // Any comparison involving NaN is false, so a negated result is true.
    if (Double.isNaN(lv) || Double.isNaN(rv)) {
        return TernaryValue.forBoolean(willNegate);
    } else {
        return TernaryValue.forBoolean(lv.doubleValue() < rv.doubleValue());
    }
}
public class RemoveUnusedPolyfills { /** * Checks whether the node is ( or was ) a call to $ jscomp . polyfill . */
private static boolean isPolyfillDefinition ( Node callee ) { } } | // If the callee is just $ jscomp . polyfill then it ' s easy .
if ( callee . matchesQualifiedName ( "$jscomp.polyfill" ) || callee . matchesQualifiedName ( "$jscomp$polyfill" ) ) { return true ; } // It ' s possible that the function has been inlined , so look for
// a four - parameter function with parameters who have the correct
// prefix ( since a disambiguate suffix may have been added ) .
if ( callee . isFunction ( ) ) { Node paramList = callee . getSecondChild ( ) ; Node param = paramList . getFirstChild ( ) ; if ( paramList . hasXChildren ( 4 ) ) { for ( String name : POLYFILL_PARAMETERS ) { if ( ! param . isName ( ) || ! param . getString ( ) . startsWith ( name ) ) { return false ; } param = param . getNext ( ) ; } return true ; } } return false ; |
public class Node { /** * Returns the integer value for the property , or 0 if the property
* is not defined . */
public final int getIntProp ( Prop propType ) { } } | PropListItem item = lookupProperty ( propType ) ; if ( item == null ) { return 0 ; } return item . getIntValue ( ) ; |
public class CmsJspTagNavigation { /** * Sets the selected navigation type . < p >
* This must match one of the elements in { @ link Type } . < p >
* @ param type the navigation type to set */
public void setType ( String type ) { } } | if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( type ) ) { m_type = Type . parse ( type . trim ( ) ) ; } |
public class FeatureTileGen {
  /**
   * Print usage for the main method.
   *
   * Writes the full command-line help text to stdout: the usage synopsis, a short
   * description, every optional argument with its default value, and the required
   * positional arguments. A throwaway {@link DefaultFeatureTiles} instance is created
   * solely to report the built-in default values in the help text.
   */
  private static void printUsage() {
    // Used only to query default values for the help text.
    FeatureTiles featureTiles = new DefaultFeatureTiles(null);
    System.out.println();
    System.out.println("USAGE");
    System.out.println();
    System.out.println("\t[" + ARGUMENT_PREFIX + ARGUMENT_MAX_FEATURES_PER_TILE + " max_features_per_tile] [" + ARGUMENT_PREFIX + ARGUMENT_COMPRESS_FORMAT + " compress_format] [" + ARGUMENT_PREFIX + ARGUMENT_COMPRESS_QUALITY + " compress_quality] [" + ARGUMENT_PREFIX + ARGUMENT_GOOGLE_TILES + "] [" + ARGUMENT_PREFIX + ARGUMENT_BOUNDING_BOX + " minLon,minLat,maxLon,maxLat] [" + ARGUMENT_PREFIX + ARGUMENT_EPSG + " epsg] [" + ARGUMENT_PREFIX + ARGUMENT_TILE_WIDTH + " width] [" + ARGUMENT_PREFIX + ARGUMENT_TILE_HEIGHT + " height] [" + ARGUMENT_PREFIX + ARGUMENT_TILE_SCALE + " scale] [" + ARGUMENT_PREFIX + ARGUMENT_POINT_RADIUS + " radius] [" + ARGUMENT_PREFIX + ARGUMENT_POINT_COLOR + " color] [" + ARGUMENT_PREFIX + ARGUMENT_POINT_ICON + " image_file] [" + ARGUMENT_PREFIX + ARGUMENT_ICON_WIDTH + " width] [" + ARGUMENT_PREFIX + ARGUMENT_ICON_HEIGHT + " height] [" + ARGUMENT_PREFIX + ARGUMENT_POINT_CENTER_ICON + "] [" + ARGUMENT_PREFIX + ARGUMENT_LINE_STROKE_WIDTH + " stroke_width] [" + ARGUMENT_PREFIX + ARGUMENT_LINE_COLOR + " color] [" + ARGUMENT_PREFIX + ARGUMENT_POLYGON_STROKE_WIDTH + " stroke_width] [" + ARGUMENT_PREFIX + ARGUMENT_POLYGON_COLOR + " color] [" + ARGUMENT_PREFIX + ARGUMENT_FILL_POLYGON + "] [" + ARGUMENT_PREFIX + ARGUMENT_POLYGON_FILL_COLOR + " color] [" + ARGUMENT_PREFIX + ARGUMENT_SIMPLIFY_GEOMETRIES + " true|false] [" + ARGUMENT_PREFIX + ARGUMENT_IGNORE_GEOPACKAGE_STYLES + " true|false] feature_geopackage_file feature_table tile_geopackage_file tile_table min_zoom max_zoom");
    System.out.println();
    System.out.println("DESCRIPTION");
    System.out.println();
    System.out.println("\tGenerates tiles from a GeoPackage feature table into a tile table");
    System.out.println();
    System.out.println("ARGUMENTS");
    System.out.println();
    /* Optional arguments, each printed as: flag line, description line, blank line. */
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_MAX_FEATURES_PER_TILE + " max_features_per_tile");
    System.out.println("\t\tMax features to generate into a tile before generating a numbered feature count tile (default is " + DEFAULT_MAX_FEATURES_PER_TILE + ", use -1 for no max)");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_COMPRESS_FORMAT + " compress_format");
    System.out.println("\t\tTile compression image format: png, jpg, jpeg (default is no compression, native format)");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_COMPRESS_QUALITY + " compress_quality");
    System.out.println("\t\tTile compression image quality between 0.0 and 1.0 (not valid for png, default is 1.0)");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_GOOGLE_TILES);
    System.out.println("\t\tGenerate tiles in Google tile format (default is GeoPackage format with minimum bounds)");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_BOUNDING_BOX + " minLon,minLat,maxLon,maxLat");
    System.out.println("\t\tOnly tiles overlapping the bounding box are requested (default is the world)");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_EPSG + " epsg");
    System.out.println("\t\tEPSG number of the provided bounding box (default is 4326, WGS 84)");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_TILE_WIDTH + " width");
    System.out.println("\t\tWidth used when creating each tile (default is " + featureTiles.getTileWidth() + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_TILE_HEIGHT + " height");
    System.out.println("\t\tHeight used when creating each tile (default is " + featureTiles.getTileHeight() + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_TILE_SCALE + " scale");
    System.out.println("\t\tScale factor used when creating each tile (default is " + featureTiles.getScale() + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_POINT_RADIUS + " radius");
    System.out.println("\t\tFloating point circle radius used when drawing points (default is " + featureTiles.getPointRadius() + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_POINT_COLOR + " color");
    System.out.println("\t\tColor used when drawing points formatted as one of: [ name | r,g,b | r,g,b,a ] (default is " + colorString(featureTiles.getPointColor()) + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_POINT_ICON + " image_file");
    System.out.println("\t\tImage file containing image to use when drawing points in place of a drawn circle");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_ICON_WIDTH + " width");
    System.out.println("\t\tPoint icon display width (default is actual icon width)");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_ICON_HEIGHT + " height");
    System.out.println("\t\tPoint icon display height (default is actual icon height)");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_POINT_CENTER_ICON);
    System.out.println("\t\tDraw point icons by centering the icon image to the location (default is pinning to bottom center)");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_LINE_STROKE_WIDTH + " stroke_width");
    System.out.println("\t\tFloating point stroke width when drawing lines (default is " + featureTiles.getLineStrokeWidth() + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_LINE_COLOR + " color");
    System.out.println("\t\tColor used when drawing lines formatted as one of: [ name | r,g,b | r,g,b,a ] (default is " + colorString(featureTiles.getLineColor()) + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_POLYGON_STROKE_WIDTH + " stroke_width");
    System.out.println("\t\tFloating point stroke width when drawing polygons (default is " + featureTiles.getPolygonStrokeWidth() + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_POLYGON_COLOR + " color");
    System.out.println("\t\tColor used when drawing polygons formatted as one of: [ name | r,g,b | r,g,b,a ] (default is " + colorString(featureTiles.getPolygonColor()) + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_FILL_POLYGON);
    System.out.println("\t\tFill polygons with color (default is " + featureTiles.isFillPolygon() + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_POLYGON_FILL_COLOR + " color");
    System.out.println("\t\tColor used when filling polygons formatted as one of: [ name | r,g,b | r,g,b,a ] (default is " + colorString(featureTiles.getPolygonFillColor()) + ")");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_SIMPLIFY_GEOMETRIES + " true|false");
    System.out.println("\t\tFlag indicating whether geometries should be simplified with a similar curve with fewer points before drawn (default is true)");
    System.out.println();
    System.out.println("\t" + ARGUMENT_PREFIX + ARGUMENT_IGNORE_GEOPACKAGE_STYLES + " true|false");
    System.out.println("\t\tFlag indicating whether styles saved within the GeoPackage should be ignored (default is false)");
    System.out.println();
    /* Required positional arguments. */
    System.out.println("\tfeature_geopackage_file");
    System.out.println("\t\tpath to the GeoPackage file containing the feature table to generate tiles from");
    System.out.println();
    System.out.println("\tfeature_table");
    System.out.println("\t\tfeature table name within the GeoPackage file to generate tiles from");
    System.out.println();
    System.out.println("\ttile_geopackage_file");
    System.out.println("\t\tpath to the GeoPackage file to create with tiles, or existing file to update");
    System.out.println();
    System.out.println("\ttile_table");
    System.out.println("\t\ttile table name within the GeoPackage file to create or update");
    System.out.println();
    System.out.println("\tmin_zoom");
    System.out.println("\t\tMinimum zoom level to request tiles for");
    System.out.println();
    System.out.println("\tmax_zoom");
    System.out.println("\t\tMaximum zoom level to request tiles for");
    System.out.println();
  }
}
public class CmsFileUtil { /** * Reads the specified number of bytes from the given input stream , conditionally closes the stream
* and returns the result in an array . < p >
* @ param in the input stream to read the bytes from
* @ param size the number of bytes to read
* @ param closeStream if true the given stream will be closed
* @ return the byte content read from the input stream
* @ throws IOException in case of errors in the underlying java . io methods used */
public static byte [ ] readFully ( InputStream in , int size , boolean closeStream ) throws IOException { } } | // create the byte array to hold the data
byte [ ] bytes = new byte [ size ] ; // read in the bytes
int offset = 0 ; try { int numRead = 0 ; while ( offset < size ) { numRead = in . read ( bytes , offset , size - offset ) ; if ( numRead >= 0 ) { offset += numRead ; } else { break ; } } } finally { // close the input stream
if ( closeStream ) { in . close ( ) ; } } // ensure all the bytes have been read in
if ( offset < bytes . length ) { throw new IOException ( "Could not read requested " + size + " bytes from input stream" ) ; } return bytes ; |
public class Kryo {
  /**
   * Returns {@link #REF} if a reference to a previously read object was read, which is stored
   * in {@link #readObject}. Returns a stack size (> 0) if a reference ID has been put on the
   * stack.
   *
   * Wire format: when {@code mayBeNull}, the first varint may be NULL (object is null).
   * Otherwise the varint is either NOT_NULL (first occurrence of the object) or an
   * offset-by-2 reference id pointing at an already-read object.
   */
  int readReferenceOrNull(Input input, Class type, boolean mayBeNull) {
    // Reference tracking is keyed on wrapper classes, never primitives.
    if (type.isPrimitive()) type = getWrapperClass(type);
    boolean referencesSupported = referenceResolver.useReferences(type);
    int id;
    if (mayBeNull) {
      id = input.readVarInt(true);
      if (id == NULL) {
        if (TRACE || (DEBUG && depth == 1)) log("Read", null, input.position());
        readObject = null;
        return REF;
      }
      if (!referencesSupported) {
        // Non-null and references disabled for this type: mark the slot and let the
        // caller deserialize a fresh object.
        readReferenceIds.add(NO_REF);
        return readReferenceIds.size;
      }
    } else {
      if (!referencesSupported) {
        readReferenceIds.add(NO_REF);
        return readReferenceIds.size;
      }
      // Only consume the varint when references are in play; otherwise nothing was written.
      id = input.readVarInt(true);
    }
    if (id == NOT_NULL) {
      if (TRACE) trace("kryo", "Read: <not null>" + pos(input.position()));
      // First time object has been encountered.
      id = referenceResolver.nextReadId(type);
      if (TRACE) trace("kryo", "Read initial reference " + id + ": " + className(type) + pos(input.position()));
      readReferenceIds.add(id);
      return readReferenceIds.size;
    }
    // The id is an object reference.
    id -= 2; // -2 because 0 and 1 are used for NULL and NOT_NULL.
    readObject = referenceResolver.getReadObject(type, id);
    if (DEBUG) debug("kryo", "Read reference " + id + ": " + string(readObject) + pos(input.position()));
    return REF;
  }
}
public class DefaultExternalContentManager { /** * Convert the given HTTP < code > Headers < / code > to an array of
* < code > Property < / code > objects . */
private static Property [ ] toPropertyArray ( Header [ ] headers ) { } } | Property [ ] props = new Property [ headers . length ] ; for ( int i = 0 ; i < headers . length ; i ++ ) { props [ i ] = new Property ( ) ; props [ i ] . name = headers [ i ] . getName ( ) ; props [ i ] . value = headers [ i ] . getValue ( ) ; } return props ; |
public class ShellInterpreter { /** * Processes one command and returns . */
private boolean handleCommandImpl ( List < String > arguments ) throws IOException , SQLException , Throwable { } } | String [ ] myargs = new String [ arguments . size ( ) ] ; arguments . toArray ( myargs ) ; return handleCommandImpl ( myargs ) ; |
public class CashbillServiceImp { /** * / * ( non - Javadoc )
* @ see com . popbill . api . CashbillService # getInfos ( java . lang . String , java . lang . String [ ] ) */
@ Override public CashbillInfo [ ] getInfos ( String CorpNum , String [ ] MgtKeyList ) throws PopbillException { } } | if ( MgtKeyList == null || MgtKeyList . length == 0 ) throw new PopbillException ( - 99999999 , "관리번호배열이 입력되지 않았습니다." ) ; String PostData = toJsonString ( MgtKeyList ) ; return httppost ( "/Cashbill/States" , CorpNum , PostData , null , CashbillInfo [ ] . class ) ; |
public class ThreadContextDescriptorImpl { /** * Raises IllegalStateException because the application or application component is unavailable .
* @ param jeeName The metadata identifier , which is the JEE name ( Application / Module / Component name with parts separated by hash signs ) .
* For now , we ' ll parse the string and issue the appropriate message . This may not be appropriate in the future .
* @ param taskName identifier for the task or contextual operation that cannot be performed
* @ throws IllegalStateException indicating that the task cannot run because the application or application component is not available . */
public static void notAvailable ( String jeeName , String taskName ) { } } | String message ; int modSepIndex = jeeName . indexOf ( '#' ) ; if ( modSepIndex == - 1 ) { message = Tr . formatMessage ( tc , "CWWKC1011.app.unavailable" , taskName , jeeName ) ; } else { String application = jeeName . substring ( 0 , modSepIndex ) ; int compSepIndex = jeeName . indexOf ( '#' , modSepIndex + 1 ) ; if ( compSepIndex == - 1 ) { message = Tr . formatMessage ( tc , "CWWKC1012.module.unavailable" , taskName , jeeName . substring ( modSepIndex + 1 ) , application ) ; } else { String module = jeeName . substring ( modSepIndex + 1 , compSepIndex ) ; message = Tr . formatMessage ( tc , "CWWKC1013.component.unavailable" , taskName , jeeName . substring ( compSepIndex + 1 ) , module , application ) ; } } throw new IllegalStateException ( message ) ; |
public class QueueManager {
  /**
   * Return true if the given {@link QueueManager.QueueOperation} can be performed by the
   * specified user on the specified job in the given queue. An operation is allowed either
   * if the owner of the job is the user performing the task, all users are provided access
   * for this operation, or if either the user or any of the groups specified is provided
   * access. If the {@link QueueManager.QueueOperation} is not job specific then the job
   * parameter is ignored.
   *
   * @param queueName Queue on which the operation needs to be performed.
   * @param job The {@link JobInProgress} on which the operation is being performed.
   * @param oper The operation to perform
   * @param ugi The user and groups who wish to perform the operation.
   * @return true if the operation is allowed, false otherwise.
   */
  public synchronized boolean hasAccess(String queueName, JobInProgress job, QueueOperation oper, UserGroupInformation ugi) {
    // With ACLs disabled every operation is permitted.
    if (!aclsEnabled) {
      return true;
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("checking access for : " + toFullPropertyName(queueName, oper.getAclName()));
    }
    // Job owners may always perform owner-allowed operations on their own jobs.
    if (oper.isJobOwnerAllowed()) {
      if (job != null && job.getJobConf().getUser().equals(ugi.getUserName())) {
        return true;
      }
    }
    // No ACL configured for this queue/operation: deny.
    AccessControlList acl = aclsMap.get(toFullPropertyName(queueName, oper.getAclName()));
    if (acl == null) {
      return false;
    }
    // Check the ACL list
    boolean allowed = acl.allAllowed();
    if (!allowed) {
      // Check the allowed users list
      if (acl.getUsers().contains(ugi.getUserName())) {
        allowed = true;
      } else {
        // Check the allowed groups list
        Set<String> allowedGroups = acl.getGroups();
        for (String group : ugi.getGroupNames()) {
          if (allowedGroups.contains(group)) {
            allowed = true;
            break;
          }
        }
      }
    }
    return allowed;
  }
}
public class Expect4j { /** * Attempts to detect the provided pattern and executes the provided
* { @ link Closure } if it is detected .
* @ param pattern the pattern to find in the reader stream
* @ param handler the handler to execute if the pattern is found
* @ return the number of times the pattern is found ,
* or an error code
* @ throws MalformedPatternException if the pattern is invalid
* @ throws Exception if a generic error is encountered while
* processing the { @ link Closure } */
public int expect ( String pattern , Closure handler ) throws MalformedPatternException , Exception { } } | logger . trace ( "Searching for '" + pattern + "' in the reader stream and executing Closure " + handler + " if found" ) ; PatternPair match = new GlobMatch ( pattern , handler ) ; List < Match > list = new ArrayList < > ( ) ; list . add ( match ) ; return expect ( list ) ; |
public class Pattern {
  /**
   * Splits the given input sequence around matches of this pattern.
   *
   * <p>The returned array contains each substring of the input terminated by a match of
   * this pattern or by the end of the input, in input order. If the pattern matches no
   * subsequence, the result is a single-element array holding the whole input as a string.
   *
   * <p>The {@code limit} parameter controls how many times the pattern is applied:
   * a positive limit {@code n} applies the pattern at most {@code n - 1} times, the
   * array has at most {@code n} entries, and the last entry holds all input after the
   * last matched delimiter; a negative limit applies the pattern as often as possible;
   * a limit of zero also applies it as often as possible but discards trailing empty
   * strings. For example, splitting {@code "boo:and:foo"} on {@code ":"} with limit 2
   * yields {@code {"boo", "and:foo"}}, and on {@code "o"} with limit 0 yields
   * {@code {"b", "", ":and:f"}}.
   *
   * @param input the character sequence to be split
   * @param limit the result threshold, as described above
   * @return the array of strings computed by splitting the input around matches of
   *         this pattern
   */
  public String[] split(CharSequence input, int limit) {
    int index = 0;
    boolean matchLimited = limit > 0;
    ArrayList<String> matchList = new ArrayList<String>();
    regexodus.Matcher m = new regexodus.Matcher(internal, input);
    // Add segments before each match found
    while (m.find()) {
      if (!matchLimited || matchList.size() < limit - 1) {
        String match = input.subSequence(index, m.start()).toString();
        matchList.add(match);
        index = m.end();
      } else if (matchList.size() == limit - 1) {
        // last one: consume the rest of the input as the final segment
        String match = input.subSequence(index, input.length()).toString();
        matchList.add(match);
        index = m.end();
      }
    }
    // If no match was found, return this
    if (index == 0)
      return new String[] { input.toString() };
    // Add remaining segment (skipped when a positive limit already filled the list)
    if (!matchLimited || matchList.size() < limit)
      matchList.add(input.subSequence(index, input.length()).toString());
    // Construct result; limit == 0 drops trailing empty strings
    int resultSize = matchList.size();
    if (limit == 0)
      while (resultSize > 0 && matchList.get(resultSize - 1).equals(""))
        resultSize--;
    String[] result = new String[resultSize];
    return matchList.subList(0, resultSize).toArray(result);
  }
}
public class ClassObjectTypeConf {
  /**
   * This will return the package name - if the package is null, it will work it out
   * from the class name (this is in cases where funky classloading is used).
   *
   * @param clazz the class whose package name is wanted
   * @param pkg the class's Package, possibly null under unusual classloaders
   * @return the package name, or an empty string when none can be determined
   */
  public static String getPackageName(Class<?> clazz, Package pkg) {
    if (pkg != null) {
      return pkg.getName();
    }
    // Fall back to stripping the simple name off the fully qualified class name.
    String className = clazz.getName();
    int lastDot = className.lastIndexOf('.');
    return lastDot < 0 ? "" : className.substring(0, lastDot);
  }
}
public class PostgresDdlParser {
  /**
   * {@inheritDoc}
   *
   * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseGrantStatement(org.modeshape.sequencer.ddl.DdlTokenStream,
   *      org.modeshape.sequencer.ddl.node.AstNode)
   */
  @Override
  protected AstNode parseGrantStatement(DdlTokenStream tokens, AstNode parentNode) throws ParsingException {
    assert tokens != null;
    assert parentNode != null;
    assert tokens.matches(GRANT);
    markStartOfStatement(tokens);
    // NOTE: this first pass does not handle repeating name elements after each object-type
    // declaration. PostgreSQL allows e.g.
    //   GRANT { privileges | ALL [PRIVILEGES] } ON [TABLE] tablename [, ...]
    //     TO { [GROUP] rolename | PUBLIC } [, ...] [WITH GRANT OPTION]
    // where "ON [TABLE] tablename [, ...]" permits granting on multiple tables at once,
    // unlike the SQL-92 standard. Supported object forms after ON:
    //   [TABLE] name | SEQUENCE | DATABASE | FOREIGN DATA WRAPPER | FOREIGN SERVER
    //   | FUNCTION | LANGUAGE | SCHEMA | TABLESPACE
    // Role grants use: GRANT roleName [{, roleName}*] TO grantees
    // Privilege types: ALL PRIVILEGES | privilege-list
    List<AstNode> grantNodes = new ArrayList<AstNode>();
    boolean allPrivileges = false;
    List<AstNode> privileges = new ArrayList<AstNode>();
    tokens.consume("GRANT");
    if (tokens.canConsume("ALL", "PRIVILEGES")) {
      allPrivileges = true;
    } else {
      parseGrantPrivileges(tokens, privileges);
    }
    if (allPrivileges || !privileges.isEmpty()) {
      // Privilege grant: dispatch on the object type following ON.
      tokens.consume("ON");
      if (tokens.canConsume("SCHEMA")) {
        grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_SCHEMA_STATEMENT);
      } else if (tokens.canConsume("SEQUENCE")) {
        grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_SEQUENCE_STATEMENT);
      } else if (tokens.canConsume("TABLESPACE")) {
        grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_TABLESPACE_STATEMENT);
      } else if (tokens.canConsume("DATABASE")) {
        grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_DATABASE_STATEMENT);
      } else if (tokens.canConsume("FUNCTION")) {
        grantNodes = parseFunctionAndParameters(tokens, parentNode);
      } else if (tokens.canConsume("LANGUAGE")) {
        grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_LANGUAGE_STATEMENT);
      } else if (tokens.canConsume("FOREIGN", "DATA", "WRAPPER")) {
        grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_FOREIGN_DATA_WRAPPER_STATEMENT);
      } else if (tokens.canConsume("FOREIGN", "SERVER")) {
        grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_FOREIGN_SERVER_STATEMENT);
      } else {
        tokens.canConsume(TABLE); // OPTIONAL
        String name = parseName(tokens);
        AstNode grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT);
        grantNodes.add(grantNode);
        while (tokens.canConsume(COMMA)) {
          // Assume more names here
          name = parseName(tokens);
          grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT);
          grantNodes.add(grantNode);
        }
      }
    } else {
      // Assume ROLES here: role [, ...]
      AstNode grantNode = nodeFactory().node("roles", parentNode, TYPE_GRANT_ROLES_STATEMENT);
      grantNodes.add(grantNode);
      do {
        String role = parseName(tokens);
        nodeFactory().node(role, grantNode, ROLE);
      } while (tokens.canConsume(COMMA));
    }
    tokens.consume("TO");
    List<String> grantees = new ArrayList<String>();
    do {
      String grantee = parseName(tokens);
      grantees.add(grantee);
    } while (tokens.canConsume(COMMA));
    boolean withGrantOption = false;
    if (tokens.canConsume("WITH", "GRANT", "OPTION")) {
      withGrantOption = true;
    }
    // Set all properties and children on grant nodes
    for (AstNode grantNode : grantNodes) {
      List<AstNode> copyOfPrivileges = copyOfPrivileges(privileges);
      // Attach privileges to grant node
      for (AstNode node : copyOfPrivileges) {
        node.setParent(grantNode);
      }
      if (allPrivileges) {
        grantNode.setProperty(ALL_PRIVILEGES, allPrivileges);
      }
      for (String grantee : grantees) {
        nodeFactory().node(grantee, grantNode, GRANTEE);
      }
      if (withGrantOption) {
        AstNode optionNode = nodeFactory().node("withGrant", grantNode, TYPE_STATEMENT_OPTION);
        optionNode.setProperty(VALUE, "WITH GRANT OPTION");
      }
    }
    AstNode firstGrantNode = grantNodes.get(0);
    markEndOfStatement(tokens, firstGrantNode);
    // Update additional grant nodes with statement info copied from the first node
    for (int i = 1; i < grantNodes.size(); i++) {
      AstNode grantNode = grantNodes.get(i);
      grantNode.setProperty(DDL_EXPRESSION, firstGrantNode.getProperty(DDL_EXPRESSION));
      grantNode.setProperty(DDL_LENGTH, firstGrantNode.getProperty(DDL_LENGTH));
      grantNode.setProperty(DDL_START_LINE_NUMBER, firstGrantNode.getProperty(DDL_START_LINE_NUMBER));
      grantNode.setProperty(DDL_START_CHAR_INDEX, firstGrantNode.getProperty(DDL_START_CHAR_INDEX));
      grantNode.setProperty(DDL_START_COLUMN_NUMBER, firstGrantNode.getProperty(DDL_START_COLUMN_NUMBER));
    }
    return grantNodes.get(0);
  }
}
public class ProjectsInner { /** * Get projects in a service .
* The project resource is a nested resource representing a stored migration project . This method returns a list of projects owned by a service resource .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; ProjectInner & gt ; object */
public Observable < Page < ProjectInner > > listNextAsync ( final String nextPageLink ) { } } | return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < ProjectInner > > , Page < ProjectInner > > ( ) { @ Override public Page < ProjectInner > call ( ServiceResponse < Page < ProjectInner > > response ) { return response . body ( ) ; } } ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.