signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class DiscoverHardcodedIPAddressRuleProvider { /** * if this is a maven file , checks to see if " version " tags match the discovered text ; if the discovered text does match something in a version
* tag , it is likely a version , not an IP address
* @ param context
* @ param model
* @ return */
private boolean isMavenVersionTag ( GraphContext context , FileLocationModel model ) { } }
|
if ( isMavenFile ( context , model ) ) { Document doc = ( ( XmlFileModel ) model . getFile ( ) ) . asDocument ( ) ; for ( Element elm : $ ( doc ) . find ( "version" ) ) { String text = StringUtils . trim ( $ ( elm ) . text ( ) ) ; if ( StringUtils . equals ( text , model . getSourceSnippit ( ) ) ) { return true ; } } } return false ;
|
public class Dist { /** * Adds the pipe to the distributor object . */
public void attach ( Pipe pipe ) { } }
|
// If we are in the middle of sending a message , we ' ll add new pipe
// into the list of eligible pipes . Otherwise we add it to the list
// of active pipes .
if ( more ) { pipes . add ( pipe ) ; Collections . swap ( pipes , eligible , pipes . size ( ) - 1 ) ; eligible ++ ; } else { pipes . add ( pipe ) ; Collections . swap ( pipes , active , pipes . size ( ) - 1 ) ; active ++ ; eligible ++ ; }
|
public class SocketIOWithTimeout {
    /**
     * The contract is similar to {@link SocketChannel#connect(SocketAddress)}
     * with a timeout.
     *
     * @see SocketChannel#connect(SocketAddress)
     * @param channel - this should be a {@link SelectableChannel}
     * @param endpoint the remote address to connect to
     * @param timeout connect timeout in milliseconds; when {@code timeout <= 0}
     *        no deadline is computed — presumably the selector then waits
     *        indefinitely; confirm against the selector's select() semantics
     * @throws IOException on connect failure or {@link SocketTimeoutException}
     *         when the deadline elapses
     */
    static void connect(SocketChannel channel, SocketAddress endpoint, int timeout) throws IOException {
        // The channel must be non-blocking for select()-driven connects;
        // restore the caller's blocking mode in the finally block.
        boolean blockingOn = channel.isBlocking();
        if (blockingOn) {
            channel.configureBlocking(false);
        }
        try {
            // Fast path: connect() may complete immediately.
            if (channel.connect(endpoint)) {
                return;
            }
            long timeoutLeft = timeout;
            long endTime = (timeout > 0) ? (System.currentTimeMillis() + timeout) : 0;
            while (true) {
                // we might have to call finishConnect() more than once
                // for some channels (with user level protocols)
                int ret = selector.select((SelectableChannel) channel, SelectionKey.OP_CONNECT, timeoutLeft);
                if (ret > 0 && channel.finishConnect()) {
                    return;
                }
                // Either select() timed out (ret == 0) or the overall deadline passed.
                if (ret == 0 || (timeout > 0 && (timeoutLeft = (endTime - System.currentTimeMillis())) <= 0)) {
                    throw new SocketTimeoutException(timeoutExceptionString(channel, timeout, SelectionKey.OP_CONNECT));
                }
            }
        } catch (IOException e) {
            // javadoc for SocketChannel.connect() says channel should be closed.
            try {
                channel.close();
            } catch (IOException ignored) {
                // best effort: the original connect failure is the error to report
            }
            throw e;
        } finally {
            // Restore blocking mode only if the channel survived.
            if (blockingOn && channel.isOpen()) {
                channel.configureBlocking(true);
            }
        }
    }
}
|
public class RubyIO { /** * Closes this IO . */
public void close ( ) { } }
|
try { raFile . close ( ) ; } catch ( IOException e ) { logger . log ( Level . SEVERE , null , e ) ; throw new RuntimeException ( e ) ; }
|
public class TextUtil { /** * Remove the accents inside the specified string .
* @ param text is the string into which the accents must be removed .
* @ return the given string without the accents */
@ Pure public static String removeAccents ( String text ) { } }
|
final Map < Character , String > map = getAccentTranslationTable ( ) ; if ( ( map == null ) || ( map . isEmpty ( ) ) ) { return text ; } return removeAccents ( text , map ) ;
|
public class AbstractWComponent { /** * { @ inheritDoc } */
@ Override public void showErrorIndicators ( final List < Diagnostic > diags ) { } }
|
// Don ' t show indicators if it ' s invisible .
if ( isVisible ( ) ) { // Show indicators for this component .
showErrorIndicatorsForComponent ( diags ) ; // Show indicators for its children .
List < WComponent > children = getComponentModel ( ) . getChildren ( ) ; if ( children != null ) { final int size = children . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { children . get ( i ) . showErrorIndicators ( diags ) ; } } }
|
public class HttpHealthCheckedEndpointGroup { /** * Creates a new { @ link HttpHealthCheckedEndpointGroup } instance .
* @ deprecated Use { @ link HttpHealthCheckedEndpointGroupBuilder } . */
@ Deprecated public static HttpHealthCheckedEndpointGroup of ( EndpointGroup delegate , String healthCheckPath , Duration healthCheckRetryInterval ) { } }
|
return of ( ClientFactory . DEFAULT , delegate , healthCheckPath , healthCheckRetryInterval ) ;
|
public class AbstractWebInteraction { /** * Gets the first element .
* @ param elems the elems
* @ return the first element */
protected WebElement getFirstElement ( Elements elems ) { } }
|
DocumentWebElement documentWebElement = Iterables . getFirst ( elems . as ( InternalWebElements . class ) . wrappedNativeElements ( ) , null ) ; return documentWebElement == null ? null : documentWebElement . getWrappedWebElement ( ) ;
|
public class Internal { /** * Sets the time in a raw data table row key
* @ param row The row to modify
* @ param base _ time The base time to store
* @ since 2.3 */
public static void setBaseTime ( final byte [ ] row , int base_time ) { } }
|
Bytes . setInt ( row , base_time , Const . SALT_WIDTH ( ) + TSDB . metrics_width ( ) ) ;
|
public class InternalPureXbaseParser {
    /**
     * InternalPureXbase.g:1213:1: ruleOpAnd returns [AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken()] : kw = '&&' ;
     *
     * NOTE: generated ANTLR parser code — the control flow (backtracking checks,
     * failed-state early returns) follows the generator's template and should not
     * be hand-edited.
     */
    public final AntlrDatatypeRuleToken ruleOpAnd() throws RecognitionException {
        AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken();
        Token kw = null;
        enterRule();
        try {
            // InternalPureXbase.g:1219:2: ( kw = '&&' )
            // InternalPureXbase.g:1220:2: kw = '&&'
            {
                // Token type 32 corresponds to the '&&' keyword in the generated token table.
                kw = (Token) match(input, 32, FOLLOW_2);
                if (state.failed) return current;
                if (state.backtracking == 0) {
                    current.merge(kw);
                    newLeafNode(kw, grammarAccess.getOpAndAccess().getAmpersandAmpersandKeyword());
                }
            }
            if (state.backtracking == 0) {
                leaveRule();
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: resync and keep the tokens we skipped.
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
|
public class RaftService { /** * this method is idempotent */
@ Override public ICompletableFuture < Void > forceDestroyCPGroup ( String groupName ) { } }
|
return invocationManager . invoke ( getMetadataGroupId ( ) , new ForceDestroyRaftGroupOp ( groupName ) ) ;
|
public class SqlClosureElf { /** * Counts the number of rows for the given query .
* @ param clazz the class of the object to query .
* @ param clause The conditional part of a SQL where clause .
* @ param args The query parameters used to find the list of objects .
* @ param < T > the type of object to query .
* @ return The result count . */
public static < T > int countObjectsFromClause ( Class < T > clazz , String clause , Object ... args ) { } }
|
return SqlClosure . sqlExecute ( c -> OrmElf . countObjectsFromClause ( c , clazz , clause , args ) ) ;
|
public class CmsEntity {
    /**
     * Sets the given attribute value at the given index.<p>
     *
     * @param attributeName the attribute name
     * @param value the attribute value
     * @param index the value index
     */
    public void setAttributeValue(String attributeName, CmsEntity value, int index) {
        // An attribute may not hold both a simple value and an entity value.
        if (m_simpleAttributes.containsKey(attributeName)) {
            throw new RuntimeException("Attribute already exists with a simple type value.");
        }
        if (!m_entityAttributes.containsKey(attributeName)) {
            // No values yet for this attribute: only index 0 is a valid insert position.
            if (index != 0) {
                throw new IndexOutOfBoundsException();
            } else {
                addAttributeValue(attributeName, value);
            }
        } else {
            // Replace-in-place: when a value exists at 'index', detach its
            // change handler before removing it, then insert the new value.
            if (m_entityAttributes.get(attributeName).size() > index) {
                CmsEntity child = m_entityAttributes.get(attributeName).remove(index);
                removeChildChangeHandler(child);
            }
            m_entityAttributes.get(attributeName).add(index, value);
            fireChange();
        }
    }
}
|
public class AbstractBigtableAdmin { /** * { @ inheritDoc } */
@ Override public TableName [ ] listTableNamesByNamespace ( String name ) throws IOException { } }
|
if ( provideWarningsForNamespaces ( ) ) { LOG . warn ( "listTableNamesByNamespace is a no-op" ) ; return new TableName [ 0 ] ; } else { throw new UnsupportedOperationException ( "listTableNamesByNamespace" ) ; // TODO
}
|
public class ApiOvhHostingprivateDatabase { /** * Get the availables versions for this private database
* REST : GET / hosting / privateDatabase / { serviceName } / availableVersions
* @ param serviceName [ required ] The internal name of your private database */
public ArrayList < OvhAvailableVersionEnum > serviceName_availableVersions_GET ( String serviceName ) throws IOException { } }
|
String qPath = "/hosting/privateDatabase/{serviceName}/availableVersions" ; StringBuilder sb = path ( qPath , serviceName ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t4 ) ;
|
public class DefaultMailMessageParser { /** * Returns the target address from the provided mail object .
* @ param mailMessage
* The mail message with the fax data
* @ return The target address
* @ throws MessagingException
* Any exception while handling the mail message */
protected String getTargetAddress ( Message mailMessage ) throws MessagingException { } }
|
// by default the target address is taken from the mail subject which
// is expected to be in the format of : fax : < number >
String subject = mailMessage . getSubject ( ) ; String targetAddress = null ; if ( ( subject != null ) && ( subject . startsWith ( "fax:" ) ) && ( subject . length ( ) > 4 ) ) { targetAddress = subject . substring ( 4 ) ; } return targetAddress ;
|
public class Config { /** * Checks if the specified key is defined in this Config file .
* @ param key The key of the property to be checked .
* @ return { @ code true } if a property with the specified key is found ,
* { @ code false } otherwise . */
public boolean contains ( String key ) { } }
|
return onlineProps . getProperty ( key ) != null || offlineProps . getProperty ( key ) != null ;
|
public class SofaConfigs { /** * 获取配置值
* @ param appName 应用名
* @ param key 配置项
* @ param defaultValue 默认值
* @ return 配置 */
public static String getStringValue ( String appName , String key , String defaultValue ) { } }
|
String ret = getStringValue0 ( appName , key ) ; return StringUtils . isEmpty ( ret ) ? defaultValue : ret . trim ( ) ;
|
public class AppenderFile { /** * Merged content of internal log files .
* @ param mergedFile Instance of a file to merge logs into . */
private void mergeFiles ( @ NonNull File mergedFile ) throws IOException { } }
|
Context context = appContextRef . get ( ) ; if ( context != null ) { FileWriter fw ; BufferedWriter bw ; fw = new FileWriter ( mergedFile , true ) ; bw = new BufferedWriter ( fw ) ; File dir = context . getFilesDir ( ) ; for ( int i = 1 ; i <= maxFiles ; i ++ ) { File file = new File ( dir , name ( i ) ) ; if ( file . exists ( ) ) { FileInputStream fis ; try { fis = new FileInputStream ( file ) ; BufferedReader br = new BufferedReader ( new InputStreamReader ( fis ) ) ; // noinspection TryFinallyCanBeTryWithResources
try { String aLine ; while ( ( aLine = br . readLine ( ) ) != null ) { bw . write ( aLine ) ; bw . newLine ( ) ; } } finally { br . close ( ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } } } bw . close ( ) ; }
|
public class Builder { /** * adds an attribute to the UNIQUE constraint
* @ param attribute
* @ return */
public Builder add ( Attribute attribute ) { } }
|
if ( relation != attribute . getRelation ( ) ) throw new IllegalArgumentException ( "Unique Key requires the same table in all attributes: " + relation + " " + attribute ) ; builder . add ( attribute ) ; return this ;
|
public class ViaCEPClient { /** * Executa a consulta de endereços a partir da UF , localidade e logradouro
* @ param uf Unidade Federativa . Precisa ter 2 caracteres .
* @ param localidade Localidade ( p . e . município ) . Precisa ter ao menos 3 caracteres .
* @ param logradouro Logradouro ( p . e . rua , avenida , estrada ) . Precisa ter ao menos 3 caracteres .
* @ return Os endereços encontrado para os dados enviados , nunca < code > null < / code > . Caso não sejam encontrados endereços , uma lista vazia é retornada .
* @ throws IOException em casos de erro de conexão .
* @ throws IllegalArgumentException para localidades e logradouros com tamanho menor do que 3 caracteres . */
public List < ViaCEPEndereco > getEnderecos ( String uf , String localidade , String logradouro ) throws IOException { } }
|
if ( uf == null || uf . length ( ) != 2 ) { throw new IllegalArgumentException ( "UF inválida - deve conter 2 caracteres: " + uf ) ; } if ( localidade == null || localidade . length ( ) < 3 ) { throw new IllegalArgumentException ( "Localidade inválida - deve conter pelo menos 3 caracteres: " + localidade ) ; } if ( logradouro == null || logradouro . length ( ) < 3 ) { throw new IllegalArgumentException ( "Logradouro inválido - deve conter pelo menos 3 caracteres: " + logradouro ) ; } String urlString = getHost ( ) + uf + "/" + localidade + "/" + logradouro + "/json/" ; URL url = new URL ( urlString ) ; HttpURLConnection urlConnection = ( HttpURLConnection ) url . openConnection ( ) ; try { InputStream in = new BufferedInputStream ( urlConnection . getInputStream ( ) ) ; List < ViaCEPEndereco > obj = getService ( ) . listOfFrom ( ViaCEPEndereco . class , in ) ; return obj ; } finally { urlConnection . disconnect ( ) ; }
|
public class ConfigurationBuilder {
    /**
     * Build configuration.
     * Validates that every mandatory setting has been supplied, then assembles
     * the immutable {@link Configuration} object.
     *
     * @return result configuration
     */
    @NotNull
    @ObjectiveCName("build")
    public Configuration build() {
        // Fail fast on settings that have no sensible default.
        if (endpoints.size() == 0) {
            throw new RuntimeException("Endpoints not set");
        }
        if (phoneBookProvider == null) {
            throw new RuntimeException("Phonebook Provider not set");
        }
        if (apiConfiguration == null) {
            throw new RuntimeException("Api Configuration not set");
        }
        if (deviceCategory == null) {
            throw new RuntimeException("Device Category not set");
        }
        if (platformType == null) {
            throw new RuntimeException("App Category not set");
        }
        // Trusted keys are optional: without them the server is unauthenticated.
        if (trustedKeys.size() == 0) {
            Log.w("ConfigurationBuilder", "No Trusted keys set. Using anonymous server authentication.");
        }
        // NOTE(review): the Configuration constructor takes all settings
        // positionally; this argument order must stay in sync with the
        // constructor declaration.
        return new Configuration(endpoints.toArray(new ConnectionEndpoint[endpoints.size()]),
                phoneBookProvider, notificationProvider, apiConfiguration,
                enableContactsLogging, enableNetworkLogging, enableFilesLogging,
                deviceCategory, platformType, minDelay, maxDelay, maxFailureCount,
                timeZone, preferredLanguages.toArray(new String[preferredLanguages.size()]),
                customAppName, trustedKeys.toArray(new TrustedKey[trustedKeys.size()]),
                isPhoneBookImportEnabled, isOnClientPrivacyEnabled, callsProvider,
                rawUpdatesHandler, voiceCallsEnabled, videoCallsEnabled,
                isEnabledGroupedChatList, autoJoinGroups.toArray(new String[autoJoinGroups.size()]),
                autoJoinType);
    }
}
|
public class SocketBindingJBossASClient { /** * Sets the port number for the named socket binding found in the named socket binding group .
* If sysPropName is null , this simply sets the port number explicitly to the given port number .
* If sysPropName is not null , this sets the port to the expression " $ { sysPropName : port } " .
* @ param socketBindingGroupName the name of the socket binding group that has the named socket binding
* @ param socketBindingName the name of the socket binding whose port is to be set
* @ param sysPropName the name of the system property whose value is to be the port number
* @ param port the default port number if the sysPropName is not defined
* @ throws Exception any error */
public void setSocketBindingPortExpression ( String socketBindingGroupName , String socketBindingName , String sysPropName , int port ) throws Exception { } }
|
String portValue ; if ( sysPropName != null ) { portValue = "${" + sysPropName + ":" + port + "}" ; } else { portValue = String . valueOf ( port ) ; } Address addr = Address . root ( ) . add ( SOCKET_BINDING_GROUP , socketBindingGroupName , SOCKET_BINDING , socketBindingName ) ; ModelNode request = createWriteAttributeRequest ( PORT , portValue , addr ) ; ModelNode results = execute ( request ) ; if ( ! isSuccess ( results ) ) { throw new FailureException ( results ) ; } return ; // everything is OK
|
public class CalendarThinTableModel {
    /**
     * Get this item.
     * Note: There is no guarantee of the mode of this record; if you want to change it,
     * remember to call makeRowCurrent(i, true).
     *
     * @param i the row index
     * @return the item at the given row, or {@code null} when the index is out of range
     */
    public CalendarItem getItem(int i) {
        // Load the row in read-only mode; the bounds check happens afterwards.
        FieldList item = this.makeRowCurrent(i, false);
        int iRowCount = this.getRowCount();
        // While appending, the trailing in-progress row is not a real record.
        if (this.isAppending()) iRowCount--;
        if (i >= iRowCount) return null;
        if (item instanceof CalendarItem) return (CalendarItem) item;
        // NOTE(review): non-CalendarItem rows are wrapped via getFieldListProxy —
        // presumably that proxy implements CalendarItem; confirm.
        else if (item != null) return this.getFieldListProxy(item);
        return null;
    }
}
|
public class SmilesParser {
    /**
     * Handle fragment grouping of a reaction that specifies certain disconnected components
     * are actually considered a single molecule. Normally used for salts, [Na+].[OH-].
     *
     * @param rxn reaction
     * @param cxstate state
     */
    private void handleFragmentGrouping(IReaction rxn, CxSmilesState cxstate) {
        // repartition/merge fragments
        if (cxstate.fragGroups != null) {
            final int reactant = 1;
            final int agent = 2;
            final int product = 3;

            // note we don't use a list for fragmap as the indexes need to stay consistent
            Map<Integer, IAtomContainer> fragMap = new LinkedHashMap<>();
            Map<IAtomContainer, Integer> roleMap = new HashMap<>();
            // Index every fragment in reactant/agent/product order and remember its role.
            for (IAtomContainer mol : rxn.getReactants().atomContainers()) {
                fragMap.put(fragMap.size(), mol);
                roleMap.put(mol, reactant);
            }
            for (IAtomContainer mol : rxn.getAgents().atomContainers()) {
                fragMap.put(fragMap.size(), mol);
                roleMap.put(mol, agent);
            }
            for (IAtomContainer mol : rxn.getProducts().atomContainers()) {
                fragMap.put(fragMap.size(), mol);
                roleMap.put(mol, product);
            }

            // check validity of group: a fragment index may appear in at most one group
            boolean invalid = false;
            Set<Integer> visit = new HashSet<>();
            for (List<Integer> grouping : cxstate.fragGroups) {
                IAtomContainer dest = fragMap.get(grouping.get(0));
                if (dest == null)
                    continue;
                if (!visit.add(grouping.get(0)))
                    invalid = true;
                for (int i = 1; i < grouping.size(); i++) {
                    if (!visit.add(grouping.get(i)))
                        invalid = true;
                    IAtomContainer src = fragMap.get(grouping.get(i));
                    if (src != null) {
                        // Merge the grouped fragment into the head of the group.
                        dest.add(src);
                        roleMap.put(src, 0); // no-role
                    }
                }
            }

            // Rebuild the reaction only when every group was valid; merged
            // fragments carry role 0 and are dropped by the switch below.
            if (!invalid) {
                rxn.getReactants().removeAllAtomContainers();
                rxn.getAgents().removeAllAtomContainers();
                rxn.getProducts().removeAllAtomContainers();
                for (IAtomContainer mol : fragMap.values()) {
                    switch (roleMap.get(mol)) {
                        case reactant:
                            rxn.getReactants().addAtomContainer(mol);
                            break;
                        case product:
                            rxn.getProducts().addAtomContainer(mol);
                            break;
                        case agent:
                            rxn.getAgents().addAtomContainer(mol);
                            break;
                    }
                }
            }
        }
    }
}
|
public class WriteToBigQuery { /** * Utility to construct an output table reference . */
static TableReference getTable ( String projectId , String datasetId , String tableName ) { } }
|
TableReference table = new TableReference ( ) ; table . setDatasetId ( datasetId ) ; table . setProjectId ( projectId ) ; table . setTableId ( tableName ) ; return table ;
|
public class DataObject { /** * Returns The first , in property order , property which is mandatory and is
* not set . If all mandatory properties have values , returns null .
* @ return */
public String firstMandatoryNullProperty ( ) { } }
|
for ( String property : model . propertyList ) { if ( model . mandatorySet . contains ( property ) && model . defaultMap . get ( property ) == null && data . get ( property ) == null ) { return property ; } } return firstMandatoryNullProperty ( model . propertyList ) ;
|
public class HttpHeaderMap { /** * Add the passed header as a number .
* @ param sName
* Header name . May neither be < code > null < / code > nor empty .
* @ param nValue
* The value to be set . May not be < code > null < / code > . */
public void addLongHeader ( @ Nonnull @ Nonempty final String sName , final long nValue ) { } }
|
_addHeader ( sName , Long . toString ( nValue ) ) ;
|
public class BatchDetachPolicyMarshaller { /** * Marshall the given parameter object . */
public void marshall ( BatchDetachPolicy batchDetachPolicy , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( batchDetachPolicy == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( batchDetachPolicy . getPolicyReference ( ) , POLICYREFERENCE_BINDING ) ; protocolMarshaller . marshall ( batchDetachPolicy . getObjectReference ( ) , OBJECTREFERENCE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class PrimaveraXERFileReader {
    /**
     * Reads the XER file table and row structure ready for processing.
     *
     * @param is input stream
     * @throws MPXJException when the header is invalid or reading fails
     */
    private void processFile(InputStream is) throws MPXJException {
        int line = 1;
        try {
            // Test the header and extract the separator. If this is successful,
            // we reset the stream back as far as we can. The design of the
            // BufferedInputStream class means that we can't get back to character
            // zero, so the first record we will read will get "RMHDR" rather than
            // "ERMHDR" in the first field position.
            BufferedInputStream bis = new BufferedInputStream(is);
            byte[] data = new byte[6];
            data[0] = (byte) bis.read();
            bis.mark(1024);
            // NOTE(review): the return value of read(byte[],int,int) is ignored;
            // a short read would simply make the header comparison below fail.
            bis.read(data, 1, 5);
            if (!new String(data).equals("ERMHDR")) {
                throw new MPXJException(MPXJException.INVALID_FILE);
            }
            bis.reset();
            InputStreamReader reader = new InputStreamReader(bis, getCharset());
            Tokenizer tk = new ReaderTokenizer(reader);
            tk.setDelimiter('\t');
            List<String> record = new ArrayList<String>();
            // Read tab-delimited records until EOF or until processRecord signals a stop.
            while (tk.getType() != Tokenizer.TT_EOF) {
                readRecord(tk, record);
                if (!record.isEmpty()) {
                    if (processRecord(record)) {
                        break;
                    }
                }
                ++line;
            }
        } catch (Exception ex) {
            // 'line' pinpoints how far we got before the failure.
            throw new MPXJException(MPXJException.READ_ERROR + " (failed at line " + line + ")", ex);
        }
    }
}
|
public class UrlBuilder { /** * Returns full url */
public String buildUrl ( ) { } }
|
StringBuilder sb = new StringBuilder ( ) ; boolean includePort = true ; if ( null != scheme ) { sb . append ( scheme ) . append ( "://" ) ; includePort = ( port != ( scheme . equals ( "http" ) ? 80 : 443 ) ) ; } if ( null != serverName ) { sb . append ( serverName ) ; if ( includePort && port > 0 ) { sb . append ( ':' ) . append ( port ) ; } } if ( ! Objects . equals ( contextPath , "/" ) ) { sb . append ( contextPath ) ; } sb . append ( buildRequestUrl ( ) ) ; return sb . toString ( ) ;
|
public class CsvDozerBeanReader { /** * { @ inheritDoc } */
public void configureBeanMapping ( final Class < ? > clazz , final String [ ] fieldMapping , final Class < ? > [ ] hintTypes ) { } }
|
dozerBeanMapper . addMapping ( new MappingBuilder ( clazz , fieldMapping , hintTypes ) ) ;
|
public class FileDefinitionParser {
    /**
     * Recognizes a properties holder declaration ("name { ... }") and parses its content.
     *
     * @param line the current line
     * @param br the reader, positioned right after the current line
     * @param holderInstance the block instance to populate on success
     * @return an integer code:
     * {@value P_CODE_NO} if not recognized,
     * {@value P_CODE_YES} if it is and {@value P_CODE_CANCEL} otherwise.
     * @throws IOException if reading from the buffer fails
     */
    private int recognizePropertiesHolder(String line, BufferedReader br, AbstractBlockHolder holderInstance) throws IOException {
        int result = P_CODE_NO;
        String[] parts = splitFromInlineComment(line);
        String realLine = parts[0].trim();

        // Recognize the declaration: everything before the opening curly
        // bracket is the holder name; anything after it on the same line is invalid.
        AbstractBlockHolder holder = null;
        StringBuilder sb = new StringBuilder();
        boolean endInstructionReached = false, foundExtraChars = false;
        for (char c : realLine.toCharArray()) {
            if (c == O_CURLY_BRACKET)
                endInstructionReached = true;
            else if (!endInstructionReached)
                sb.append(c);
            else {
                foundExtraChars = true;
                break;
            }
        }

        if (foundExtraChars) {
            addModelError(ErrorCode.P_O_C_BRACKET_EXTRA_CHARACTERS);
            result = P_CODE_CANCEL;
        } else if (!endInstructionReached) {
            // No '{' on this line: either the bracket is missing (error) or
            // this simply is not a holder declaration.
            if (Utils.isEmptyOrWhitespaces(sb.toString()) || sb.toString().matches(ParsingConstants.PATTERN_ID)) {
                addModelError(ErrorCode.P_O_C_BRACKET_MISSING);
                result = P_CODE_CANCEL;
            } else {
                result = P_CODE_NO;
            }
        } else {
            // Valid declaration: populate the holder and register it.
            result = P_CODE_YES;
            holder = holderInstance;
            holder.setName(sb.toString().trim());
            holder.setLine(this.currentLineNumber);
            holder.setInlineComment(parts[1]);
            this.definitionFile.getBlocks().add(holder);
        }

        // Recognize the properties inside the holder until the closing bracket.
        boolean errorInSubProperties = false;
        if (holder != null) {
            while ((line = nextLine(br)) != null && !line.trim().startsWith(String.valueOf(C_CURLY_BRACKET))) {
                int code = recognizeBlankLine(line, holder.getInnerBlocks());
                if (code == P_CODE_YES)
                    continue;
                code = recognizeComment(line, holder.getInnerBlocks());
                if (code == P_CODE_YES)
                    continue;
                code = recognizeInstanceOf(line, br, holderInstance);
                if (code == P_CODE_NO)
                    code = recognizeProperty(line, holder);
                if (code == P_CODE_CANCEL)
                    result = P_CODE_CANCEL;
                if (code != P_CODE_YES) {
                    errorInSubProperties = true;
                    break;
                }
            }
        }

        // Why did we exit the loop?
        // 1. We found an invalid content for the holder.
        // 2. We reached EOF or we found a closing curly bracket.
        // 3. We never entered the loop!

        // Inner errors prevail
        if (errorInSubProperties) {
            if (result == P_CODE_YES) {
                if (holderInstance.getInstructionType() == AbstractBlock.INSTANCEOF)
                    addModelError(ErrorCode.P_INVALID_PROPERTY_OR_INSTANCE);
                else
                    addModelError(ErrorCode.P_INVALID_PROPERTY);
            }
            result = P_CODE_CANCEL;
        }

        // Inner blocks are valid, we found a curly bracket, check the end
        else if (result == P_CODE_YES && line != null && line.trim().startsWith(String.valueOf(C_CURLY_BRACKET))) {
            line = line.replaceFirst("\\s*\\}", "");
            parts = splitFromInlineComment(line);
            if (!Utils.isEmptyOrWhitespaces(parts[0])) {
                addModelError(ErrorCode.P_C_C_BRACKET_EXTRA_CHARACTERS);
                result = P_CODE_CANCEL;
            }
            holder.setClosingInlineComment(parts[1]);
        }

        // The closing bracket is missing
        else if (result == P_CODE_YES) {
            addModelError(ErrorCode.P_C_C_BRACKET_MISSING);
        }

        return result;
    }
}
|
public class AddTypeInformationCallback {
    /**
     * Framework-independent approach to setting the type - if possible, by calling the setType() method.
     *
     * @param component the component whose type is to be set
     * @param type the type value to apply
     */
    private void setType(UIComponent component, String type) {
        Method method;
        try {
            method = component.getClass().getMethod("getType");
            if (null != method) {
                Object invoke = method.invoke(component);
                if (invoke != null) {
                    // is it a PrimeFaces component?
                    if (component.getClass().getName().equals("org.primefaces.component.inputtext.InputText")) {
                        if (!"text".equals(invoke)) {
                            // the programmer has explicitly assigned a type
                            return;
                        }
                        // PrimeFaces default "text" value: fall through and override it.
                    } else
                        // Non-PrimeFaces component that already carries a type value: leave it alone.
                        return;
                }
            }
            method = component.getClass().getMethod("setType", String.class);
            if (null != method) {
                method.invoke(component, type);
                return;
            }
        } catch (ReflectiveOperationException e) {
            // catch block required by compiler, can't happen in reality
        }
        // Fallback when no setType() exists: expose the type as a pass-through
        // attribute, unless one was already provided.
        if (null == component.getAttributes().get("type") && null == component.getPassThroughAttributes().get("type")) {
            component.getPassThroughAttributes().put("type", type);
        }
    }
}
|
public class SARLFormatter { /** * Format the given SARL agent .
* @ param agent the SARL component .
* @ param document the document . */
protected void _format ( SarlAgent agent , IFormattableDocument document ) { } }
|
formatAnnotations ( agent , document , XbaseFormatterPreferenceKeys . newLineAfterClassAnnotations ) ; formatModifiers ( agent , document ) ; final ISemanticRegionsFinder regionFor = this . textRegionExtensions . regionFor ( agent ) ; document . append ( regionFor . keyword ( this . keywords . getAgentKeyword ( ) ) , ONE_SPACE ) ; document . surround ( regionFor . keyword ( this . keywords . getExtendsKeyword ( ) ) , ONE_SPACE ) ; document . format ( agent . getExtends ( ) ) ; formatBody ( agent , document ) ;
|
public class AHCFactory { /** * Creates a AsyncHttpClient object that can be used for talking to elasticsearch
* @ param configuration The configuration object containing properties for configuring the http connections
* @ param numberOfHostsBeingConnectedTo the number of hosts that are currently known about in the es cluster
* @ return */
public static AsyncHttpClient createClient ( Configuration configuration , int numberOfHostsBeingConnectedTo ) { } }
|
AsyncHttpClientConfig . Builder cf = createClientConfig ( configuration ) ; // A Bug exists in the AsyncConnection library that leak permits on a
// Connection exception ( i . e . when host not listening . . hard fail )
// So we do not enable connection tracking . Which is fine as the ring
// buffer does the job of having a single thread talking to the backend repo ( ES )
// So the connection should not grow in an unmanaged way , as the ring buffer
// is restricting the connections
// cf . setMaximumConnectionsTotal ( numberOfHostsBeingConnectedTo ) ;
cf . setMaximumConnectionsTotal ( - 1 ) ; cf . setMaximumConnectionsPerHost ( - 1 ) ; cf . setExecutorService ( getExecutorService ( numberOfHostsBeingConnectedTo ) ) ; return createClient ( cf ) ;
|
public class CopyToModule { /** * Get copy - to map based on map processing .
* @ return target to source map of URIs relative to temporary directory */
private Map < FileInfo , FileInfo > getCopyToMap ( ) { } }
|
final Map < FileInfo , FileInfo > copyToMap = new HashMap < > ( ) ; if ( forceUnique ) { forceUniqueFilter . copyToMap . forEach ( ( dstFi , srcFi ) -> { job . add ( dstFi ) ; copyToMap . put ( dstFi , srcFi ) ; } ) ; } for ( final Map . Entry < URI , URI > e : reader . getCopyToMap ( ) . entrySet ( ) ) { final URI target = job . tempDirURI . relativize ( e . getKey ( ) ) ; final FileInfo targetFi = job . getFileInfo ( target ) ; final URI source = job . tempDirURI . relativize ( e . getValue ( ) ) ; final FileInfo sourceFi = job . getFileInfo ( source ) ; // Filter when copy - to was ignored ( so target is not in job ) ,
// or where target is used directly
if ( targetFi == null || ( targetFi != null && targetFi . src != null ) ) { continue ; } copyToMap . put ( targetFi , sourceFi ) ; } return copyToMap ;
|
public class ByteValueArray { @ Override public void write ( DataOutputView out ) throws IOException { } }
|
out . writeInt ( position ) ; for ( int i = 0 ; i < position ; i ++ ) { out . writeByte ( data [ i ] ) ; }
|
public class Visualizer {
    /**
     * Creates an image of the local entropies of this file.
     *
     * @param file the PE file
     * @return image of local entropies
     * @throws IOException if file can not be read
     */
    public BufferedImage createEntropyImage(File file) throws IOException {
        resetAvailabilityFlags();
        this.data = new PEData(null, null, null, null, null, file);
        image = new BufferedImage(fileWidth, height, IMAGE_TYPE);
        final int MIN_WINDOW_SIZE = 100;
        // bytes to be read at once to calculate local entropy
        final int windowSize = Math.max(MIN_WINDOW_SIZE, pixelSize);
        final int windowHalfSize = (int) Math.round(windowSize / (double) 2);
        final long minLength = withMinLength(0);
        try (RandomAccessFile raf = new RandomAccessFile(file, "r")) {
            // read until EOF with windowSized steps
            for (long address = 0; address <= file.length(); address += minLength) {
                // the start of the window (windowHalf to the left)
                long start = (address - windowHalfSize < 0) ? 0 : address - windowHalfSize;
                raf.seek(start);
                // cut byte number if EOF reached, otherwise read full window
                int bytesToRead = (int) Math.min(file.length() - start, windowSize);
                byte[] bytes = new byte[bytesToRead];
                raf.readFully(bytes);
                /* calculate and draw entropy square pixel for this window */
                double entropy = ShannonEntropy.entropy(bytes);
                Color color = getColorForEntropy(entropy);
                drawPixels(color, address, minLength);
            }
        }
        drawVisOverlay(true);
        return image;
    }
}
|
public class GitHubTokenCredentialsCreator { /** * Creates { @ link org . jenkinsci . plugins . plaincredentials . StringCredentials } with previously created GH token .
* Adds them to domain extracted from server url ( will be generated if no any exists before ) .
* Domain will have domain requirements consists of scheme and host from serverAPIUrl arg
* @ param serverAPIUrl to add to domain with host and scheme requirement from this url
* @ param token GH Personal token
* @ param username used to add to description of newly created creds
* @ return credentials object
* @ see # createCredentials ( String , StandardCredentials ) */
public StandardCredentials createCredentials ( @ Nullable String serverAPIUrl , String token , String username ) { } }
|
String url = defaultIfBlank ( serverAPIUrl , GITHUB_URL ) ; String description = format ( "GitHub (%s) auto generated token credentials for %s" , url , username ) ; StringCredentialsImpl creds = new StringCredentialsImpl ( CredentialsScope . GLOBAL , UUID . randomUUID ( ) . toString ( ) , description , Secret . fromString ( token ) ) ; return createCredentials ( url , creds ) ;
|
public class MapJsonSerializer { /** * < p > serializeValues < / p >
* @ param writer a { @ link com . github . nmorel . gwtjackson . client . stream . JsonWriter } object .
* @ param values a M object .
* @ param ctx a { @ link com . github . nmorel . gwtjackson . client . JsonSerializationContext } object .
* @ param params a { @ link com . github . nmorel . gwtjackson . client . JsonSerializerParameters } object . */
public void serializeValues ( JsonWriter writer , M values , JsonSerializationContext ctx , JsonSerializerParameters params ) { } }
|
if ( ! values . isEmpty ( ) ) { Map < K , V > map = values ; if ( ctx . isOrderMapEntriesByKeys ( ) && ! ( values instanceof SortedMap < ? , ? > ) ) { map = new TreeMap < K , V > ( map ) ; } if ( ctx . isWriteNullMapValues ( ) ) { for ( Entry < K , V > entry : map . entrySet ( ) ) { String name = keySerializer . serialize ( entry . getKey ( ) , ctx ) ; if ( keySerializer . mustBeEscaped ( ctx ) ) { writer . name ( name ) ; } else { writer . unescapeName ( name ) ; } valueSerializer . serialize ( writer , entry . getValue ( ) , ctx , params , true ) ; } } else { for ( Entry < K , V > entry : map . entrySet ( ) ) { if ( null != entry . getValue ( ) ) { String name = keySerializer . serialize ( entry . getKey ( ) , ctx ) ; if ( keySerializer . mustBeEscaped ( ctx ) ) { writer . name ( name ) ; } else { writer . unescapeName ( name ) ; } valueSerializer . serialize ( writer , entry . getValue ( ) , ctx , params , true ) ; } } } }
|
public class FixedCombinationRules { /** * Combines the responses of the classifiers by using estimating the sum
* of the probabilities of their responses .
* @ param classifierClassProbabilityMatrix
* @ return */
public static AssociativeArray sum ( DataTable2D classifierClassProbabilityMatrix ) { } }
|
AssociativeArray combinedClassProbabilities = new AssociativeArray ( ) ; for ( Map . Entry < Object , AssociativeArray > entry : classifierClassProbabilityMatrix . entrySet ( ) ) { // Object classifier = entry . getKey ( ) ;
AssociativeArray listOfClassProbabilities = entry . getValue ( ) ; for ( Map . Entry < Object , Object > entry2 : listOfClassProbabilities . entrySet ( ) ) { Object theClass = entry2 . getKey ( ) ; Double probability = TypeInference . toDouble ( entry2 . getValue ( ) ) ; Double previousValue = combinedClassProbabilities . getDouble ( theClass ) ; if ( previousValue == null ) { previousValue = 0.0 ; } combinedClassProbabilities . put ( theClass , previousValue + probability ) ; } } return combinedClassProbabilities ;
|
public class dnssoarec { /** * Use this API to fetch filtered set of dnssoarec resources .
* filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */
public static dnssoarec [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
|
dnssoarec obj = new dnssoarec ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; dnssoarec [ ] response = ( dnssoarec [ ] ) obj . getfiltered ( service , option ) ; return response ;
|
public class CreateEntities { /** * < p > Extracts all entity classes in the given set of persistence . xml files an returns a collection thereof . < / p >
* @ param persistenceXmlFiles set of persistence . xml files that you want to scan
* @ return extracted entity classes */
private static Collection < Class > scanEntityClasses ( String ... persistenceXmlFiles ) { } }
|
DDLGenerator . Profile profile = new DDLGenerator . Profile ( null ) ; profile . addPersistenceFile ( persistenceXmlFiles ) ; return profile . getEntityClasses ( ) ;
|
public class FilesImpl { /** * Deletes the specified file from the compute node .
* @ param poolId The ID of the pool that contains the compute node .
* @ param nodeId The ID of the compute node from which you want to delete the file .
* @ param filePath The path to the file or directory that you want to delete .
* @ param recursive Whether to delete children of a directory . If the filePath parameter represents a directory instead of a file , you can set recursive to true to delete the directory and all of the files and subdirectories in it . If recursive is false then the directory must be empty or deletion will fail .
* @ param fileDeleteFromComputeNodeOptions Additional parameters for the operation
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws BatchErrorException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void deleteFromComputeNode ( String poolId , String nodeId , String filePath , Boolean recursive , FileDeleteFromComputeNodeOptions fileDeleteFromComputeNodeOptions ) { } }
|
deleteFromComputeNodeWithServiceResponseAsync ( poolId , nodeId , filePath , recursive , fileDeleteFromComputeNodeOptions ) . toBlocking ( ) . single ( ) . body ( ) ;
|
public class DataMediaSourceServiceImpl { /** * 添加 */
public void create ( DataMediaSource dataMediaSource ) { } }
|
Assert . assertNotNull ( dataMediaSource ) ; try { DataMediaSourceDO dataMediaSourceDo = modelToDo ( dataMediaSource ) ; dataMediaSourceDo . setId ( 0L ) ; if ( ! dataMediaSourceDao . checkUnique ( dataMediaSourceDo ) ) { String exceptionCause = "exist the same name source in the database." ; logger . warn ( "WARN ## " + exceptionCause ) ; throw new RepeatConfigureException ( exceptionCause ) ; } dataMediaSourceDao . insert ( dataMediaSourceDo ) ; } catch ( RepeatConfigureException rce ) { throw rce ; } catch ( Exception e ) { logger . error ( "ERROR ## create dataMediaSource has an exception!" ) ; throw new ManagerException ( e ) ; }
|
public class CellFormulaHandler { /** * セルに数式を設定する
* @ param field フィールド情報
* @ param config システム情報
* @ param cell セル情報
* @ param targetBean 処理対象のフィールドが定義されているクラスのインスタンス 。
* @ throws ConversionException 数式の解析に失敗した場合 。 */
public void handleFormula ( final FieldAccessor field , final Configuration config , final Cell cell , final Object targetBean ) { } }
|
ArgUtils . notNull ( field , "field" ) ; ArgUtils . notNull ( config , "config" ) ; ArgUtils . notNull ( cell , "cell" ) ; final String evaluatedFormula = createFormulaValue ( config , cell , targetBean ) ; if ( Utils . isEmpty ( evaluatedFormula ) ) { cell . setCellType ( CellType . BLANK ) ; return ; } try { cell . setCellFormula ( evaluatedFormula ) ; cell . setCellType ( CellType . FORMULA ) ; } catch ( FormulaParseException e ) { // 数式の解析に失敗した場合
String message = MessageBuilder . create ( "cell.failParseFormula" ) . var ( "property" , field . getNameWithClass ( ) ) . var ( "cellAddress" , CellPosition . of ( cell ) . toString ( ) ) . var ( "formula" , evaluatedFormula ) . format ( ) ; throw new ConversionException ( message , e , field . getType ( ) ) ; }
|
public class OutputChannelMappingMarshaller { /** * Marshall the given parameter object . */
public void marshall ( OutputChannelMapping outputChannelMapping , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( outputChannelMapping == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( outputChannelMapping . getInputChannels ( ) , INPUTCHANNELS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class TagWizardController { /** * Automatically tags all attributes in the current entity using Lucene lexical matching . Stores
* the tags in the OntologyTag Repository .
* @ param request containing the entityTypeId and selected ontology identifiers
* @ return A { @ link Map } containing Attribute name and a Map of Tag iri and label */
@ PostMapping ( "/autotagattributes" ) public @ ResponseBody Map < String , OntologyTag > autoTagAttributes ( @ Valid @ RequestBody AutoTagRequest request ) { } }
|
String entityTypeId = request . getEntityTypeId ( ) ; EntityType entityType = dataService . getEntityType ( entityTypeId ) ; List < Ontology > ontologies = ontologyService . getOntologies ( request . getOntologyIds ( ) ) ; Map < Attribute , Hit < OntologyTerm > > autoGeneratedTags = new LinkedHashMap < > ( ) ; entityType . getAtomicAttributes ( ) . forEach ( attribute -> { Hits < OntologyTerm > hits = semanticSearchService . findOntologyTerms ( attribute , ontologies ) ; if ( hits . hasHits ( ) ) { autoGeneratedTags . put ( attribute , hits . iterator ( ) . next ( ) ) ; } } ) ; return ontologyTagService . tagAttributesInEntity ( request . getEntityTypeId ( ) , Maps . transformValues ( autoGeneratedTags , Hit :: getResult ) ) ;
|
public class Vector2dfx { /** * Replies the orientation vector , which is corresponding
* to the given angle on a trigonometric circle .
* @ param angle is the angle in radians to translate .
* @ return the orientation vector which is corresponding to the given angle . */
@ Pure @ Inline ( value = "new Vector2dfx(Math.cos($1), Math.sin($1))" , imported = { } }
|
Vector2dfx . class } ) public static Vector2dfx toOrientationVector ( double angle ) { return new Vector2dfx ( Math . cos ( angle ) , Math . sin ( angle ) ) ;
|
public class AdductFormula { /** * Compare to IIsotope . The method doesn ' t compare instance but if they
* have the same symbol , natural abundance and exact mass .
* @ param isotopeOne The first Isotope to compare
* @ param isotopeTwo The second Isotope to compare
* @ return True , if both isotope are the same */
private boolean isTheSame ( IIsotope isotopeOne , IIsotope isotopeTwo ) { } }
|
// XXX : floating point comparision !
if ( ! Objects . equals ( isotopeOne . getSymbol ( ) , isotopeTwo . getSymbol ( ) ) ) return false ; if ( ! Objects . equals ( isotopeOne . getNaturalAbundance ( ) , isotopeTwo . getNaturalAbundance ( ) ) ) return false ; if ( ! Objects . equals ( isotopeOne . getExactMass ( ) , isotopeTwo . getExactMass ( ) ) ) return false ; return true ;
|
public class BatchUpdateDaemon { /** * This invalidates all cache entries in all caches whose template
* is specified .
* @ param template The Template that is used to to invalidate fragments .
* @ param waitOnInvalidation True indicates that this method should
* not return until all invalidations have taken effect .
* False indicates that the invalidations will take effect the next
* time the BatchUpdateDaemon wakes . */
public void invalidateByTemplate ( String template , boolean waitOnInvalidation , DCache cache ) { } }
|
synchronized ( this ) { BatchUpdateList bul = getUpdateList ( cache ) ; bul . invalidateByTemplateEvents . put ( template , new InvalidateByTemplateEvent ( template , CachePerf . LOCAL ) ) ; } if ( waitOnInvalidation ) { wakeUp ( 0 , 0 ) ; }
|
public class ContentSpecProcessor { /** * Gets a list of child nodes that can be transformed .
* @ param childNodes The list of nodes to filter for translatable nodes .
* @ return A list of transformable nodes . */
protected List < Node > getTransformableNodes ( final List < Node > childNodes ) { } }
|
final List < Node > nodes = new LinkedList < Node > ( ) ; for ( final Node childNode : childNodes ) { if ( isTransformableNode ( childNode ) ) { nodes . add ( childNode ) ; } } return nodes ;
|
public class HttpRequestBuilder { /** * Send the request and log / update metrics for the results . */
@ SuppressWarnings ( "PMD.ExceptionAsFlowControl" ) public HttpResponse send ( ) throws IOException { } }
|
HttpResponse response = null ; for ( int attempt = 1 ; attempt <= numAttempts ; ++ attempt ) { entry . withAttempt ( attempt ) ; try { response = sendImpl ( ) ; int s = response . status ( ) ; if ( s == 429 || s == 503 ) { // Request is getting throttled , exponentially back off
// - 429 client sending too many requests
// - 503 server unavailable
try { long delay = initialRetryDelay << ( attempt - 1 ) ; LOGGER . debug ( "request throttled, delaying for {}ms: {} {}" , delay , method , uri ) ; Thread . sleep ( delay ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; throw new IOException ( "request failed " + method + " " + uri , e ) ; } } else if ( s < 500 ) { // 4xx errors other than 429 are not considered retriable , so for anything
// less than 500 just return the response to the user
return response ; } } catch ( IOException e ) { // All exceptions are considered retriable . Some like UnknownHostException are
// debatable , but we have seen them in some cases if there is a high latency for
// DNS lookups . So for now assume all exceptions are transient issues .
if ( attempt == numAttempts ) { throw e ; } else { LOGGER . warn ( "attempt {} of {} failed: {} {}" , attempt , numAttempts , method , uri ) ; } } } if ( response == null ) { // Should not get here
throw new IOException ( "request failed " + method + " " + uri ) ; } return response ;
|
public class BitmapEncoder { /** * Returns a bitmap that lights up red subpixels at the bottom , green subpixels on the right , and
* blue subpixels in bottom - right . */
Bitmap generateGradient ( ) { } }
|
int [ ] [ ] pixels = new int [ 1080 ] [ 1920 ] ; for ( int y = 0 ; y < 1080 ; y ++ ) { for ( int x = 0 ; x < 1920 ; x ++ ) { int r = ( int ) ( y / 1080f * 255 ) ; int g = ( int ) ( x / 1920f * 255 ) ; int b = ( int ) ( ( Math . hypot ( x , y ) / Math . hypot ( 1080 , 1920 ) ) * 255 ) ; pixels [ y ] [ x ] = r << 16 | g << 8 | b ; } } return new Bitmap ( pixels ) ;
|
public class XMLTransformTag { /** * Performs the transformation and writes the result to the JSP writer .
* @ param in the source document to transform . */
public void transform ( Source pIn ) throws JspException { } }
|
try { // Create transformer
Transformer transformer = TransformerFactory . newInstance ( ) . newTransformer ( getSource ( mStylesheetURI ) ) ; // Store temporary output in a bytearray , as the transformer will
// usually try to flush the stream ( illegal operation from a custom
// tag ) .
ByteArrayOutputStream os = new ByteArrayOutputStream ( ) ; StreamResult out = new StreamResult ( os ) ; // Perform the transformation
transformer . transform ( pIn , out ) ; // Write the result back to the JSP writer
pageContext . getOut ( ) . print ( os . toString ( ) ) ; } catch ( MalformedURLException murle ) { throw new JspException ( murle . getMessage ( ) , murle ) ; } catch ( IOException ioe ) { throw new JspException ( ioe . getMessage ( ) , ioe ) ; } catch ( TransformerException te ) { throw new JspException ( "XSLT Trandformation failed: " + te . getMessage ( ) , te ) ; }
|
public class KunderaQueryUtils { /** * Checks for where clause .
* @ param jpqlExpression
* the jpql expression
* @ return true , if successful */
public static boolean hasWhereClause ( JPQLExpression jpqlExpression ) { } }
|
if ( isSelectStatement ( jpqlExpression ) ) { return ( ( SelectStatement ) jpqlExpression . getQueryStatement ( ) ) . hasWhereClause ( ) ; } else if ( isUpdateStatement ( jpqlExpression ) ) { return ( ( UpdateStatement ) jpqlExpression . getQueryStatement ( ) ) . hasWhereClause ( ) ; } if ( isDeleteStatement ( jpqlExpression ) ) { return ( ( DeleteStatement ) jpqlExpression . getQueryStatement ( ) ) . hasWhereClause ( ) ; } return false ;
|
public class UserPreferences { /** * Put .
* @ param key the key
* @ param value the value
* @ see java . util . prefs . Preferences # put ( java . lang . String , java . lang . String ) */
public static void put ( final String key , final String value ) { } }
|
try { systemRoot . put ( fixKey ( key ) , value ) ; } catch ( final Exception e ) { System . err . print ( e ) ; }
|
public class ZLoop { /** * socket / FD , cancels ALL of them . */
public void removePoller ( PollItem pollItem ) { } }
|
Iterator < SPoller > it = pollers . iterator ( ) ; while ( it . hasNext ( ) ) { SPoller p = it . next ( ) ; if ( pollItem . equals ( p . item ) ) { it . remove ( ) ; dirty = true ; } } if ( verbose ) { System . out . printf ( "I: zloop: cancel %s poller (%s, %s)" , pollItem . getSocket ( ) != null ? pollItem . getSocket ( ) . getType ( ) : "RAW" , pollItem . getSocket ( ) , pollItem . getRawSocket ( ) ) ; }
|
public class TransactionToDispatchableMap { /** * Removes , from the table , the dispatchable corresponding to a local transaction .
* @ param clientId The client transaction id corresponding to the dispatchable to
* remove
* @ return the removed dispatchable or null if no entry could be found for the
* specified clientId parameter . */
public Dispatchable removeDispatchableForLocalTransaction ( int clientId ) { } }
|
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "removeDispatchableForLocalTransaction" , "" + clientId ) ; AbstractFirstLevelMapEntry firstLevelEntry = null ; if ( idToFirstLevelEntryMap . containsKey ( clientId ) ) { firstLevelEntry = ( AbstractFirstLevelMapEntry ) idToFirstLevelEntryMap . get ( clientId ) ; } final Dispatchable result ; if ( firstLevelEntry == null ) { result = null ; } else { if ( firstLevelEntry . isLocalTransaction ( ) ) { result = ( ( LocalFirstLevelMapEntry ) firstLevelEntry ) . getDispatchable ( ) ; idToFirstLevelEntryMap . remove ( clientId ) ; } else { final SIErrorException exception = new SIErrorException ( CommsConstants . TRANTODISPATCHMAP_REMOVEFORLOCALTX_01 ) ; FFDCFilter . processException ( exception , CLASS_NAME + ".removeDispatchableForLocalTransaction" , CommsConstants . TRANTODISPATCHMAP_REMOVEFORLOCALTX_01 , new Object [ ] { "" + clientId , firstLevelEntry , idToFirstLevelEntryMap , this } ) ; if ( tc . isEventEnabled ( ) ) SibTr . exception ( this , tc , exception ) ; throw exception ; } } if ( tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "removeDispatchableForLocalTransaction" , result ) ; return result ;
|
public class DiscoveryJerseyProvider { /** * Checks for the { @ link Serializer } annotation for the given class .
* @ param entityType The class to be serialized / deserialized .
* @ return true if the annotation is present , false otherwise . */
private static boolean isSupportedEntity ( Class < ? > entityType ) { } }
|
try { Annotation annotation = entityType . getAnnotation ( Serializer . class ) ; if ( annotation != null ) { return true ; } } catch ( Throwable th ) { LOGGER . warn ( "Exception in checking for annotations" , th ) ; } return false ;
|
public class ProposalResponse { /** * getChaincodeActionResponseReadWriteSetInfo get this proposals read write set .
* @ return The read write set . See { @ link TxReadWriteSetInfo }
* @ throws InvalidArgumentException */
public TxReadWriteSetInfo getChaincodeActionResponseReadWriteSetInfo ( ) throws InvalidArgumentException { } }
|
if ( isInvalid ( ) ) { throw new InvalidArgumentException ( "Proposal response is invalid." ) ; } try { final ProposalResponsePayloadDeserializer proposalResponsePayloadDeserializer = getProposalResponsePayloadDeserializer ( ) ; TxReadWriteSet txReadWriteSet = proposalResponsePayloadDeserializer . getExtension ( ) . getResults ( ) ; if ( txReadWriteSet == null ) { return null ; } return new TxReadWriteSetInfo ( txReadWriteSet ) ; } catch ( Exception e ) { throw new InvalidArgumentException ( e ) ; }
|
public class HBaseDataHandler { /** * ( non - Javadoc )
* @ see
* com . impetus . client . hbase . admin . DataHandler # readData ( java . lang . String ,
* java . lang . Class , com . impetus . kundera . metadata . model . EntityMetadata ,
* java . lang . String , java . util . List ) */
@ Override public List readAll ( final String tableName , Class clazz , EntityMetadata m , final List < Object > rowKey , List < String > relationNames , String ... columns ) throws IOException { } }
|
List output = null ; Object entity = null ; HTableInterface hTable = null ; hTable = gethTable ( tableName ) ; // Load raw data from HBase
MetamodelImpl metaModel = ( MetamodelImpl ) kunderaMetadata . getApplicationMetadata ( ) . getMetamodel ( m . getPersistenceUnit ( ) ) ; AbstractManagedType managedType = ( AbstractManagedType ) metaModel . entity ( m . getEntityClazz ( ) ) ; // For secondary tables .
List < String > secondaryTables = ( ( DefaultEntityAnnotationProcessor ) managedType . getEntityAnnotation ( ) ) . getSecondaryTablesName ( ) ; secondaryTables . add ( m . getTableName ( ) ) ; List < HBaseData > results = new ArrayList < HBaseData > ( ) ; for ( String colTableName : secondaryTables ) { List table = ( ( HBaseReader ) hbaseReader ) . loadAll ( hTable , rowKey , colTableName , columns ) ; // null check for ' table ' . addAll method throws exception if table
// is null
if ( table != null ) { results . addAll ( table ) ; } } output = onRead ( tableName , clazz , m , output , hTable , entity , relationNames , results ) ; return output ;
|
public class CmsJspObjectValueWrapper { /** * Returns if direct edit is enabled . < p >
* @ param cms the current cms context
* @ return < code > true < / code > if direct edit is enabled */
static boolean isDirectEditEnabled ( CmsObject cms ) { } }
|
return ! cms . getRequestContext ( ) . getCurrentProject ( ) . isOnlineProject ( ) && ( cms . getRequestContext ( ) . getAttribute ( CmsGwtConstants . PARAM_DISABLE_DIRECT_EDIT ) == null ) ;
|
public class MiniTemplatorParser { /** * Returns true the condition is met . */
private boolean evaluateConditionFlags ( String flags ) { } }
|
int p = 0 ; while ( true ) { p = skipBlanks ( flags , p ) ; if ( p >= flags . length ( ) ) { break ; } boolean complement = false ; if ( flags . charAt ( p ) == '!' ) { complement = true ; p ++ ; } p = skipBlanks ( flags , p ) ; if ( p >= flags . length ( ) ) { break ; } int p0 = p ; p = skipNonBlanks ( flags , p0 + 1 ) ; String flag = flags . substring ( p0 , p ) . toUpperCase ( ) ; if ( ( conditionFlags != null && conditionFlags . contains ( flag ) ) ^ complement ) { return true ; } } return false ;
|
public class MonitorObserver { /** * Inherited from Observer
* @ param o
* @ param arg */
@ Override public void update ( Observable o , Object arg ) { } }
|
if ( arg instanceof String ) { lines . add ( ( String ) arg ) ; } if ( arg instanceof List < ? > ) { lines . addAll ( ( List < String > ) arg ) ; } if ( arg instanceof Exception ) { Exception e = ( Exception ) arg ; PresentationManager . getPm ( ) . error ( e ) ; lines . add ( e . getMessage ( ) ) ; }
|
public class EscapeUtil { /** * Split but return an array which is never null ( but might be empty )
* @ param pArg argument to split
* @ param pEscape single character used for escaping
* @ param pDelimiter delimiter to use
* @ return the splitted string as list or an empty array if the argument was null */
public static String [ ] splitAsArray ( String pArg , String pEscape , String pDelimiter ) { } }
|
if ( pArg != null ) { List < String > elements = split ( pArg , pEscape , pDelimiter ) ; return elements . toArray ( new String [ elements . size ( ) ] ) ; } else { return new String [ 0 ] ; }
|
public class VisibleMemberMap { /** * Return the key to the member map for the given member . */
private Object getMemberKey ( Element element ) { } }
|
if ( utils . isConstructor ( element ) ) { return utils . getSimpleName ( element ) + utils . flatSignature ( ( ExecutableElement ) element ) ; } else if ( utils . isMethod ( element ) ) { return getClassMember ( ( ExecutableElement ) element ) ; } else if ( utils . isField ( element ) || utils . isEnumConstant ( element ) || utils . isAnnotationType ( element ) ) { return utils . getSimpleName ( element ) ; } else { // it ' s a class or interface
String classOrIntName = utils . getSimpleName ( element ) ; // Strip off the containing class name because we only want the member name .
classOrIntName = classOrIntName . indexOf ( '.' ) != 0 ? classOrIntName . substring ( classOrIntName . lastIndexOf ( '.' ) ) : classOrIntName ; return "clint" + classOrIntName ; }
|
public class CanalEventUtils { /** * 根据entry创建对应的Position对象 */
public static LogPosition createPosition ( Event event , boolean included ) { } }
|
EntryPosition position = new EntryPosition ( ) ; position . setJournalName ( event . getJournalName ( ) ) ; position . setPosition ( event . getPosition ( ) ) ; position . setTimestamp ( event . getExecuteTime ( ) ) ; position . setIncluded ( included ) ; LogPosition logPosition = new LogPosition ( ) ; logPosition . setPostion ( position ) ; logPosition . setIdentity ( event . getLogIdentity ( ) ) ; return logPosition ;
|
public class RenamePRequest { /** * < code > optional . alluxio . grpc . file . RenamePOptions options = 3 ; < / code > */
public alluxio . grpc . RenamePOptions getOptions ( ) { } }
|
return options_ == null ? alluxio . grpc . RenamePOptions . getDefaultInstance ( ) : options_ ;
|
public class UserProfile { public void setFirstName ( String firstName ) throws IllegalArgumentException { } }
|
if ( Text . isNull ( firstName ) ) return ; if ( ! this . firstName . equals ( firstName ) ) { this . firstName = firstName ; }
|
public class Metadata { /** * Adds a new metadata value .
* @ param path the path that designates the key . Must be prefixed with a " / " .
* @ param value the value .
* @ return this metadata object . */
public Metadata add ( String path , String value ) { } }
|
this . values . add ( this . pathToProperty ( path ) , value ) ; this . addOp ( "add" , path , value ) ; return this ;
|
public class MarkdownParser { /** * Searching for valid formatting span end
* @ param cursor text cursor
* @ param spanStart expected span start
* @ param limit maximum index in cursor
* @ param span span control character
* @ return span end , - 1 if not found */
private int findSpanEnd ( TextCursor cursor , int spanStart , int limit , char span ) { } }
|
for ( int i = spanStart + 1 ; i < limit ; i ++ ) { char c = cursor . text . charAt ( i ) ; if ( c == span ) { // Check prev and next symbols
if ( isGoodAnchor ( cursor . text , i + 1 ) && isNotSymbol ( cursor . text , i - 1 , span ) ) { return i + 1 ; } } } return - 1 ;
|
public class Shape { /** * Get the element wise stride for the
* shape info buffer
* @ param buffer the buffer to get the element
* wise stride from
* @ return the element wise stride for the buffer */
public static void setElementWiseStride ( DataBuffer buffer , int elementWiseStride ) { } }
|
int length2 = shapeInfoLength ( Shape . rank ( buffer ) ) ; // if ( 1 > 0 ) throw new RuntimeException ( " setElementWiseStride called : [ " + elementWiseStride + " ] , buffer : " + buffer ) ;
buffer . put ( length2 - 2 , elementWiseStride ) ;
|
public class TypeUtility { /** * Convert a TypeMirror in a typeName , or classname or whatever .
* @ param typeName
* the type name
* @ return typeName */
public static TypeName typeName ( String typeName ) { } }
|
TypeName [ ] values = { TypeName . BOOLEAN , TypeName . BYTE , TypeName . CHAR , TypeName . DOUBLE , TypeName . FLOAT , TypeName . INT , TypeName . LONG , TypeName . SHORT , TypeName . VOID } ; for ( TypeName item : values ) { if ( item . toString ( ) . equals ( typeName ) ) { return item ; } } LiteralType literalName = LiteralType . of ( typeName ) ; if ( literalName . isParametrizedType ( ) ) { return ParameterizedTypeName . get ( className ( literalName . getRawType ( ) ) , typeName ( literalName . getTypeParameter ( ) ) ) ; } if ( literalName . isArray ( ) ) { return ArrayTypeName . of ( typeName ( literalName . getRawType ( ) ) ) ; } return ClassName . bestGuess ( typeName ) ;
|
public class CacheCore { /** * for internal process use
* @ param request < p >
* idxMetricName = = null : query all metrics
* idxMetricName = = [ ] : query none metric
* idxMetricName = = [ a , b , c . . ] : query metric a , b and c , . .
* idxComponentInstance = = null : query all components
* idxComponentInstance = = [ ] : query none component
* idxComponentInstance = = [ c1 - & gt ; null , . . ] : query all instances of c1 , . .
* idxComponentInstance = = [ c1 - & gt ; [ ] , . . ] : query none instance of c1 , . .
* idxComponentInstance = = [ c1 - & gt ; [ a , b , c , . . ] , . . ] : query instance a , b , c , . . of c1 , . .
* assert : startTime & lt ; = endTime
* @ param metricNameType map : metric name to type
* @ return query result */
public MetricResponse getMetrics ( MetricRequest request , MetricsFilter metricNameType ) { } }
|
LOG . fine ( "received query: " + request . toString ( ) ) ; synchronized ( CacheCore . class ) { List < MetricDatum > response = new LinkedList < > ( ) ; // candidate metric names
Set < String > metricNameFilter = request . getMetricNames ( ) ; if ( metricNameFilter == null ) { metricNameFilter = idxMetricName . keySet ( ) ; } // candidate component names
Map < String , Set < String > > componentInstanceMap = request . getComponentNameInstanceId ( ) ; Set < String > componentNameFilter ; if ( componentInstanceMap == null ) { componentNameFilter = idxComponentInstance . keySet ( ) ; } else { componentNameFilter = componentInstanceMap . keySet ( ) ; } for ( String metricName : metricNameFilter ) { if ( ! metricExists ( metricName ) ) { continue ; } MetricsFilter . MetricAggregationType type = metricNameType . getAggregationType ( metricName ) ; for ( String componentName : componentNameFilter ) { // candidate instance ids
Set < String > instanceIdFilter ; if ( componentInstanceMap == null || componentInstanceMap . get ( componentName ) == null ) { instanceIdFilter = idxComponentInstance . get ( componentName ) . keySet ( ) ; } else { instanceIdFilter = componentInstanceMap . get ( componentName ) ; } for ( String instanceId : instanceIdFilter ) { LOG . fine ( componentName + "; " + instanceId + "; " + metricName + "; " + type ) ; // get bucket _ id
int idx1 = idxComponentInstance . get ( componentName ) . get ( instanceId ) ; int idx2 = idxMetricName . get ( metricName ) ; long bucketId = makeBucketId ( idx1 , idx2 ) ; // iterate buckets : the result may be empty due to the bucketId / hash filter
List < MetricTimeRangeValue > metricValue = new LinkedList < > ( ) ; switch ( request . getAggregationGranularity ( ) ) { case AGGREGATE_ALL_METRICS : case AGGREGATE_BY_BUCKET : getAggregatedMetrics ( metricValue , request . getStartTime ( ) /* when */
, request . getEndTime ( ) /* when */
, bucketId /* where */
, type /* how */
, request . getAggregationGranularity ( ) ) ; break ; case RAW : getRawMetrics ( metricValue , request . getStartTime ( ) , request . getEndTime ( ) , bucketId , type ) ; break ; default : LOG . warning ( "unknown aggregationGranularity type " + request . getAggregationGranularity ( ) ) ; } // make metric list in response
response . add ( new MetricDatum ( componentName , instanceId , metricName , metricValue ) ) ; } // end for : instance
} // end for : component
} // end for : metric
return new MetricResponse ( response ) ; }
|
public class Authentication { /** * Login method to authenticate based on the Subject
* < ul >
* < li > If Security is enabled , it calls the MessagingAuthenticationService
* for authenticating < / li >
* < li > If Security is disabled , it returns a Unauthenticated Subject < / li >
* < / ul >
* @ param subject
* The Subject ( mostly already authenticated ) passed from
* authorization code
* @ return subject - If User is authenticated null - If User is
* unauthenticated */
public Subject login ( Subject subject ) throws MessagingAuthenticationException { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , CLASS_NAME + "login" , subject ) ; } Subject result = null ; if ( ! runtimeSecurityService . isMessagingSecure ( ) ) { result = runtimeSecurityService . createUnauthenticatedSubject ( ) ; } else { if ( messagingAuthenticationService != null ) { result = messagingAuthenticationService . login ( subject ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . exit ( tc , CLASS_NAME + "login" , result ) ; } return result ;
|
public class SegmentHelper {
    /**
     * Sends an {@code UpdateTableEntries} WireCommand to the segment store to
     * update the given table entries.
     *
     * @param tableName Qualified table name.
     * @param entries List of {@link TableEntry}s to be updated.
     * @param delegationToken The token to be presented to the segmentstore.
     * @param clientRequestId Request id; a fresh id is generated when the
     *            caller passes {@code RequestTag.NON_EXISTENT_ID}.
     * @return A CompletableFuture that, when completed normally, will contain
     *         the current versions of each {@link TableEntry}. If the
     *         operation failed, the future will be failed with the causing
     *         exception. If the exception can be retried then the future will
     *         be failed with {@link WireCommandFailedException}.
     */
    public CompletableFuture<List<KeyVersion>> updateTableEntries(final String tableName, final List<TableEntry<byte[], byte[]>> entries, String delegationToken, final long clientRequestId) { } }
|
// Completed (normally or exceptionally) by the reply processor below.
final CompletableFuture<List<KeyVersion>> result = new CompletableFuture<>();
final Controller.NodeUri uri = getTableUri(tableName);
final WireCommandType type = WireCommandType.UPDATE_TABLE_ENTRIES;
// Reuse the caller's request id when one was supplied; otherwise mint a new one.
final long requestId = (clientRequestId == RequestTag.NON_EXISTENT_ID) ? idGenerator.get() : clientRequestId;
// Each callback maps a segment-store reply onto the result future. Retryable
// failures are wrapped in WireCommandFailedException with a Reason.
final FailingReplyProcessor replyProcessor = new FailingReplyProcessor() {
    @Override
    public void connectionDropped() {
        log.warn(requestId, "updateTableEntries {} Connection dropped", tableName);
        result.completeExceptionally(new WireCommandFailedException(type, WireCommandFailedException.Reason.ConnectionDropped));
    }

    @Override
    public void wrongHost(WireCommands.WrongHost wrongHost) {
        log.warn(requestId, "updateTableEntries {} wrong host", tableName);
        result.completeExceptionally(new WireCommandFailedException(type, WireCommandFailedException.Reason.UnknownHost));
    }

    @Override
    public void noSuchSegment(WireCommands.NoSuchSegment noSuchSegment) {
        log.warn(requestId, "updateTableEntries {} NoSuchSegment", tableName);
        result.completeExceptionally(new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist));
    }

    @Override
    public void tableEntriesUpdated(WireCommands.TableEntriesUpdated tableEntriesUpdated) {
        // Success path: surface the new key versions to the caller.
        log.info(requestId, "updateTableEntries request for {} tableSegment completed.", tableName);
        result.complete(tableEntriesUpdated.getUpdatedVersions().stream().map(KeyVersionImpl::new).collect(Collectors.toList()));
    }

    @Override
    public void tableKeyDoesNotExist(WireCommands.TableKeyDoesNotExist tableKeyDoesNotExist) {
        log.warn(requestId, "updateTableEntries request for {} tableSegment failed with TableKeyDoesNotExist.", tableName);
        result.completeExceptionally(new WireCommandFailedException(type, WireCommandFailedException.Reason.TableKeyDoesNotExist));
    }

    @Override
    public void tableKeyBadVersion(WireCommands.TableKeyBadVersion tableKeyBadVersion) {
        // Conditional-update conflict: the entry's expected version did not match.
        log.warn(requestId, "updateTableEntries request for {} tableSegment failed with TableKeyBadVersion.", tableName);
        result.completeExceptionally(new WireCommandFailedException(type, WireCommandFailedException.Reason.TableKeyBadVersion));
    }

    @Override
    public void processingFailure(Exception error) {
        log.error(requestId, "updateTableEntries {} failed", tableName, error);
        handleError(error, result, type);
    }

    @Override
    public void authTokenCheckFailed(WireCommands.AuthTokenCheckFailed authTokenCheckFailed) {
        result.completeExceptionally(new WireCommandFailedException(new AuthenticationException(authTokenCheckFailed.toString()), type, WireCommandFailedException.Reason.AuthFailed));
    }
};
// Track every ByteBuf created/obtained here so it can be released once the
// request completes (see whenComplete below).
List<ByteBuf> buffersToRelease = new ArrayList<>();
// Convert each caller-supplied entry into its wire-protocol key/value form.
List<Map.Entry<WireCommands.TableKey, WireCommands.TableValue>> wireCommandEntries = entries.stream().map(te -> {
    final WireCommands.TableKey key = convertToWireCommand(te.getKey());
    ByteBuf valueBuffer = wrappedBuffer(te.getValue());
    buffersToRelease.add(key.getData());
    buffersToRelease.add(valueBuffer);
    final WireCommands.TableValue value = new WireCommands.TableValue(valueBuffer);
    return new AbstractMap.SimpleImmutableEntry<>(key, value);
}).collect(Collectors.toList());
WireCommands.UpdateTableEntries request = new WireCommands.UpdateTableEntries(requestId, tableName, delegationToken, new WireCommands.TableEntries(wireCommandEntries));
sendRequestAsync(request, replyProcessor, result, ModelHelper.encode(uri));
// Release the wire buffers regardless of success or failure.
return result.whenComplete((r, e) -> release(buffersToRelease));
|
public class ResourceAssignment {
    /**
     * Generates timephased costs from timephased work where a single cost rate
     * applies to the whole assignment.
     *
     * @param standardWorkList timephased standard work
     * @param overtimeWorkList timephased overtime work (may be null)
     * @return timephased cost
     */
    private List<TimephasedCost> getTimephasedCostSingleRate(List<TimephasedWork> standardWorkList, List<TimephasedWork> overtimeWorkList) { } }
|
List<TimephasedCost> result = new LinkedList<TimephasedCost>();
// just return an empty list if there is no timephased work passed in
if (standardWorkList == null) {
    return result;
}
// takes care of the situation where there is no timephased overtime work:
// an empty iterator means every standard-work entry pairs with null overtime
Iterator<TimephasedWork> overtimeIterator = overtimeWorkList == null ? java.util.Collections.<TimephasedWork>emptyList().iterator() : overtimeWorkList.iterator();
for (TimephasedWork standardWork : standardWorkList) {
    // Rate in effect at the start of this work block.
    CostRateTableEntry rate = getCostRateTableEntry(standardWork.getStart());
    double standardRateValue = rate.getStandardRate().getAmount();
    TimeUnit standardRateUnits = rate.getStandardRate().getUnits();
    double overtimeRateValue = 0;
    TimeUnit overtimeRateUnits = standardRateUnits;
    if (rate.getOvertimeRate() != null) {
        overtimeRateValue = rate.getOvertimeRate().getAmount();
        overtimeRateUnits = rate.getOvertimeRate().getUnits();
    }
    // Overtime entries are consumed positionally, in step with standard entries.
    TimephasedWork overtimeWork = overtimeIterator.hasNext() ? overtimeIterator.next() : null;
    // Normalise work durations to the units of the applicable rate before multiplying.
    Duration standardWorkPerDay = standardWork.getAmountPerDay();
    if (standardWorkPerDay.getUnits() != standardRateUnits) {
        standardWorkPerDay = standardWorkPerDay.convertUnits(standardRateUnits, getParentFile().getProjectProperties());
    }
    Duration totalStandardWork = standardWork.getTotalAmount();
    if (totalStandardWork.getUnits() != standardRateUnits) {
        totalStandardWork = totalStandardWork.convertUnits(standardRateUnits, getParentFile().getProjectProperties());
    }
    Duration overtimeWorkPerDay;
    Duration totalOvertimeWork;
    if (overtimeWork == null || overtimeWork.getTotalAmount().getDuration() == 0) {
        // No overtime for this block: contribute zero in matching units.
        overtimeWorkPerDay = Duration.getInstance(0, standardWorkPerDay.getUnits());
        totalOvertimeWork = Duration.getInstance(0, standardWorkPerDay.getUnits());
    } else {
        overtimeWorkPerDay = overtimeWork.getAmountPerDay();
        if (overtimeWorkPerDay.getUnits() != overtimeRateUnits) {
            overtimeWorkPerDay = overtimeWorkPerDay.convertUnits(overtimeRateUnits, getParentFile().getProjectProperties());
        }
        totalOvertimeWork = overtimeWork.getTotalAmount();
        if (totalOvertimeWork.getUnits() != overtimeRateUnits) {
            totalOvertimeWork = totalOvertimeWork.convertUnits(overtimeRateUnits, getParentFile().getProjectProperties());
        }
    }
    // cost = work * rate, summed over standard and overtime components
    double costPerDay = (standardWorkPerDay.getDuration() * standardRateValue) + (overtimeWorkPerDay.getDuration() * overtimeRateValue);
    double totalCost = (totalStandardWork.getDuration() * standardRateValue) + (totalOvertimeWork.getDuration() * overtimeRateValue);
    // if the overtime work does not span the same number of days as the work,
    // then we have to split this into two TimephasedCost values
    if (overtimeWork == null || (overtimeWork.getFinish().equals(standardWork.getFinish()))) {
        // normal way: one cost block mirroring the work block's span
        TimephasedCost cost = new TimephasedCost();
        cost.setStart(standardWork.getStart());
        cost.setFinish(standardWork.getFinish());
        cost.setModified(standardWork.getModified());
        cost.setAmountPerDay(Double.valueOf(costPerDay));
        cost.setTotalAmount(Double.valueOf(totalCost));
        result.add(cost);
    } else {
        // prorated way: spans differ, so distribute the total cost over the calendar
        result.addAll(splitCostProrated(getCalendar(), totalCost, costPerDay, standardWork.getStart()));
    }
}
return result;
|
public class PathExpression {
    /**
     * Replace certain XPath patterns, including some predicates, with
     * substrings that are compatible with regular expressions.
     * <p>
     * NOTE(review): the replacements below are order-sensitive; do not
     * reorder them without re-checking each interaction.
     *
     * @param expression the input regular expressions string; may not be null
     * @return the regular expression with XPath patterns replaced with regular
     *         expression fragments; never null
     */
    protected String replaceXPathPatterns(String expression) { } }
|
assert expression != null;
// replace 2 or more sequential '|' characters in an OR expression
expression = expression.replaceAll("[\\|]{2,}", "|");
// if there is an empty expression in an OR expression, make the whole segment optional ...
// (e.g., "/a/b/(c|)/d" => "a/b(/(c))?/d"
expression = expression.replaceAll("/(\\([^|]+)(\\|){2,}([^)]+\\))", "(/$1$2$3)?");
expression = expression.replaceAll("/\\(\\|+([^)]+)\\)", "(?:/($1))?");
expression = expression.replaceAll("/\\((([^|]+)(\\|[^|]+)*)\\|+\\)", "(?:/($1))?");
// // Allow any path (that doesn't contain an explicit counter) to contain a counter,
// // done by replacing any '/' or '|' that isn't preceded by ']' or '*' or '/' or '(' ...
// input = input.replaceAll("(?<=[^\\]\\*/(])([/|])", "(?:\\\\[\\\\d+\\\\])?$1");
// Does the path contain any '[]' or '[*]' or '[0]' or '[n]' (where n is any positive integers) ...
expression = expression.replaceAll("\\[\\]", "(?:\\\\[\\\\d+\\\\])?"); // index is optional
expression = expression.replaceAll("\\[[*]\\]", "(?:\\\\[\\\\d+\\\\])?"); // index is optional
// '[0]/' => '(\[0\])?/'
expression = expression.replaceAll("\\[0\\]", "(?:\\\\[0\\\\])?"); // index is optional
// '[n]/' => '\[n\]/'
expression = expression.replaceAll("\\[([1-9]\\d*)\\]", "\\\\[$1\\\\]"); // index is required
// Change any other end predicates to not be wrapped by braces but to begin with a slash ...
// ... '[x]' => ... '/x'
expression = expression.replaceAll("(?<!\\\\)\\[([^\\]]*)\\]$", "/$1");
// Replace all '[n,m,o,p]' type sequences with '[(n|m|o|p)]'
java.util.regex.Matcher matcher = SEQUENCE_PATTERN.matcher(expression);
// CHECKSTYLE IGNORE check FOR NEXT 1 LINES
StringBuffer sb = new StringBuffer();
boolean result = matcher.find();
if (result) {
    do {
        String sequenceStr = matcher.group(1);
        // a '0' anywhere in the sequence marks the whole index as optional
        boolean optional = false;
        if (sequenceStr.startsWith("0,")) {
            sequenceStr = sequenceStr.replaceFirst("^0,", "");
            optional = true;
        }
        if (sequenceStr.endsWith(",0")) {
            sequenceStr = sequenceStr.replaceFirst(",0$", "");
            optional = true;
        }
        if (sequenceStr.contains(",0,")) {
            sequenceStr = sequenceStr.replaceAll(",0,", ",");
            optional = true;
        }
        // remaining alternatives become a regex alternation inside '\[...\]'
        sequenceStr = sequenceStr.replaceAll(",", "|");
        String replacement = "\\\\[(?:" + sequenceStr + ")\\\\]";
        if (optional) {
            replacement = "(?:" + replacement + ")?";
        }
        matcher.appendReplacement(sb, replacement);
        result = matcher.find();
    } while (result);
    matcher.appendTail(sb);
    expression = sb.toString();
}
// Order is important here
expression = expression.replaceAll("[*]([^/(\\\\])", "[^/]*$1"); // '*' not followed by '/', '\\', or '('
expression = expression.replaceAll("(?<!\\[\\^/\\])[*]", "[^/]*"); // '*' not preceded by '[^/]'
expression = expression.replaceAll("[/]{2,}$", "(?:/[^/]*)*"); // ending '//'
expression = expression.replaceAll("[/]{2,}", "(?:/[^/]*)*/"); // other '//'
return expression;
|
public class ElasticSearchRestDAOV5 { /** * Performs an index operation with a retry .
* @ param request The index request that we want to perform .
* @ param operationDescription The type of operation that we are performing . */
private void indexWithRetry ( final IndexRequest request , final String operationDescription ) { } }
|
try { new RetryUtil < IndexResponse > ( ) . retryOnException ( ( ) -> { try { return elasticSearchClient . index ( request ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } } , null , null , RETRY_COUNT , operationDescription , "indexWithRetry" ) ; } catch ( Exception e ) { Monitors . error ( className , "index" ) ; logger . error ( "Failed to index {} for request type: {}" , request . id ( ) , request . type ( ) , e ) ; }
|
public class ImageLUTIDImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setLUTID ( Integer newLUTID ) { } }
|
Integer oldLUTID = lutid ; lutid = newLUTID ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . IMAGE_LUTID__LUTID , oldLUTID , lutid ) ) ;
|
public class CsvResultSetWriter { /** * { @ inheritDoc } */
public void write ( final ResultSet resultSet ) throws SQLException , IOException { } }
|
if ( resultSet == null ) { throw new NullPointerException ( "ResultSet cannot be null" ) ; } writeHeaders ( resultSet ) ; // increments row and line number
writeContents ( resultSet ) ; // increments row and line number before writing of each row
|
public class ZealotKhala {
    /**
     * Generates an "OR"-prefixed BETWEEN range-query SQL fragment. When one of
     * the two bounds is null, the fragment degrades to a greater-than-or-equal
     * or less-than-or-equal comparison instead.
     *
     * @param field the database column name
     * @param startValue the lower bound (inclusive); may be null
     * @param endValue the upper bound (inclusive); may be null
     * @return this ZealotKhala instance, for call chaining
     */
    public ZealotKhala orBetween(String field, Object startValue, Object endValue) { } }
|
// Delegate to the shared BETWEEN builder with the OR prefix; the trailing
// 'true' flag matches the other or* overloads in this class.
return this.doBetween(ZealotConst.OR_PREFIX, field, startValue, endValue, true);
|
public class ProcessorConfigurationUtils { /** * Wraps an implementation of { @ link IPreProcessor } into an object that adds some information
* required internally ( like e . g . the dialect this processor was registered for ) .
* This method is meant for < strong > internal < / strong > use only .
* @ param preProcessor the pre - processor to be wrapped .
* @ param dialect the dialect this pre - processor was configured for .
* @ return the wrapped pre - processor . */
public static IPreProcessor wrap ( final IPreProcessor preProcessor , final IProcessorDialect dialect ) { } }
|
Validate . notNull ( dialect , "Dialect cannot be null" ) ; if ( preProcessor == null ) { return null ; } return new PreProcessorWrapper ( preProcessor , dialect ) ;
|
public class Ifc4PackageImpl {
    /**
     * Lazily resolves and caches the EClass for IfcAxis2Placement3D from the
     * registered Ifc4 EPackage.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcAxis2Placement3D() { } }
|
// Resolve once from the package registry; index 33 is the generated,
// fixed position of IfcAxis2Placement3D in the Ifc4 classifier list.
if (ifcAxis2Placement3DEClass == null) {
    ifcAxis2Placement3DEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(33);
}
return ifcAxis2Placement3DEClass;
|
public class AbstractSpreadSheetFlinkFileInputFormat {
    /**
     * Reads the truststore used to establish the certificate chain for
     * signature validation, and stores the contained X.509 certificates on the
     * configuration object. Does nothing when no truststore file is configured.
     *
     * @throws IOException if the truststore file cannot be read
     * @throws FormatNotUnderstoodException if the truststore cannot be opened
     *             or its certificates cannot be extracted
     */
    private void readTrustStore() throws IOException, FormatNotUnderstoodException { } }
|
// Only attempt to load a truststore when a (non-empty) file name is configured.
if (((this.hocr.getSigTruststoreFile() != null) && (!"".equals(this.hocr.getSigTruststoreFile())))) {
    LOG.info("Reading truststore to validate certificate chain for signatures");
    FlinkKeyStoreManager fksm = new FlinkKeyStoreManager();
    try {
        fksm.openKeyStore(new Path(this.hocr.getSigTruststoreFile()), this.hocr.getSigTruststoreType(), this.hocr.getSigTruststorePassword());
        // Make all certificates from the truststore available for chain validation.
        this.hocr.setX509CertificateChain(fksm.getAllX509Certificates());
    } catch (NoSuchAlgorithmException | CertificateException | KeyStoreException | IllegalArgumentException e) {
        LOG.error("Exception: ", e);
        // Wrap keystore problems in the format-level exception the callers expect.
        throw new FormatNotUnderstoodException("Cannot read truststore to establish certificate chain for signature validation " + e);
    }
}
|
public class OverrideBillableRevenueForReconciliationLineItemReport {
    /**
     * Runs the example: fetches a single reconciliation line item report,
     * overrides its billable revenue with the computed value plus $10, and
     * prints the updated reports.
     *
     * @param adManagerServices the services factory.
     * @param session the session.
     * @param reconciliationReportId the ID of the reconciliation report.
     * @param lineItemId the ID of the line item to retrieve.
     * @throws ApiException if the API request failed with one or more service errors.
     * @throws RemoteException if the API request failed due to other errors.
     */
    public static void runExample(AdManagerServices adManagerServices, AdManagerSession session, long reconciliationReportId, long lineItemId) throws RemoteException { } }
|
// Get the ReconciliationLineItemReportService.
ReconciliationLineItemReportServiceInterface reconciliationLineItemReportService = adManagerServices.get(session, ReconciliationLineItemReportServiceInterface.class);
// Create a statement to select a reconciliation line item report.
StatementBuilder statementBuilder = new StatementBuilder().where("reconciliationReportId = :reconciliationReportId AND lineItemId = :lineItemId").orderBy("lineItemId ASC").limit(1).withBindVariableValue("reconciliationReportId", reconciliationReportId).withBindVariableValue("lineItemId", lineItemId);
// Get reconciliation line item reports by statement; exactly one is expected
// (Iterables.getOnlyElement throws otherwise).
ReconciliationLineItemReportPage page = reconciliationLineItemReportService.getReconciliationLineItemReportsByStatement(statementBuilder.toStatement());
ReconciliationLineItemReport lineItemReport = Iterables.getOnlyElement(Arrays.asList(page.getResults()));
// Add $10 to the computed billable revenue as an override. The starting
// value depends on whether the line item is priced NET or gross.
Money billableRevenue;
if (PricingModel.NET.equals(lineItemReport.getPricingModel())) {
    billableRevenue = lineItemReport.getNetBillableRevenue();
} else {
    billableRevenue = lineItemReport.getGrossBillableRevenue();
}
// Amounts are in micro-units: 10_000_000 micros == $10.
billableRevenue.setMicroAmount(billableRevenue.getMicroAmount() + 10000000L);
BillableRevenueOverrides billableRevenueOverrides = new BillableRevenueOverrides();
billableRevenueOverrides.setBillableRevenueOverride(billableRevenue);
lineItemReport.setBillableRevenueOverrides(billableRevenueOverrides);
// Persist the override and report the outcome for each updated report.
ReconciliationLineItemReport[] updatedLineItemReports = reconciliationLineItemReportService.updateReconciliationLineItemReports(new ReconciliationLineItemReport[] { lineItemReport });
for (ReconciliationLineItemReport updatedLineItemReport : updatedLineItemReports) {
    System.out.printf("Reconciliation line item report for line item ID %d was " + "updated, with net billable revenue %.2f and reconciled volume %d.%n", updatedLineItemReport.getLineItemId(), updatedLineItemReport.getNetBillableRevenue().getMicroAmount() / 1000000f, updatedLineItemReport.getReconciledVolume());
}
|
public class PoiChecker { /** * 检查POI包的引入情况 */
public static void checkPoiImport ( ) { } }
|
try { Class . forName ( "org.apache.poi.ss.usermodel.Workbook" , false , ClassLoaderUtil . getClassLoader ( ) ) ; } catch ( ClassNotFoundException | NoClassDefFoundError e ) { throw new DependencyException ( e , NO_POI_ERROR_MSG ) ; }
|
public class ListCollectionsResult { /** * Version numbers of the face detection models associated with the collections in the array
* < code > CollectionIds < / code > . For example , the value of < code > FaceModelVersions [ 2 ] < / code > is the version number for
* the face detection model used by the collection in < code > CollectionId [ 2 ] < / code > .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setFaceModelVersions ( java . util . Collection ) } or { @ link # withFaceModelVersions ( java . util . Collection ) } if
* you want to override the existing values .
* @ param faceModelVersions
* Version numbers of the face detection models associated with the collections in the array
* < code > CollectionIds < / code > . For example , the value of < code > FaceModelVersions [ 2 ] < / code > is the version
* number for the face detection model used by the collection in < code > CollectionId [ 2 ] < / code > .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListCollectionsResult withFaceModelVersions ( String ... faceModelVersions ) { } }
|
if ( this . faceModelVersions == null ) { setFaceModelVersions ( new java . util . ArrayList < String > ( faceModelVersions . length ) ) ; } for ( String ele : faceModelVersions ) { this . faceModelVersions . add ( ele ) ; } return this ;
|
public class ApiOvhDedicatedserver { /** * Alter this object properties
* REST : PUT / dedicated / server / { serviceName } / serviceMonitoring / { monitoringId }
* @ param body [ required ] New object properties
* @ param serviceName [ required ] The internal name of your dedicated server
* @ param monitoringId [ required ] This monitoring id */
public void serviceName_serviceMonitoring_monitoringId_PUT ( String serviceName , Long monitoringId , OvhServiceMonitoring body ) throws IOException { } }
|
String qPath = "/dedicated/server/{serviceName}/serviceMonitoring/{monitoringId}" ; StringBuilder sb = path ( qPath , serviceName , monitoringId ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
|
public class TextureLoader {
    /**
     * Load a texture with a given format from the supplied input stream.
     *
     * @param format The format of the texture to be loaded (something like
     *            "PNG" or "TGA")
     * @param in The input stream from which the image data will be read
     * @return The newly created texture
     * @throws IOException Indicates a failure to read the image data
     */
    public static Texture getTexture(String format, InputStream in) throws IOException { } }
|
// Convenience overload: no vertical flipping (false) and GL_LINEAR filtering.
return getTexture(format, in, false, GL11.GL_LINEAR);
|
public class Lanczos {
    /**
     * Massage error bounds for very close ritz values by placing a gap between
     * them. The error bounds are then refined to reflect this.
     *
     * @param enough stop flag (single-element array used as an in/out parameter)
     * @param ritz array to store the ritz values
     * @param bnd array to store the error bounds
     * @param step current Lanczos step; entries 0..step of ritz/bnd are live
     * @param tol convergence tolerance
     * @param eps34 relative-closeness threshold for merging neighbouring bounds
     * @return the number of converged eigenvalues found
     */
    private static int error_bound(boolean[] enough, double[] ritz, double[] bnd, int step, double tol, double eps34) { } }
|
double gapl, gap;
// massage error bounds for very close ritz values:
// starting from the largest bound, merge a neighbour's bound into it when the
// two ritz values are relatively closer than eps34, zeroing the absorbed bound.
int mid = idamax(step + 1, bnd, 0, 1);
// sweep downward from the upper end toward mid ...
for (int i = ((step + 1) + (step - 1)) / 2; i >= mid + 1; i -= 1) {
    if (Math.abs(ritz[i - 1] - ritz[i]) < eps34 * Math.abs(ritz[i])) {
        if (bnd[i] > tol && bnd[i - 1] > tol) {
            bnd[i - 1] = Math.sqrt(bnd[i] * bnd[i] + bnd[i - 1] * bnd[i - 1]);
            bnd[i] = 0.0;
        }
    }
}
// ... and upward from the lower end toward mid.
for (int i = ((step + 1) - (step - 1)) / 2; i <= mid - 1; i += 1) {
    if (Math.abs(ritz[i + 1] - ritz[i]) < eps34 * Math.abs(ritz[i])) {
        if (bnd[i] > tol && bnd[i + 1] > tol) {
            bnd[i + 1] = Math.sqrt(bnd[i] * bnd[i] + bnd[i + 1] * bnd[i + 1]);
            bnd[i] = 0.0;
        }
    }
}
// refine the error bounds using the gap to the nearest neighbouring ritz value,
// and count how many eigenvalues have converged.
// NOTE(review): Math.EPSILON is not part of java.lang.Math — presumably a
// project-local Math class or shadowed constant is in scope here; confirm.
int neig = 0;
gapl = ritz[step] - ritz[0];
for (int i = 0; i <= step; i++) {
    gap = gapl;
    if (i < step) {
        gapl = ritz[i + 1] - ritz[i];
    }
    gap = Math.min(gap, gapl);
    if (gap > bnd[i]) {
        // shrink the bound quadratically relative to the gap
        bnd[i] = bnd[i] * (bnd[i] / gap);
    }
    if (bnd[i] <= 16.0 * Math.EPSILON * Math.abs(ritz[i])) {
        neig++;
        if (!enough[0]) {
            enough[0] = -Math.EPSILON < ritz[i] && ritz[i] < Math.EPSILON;
        }
    }
}
logger.info("Lancozs method found {} converged eigenvalues of the {}-by-{} matrix", neig, step + 1, step + 1);
if (neig != 0) {
    for (int i = 0; i <= step; i++) {
        if (bnd[i] <= 16.0 * Math.EPSILON * Math.abs(ritz[i])) {
            logger.info("ritz[{}] = {}", i, ritz[i]);
        }
    }
}
return neig;
|
public class ToUnknownStream {
    /**
     * Pass the call on to the underlying handler.
     *
     * @param name the document type name
     * @param publicId the declared public identifier, or null
     * @param systemId the declared system identifier, or null
     * @see org.xml.sax.ext.LexicalHandler#startDTD(String, String, String)
     */
    public void startDTD(String name, String publicId, String systemId) throws SAXException { } }
|
// Pure delegation to the wrapped serialization handler.
m_handler.startDTD(name, publicId, systemId);
|
public class InMemoryKsiSignature { /** * This method is used to verify signature consistency . */
private void calculateCalendarHashChainOutput ( ) throws KSIException { } }
|
ChainResult lastRes = null ; for ( AggregationHashChain chain : aggregationChains ) { if ( lastRes == null ) { lastRes = chain . calculateOutputHash ( 0L ) ; } else { lastRes = chain . calculateOutputHash ( lastRes . getLevel ( ) ) ; } LOGGER . debug ( "Output hash of chain: {} is {}" , chain , lastRes . getOutputHash ( ) ) ; }
|
public class PreauthorizationService { /** * Creates Use either a token or an existing payment to Authorizes the given amount with the given token .
* @ param token
* The identifier of a token .
* @ param amount
* Amount ( in cents ) which will be charged .
* @ param currency
* ISO 4217 formatted currency code .
* @ param description
* A short description for the preauthorization .
* @ return { @ link Transaction } object with the { @ link Preauthorization } as sub object . */
public Preauthorization createWithToken ( final String token , final Integer amount , final String currency , final String description ) { } }
|
ValidationUtils . validatesToken ( token ) ; ValidationUtils . validatesAmount ( amount ) ; ValidationUtils . validatesCurrency ( currency ) ; ParameterMap < String , String > params = new ParameterMap < String , String > ( ) ; params . add ( "token" , token ) ; params . add ( "amount" , String . valueOf ( amount ) ) ; params . add ( "currency" , currency ) ; params . add ( "source" , String . format ( "%s-%s" , PaymillContext . getProjectName ( ) , PaymillContext . getProjectVersion ( ) ) ) ; if ( StringUtils . isNotBlank ( description ) ) params . add ( "description" , description ) ; return RestfulUtils . create ( PreauthorizationService . PATH , params , Preauthorization . class , super . httpClient ) ;
|
public class OutboundSSLSelections {
    /**
     * Method to check if port numbers match (case-insensitively, since the
     * "port" may be a named service rather than a number).
     *
     * @param connectionObjPort the port configured on the connection object
     * @param remotePort the port of the remote endpoint; must not be null
     * @return true if the two ports match, ignoring case
     */
    private boolean doesPortMatch(String connectionObjPort, String remotePort) {
        // Return the comparison directly instead of routing it through a
        // mutable boolean flag; behavior (including the NPE on a null
        // remotePort) is unchanged.
        return remotePort.equalsIgnoreCase(connectionObjPort);
    }
}
|
public class CPDefinitionOptionValueRelPersistenceImpl {
    /**
     * Returns the last cp definition option value rel in the ordered set where
     * groupId = &#63;.
     *
     * @param groupId the group ID
     * @param orderByComparator the comparator to order the set by (optionally
     *            <code>null</code>)
     * @return the last matching cp definition option value rel
     * @throws NoSuchCPDefinitionOptionValueRelException if a matching cp
     *             definition option value rel could not be found
     */
    @Override
    public CPDefinitionOptionValueRel findByGroupId_Last(long groupId, OrderByComparator<CPDefinitionOptionValueRel> orderByComparator) throws NoSuchCPDefinitionOptionValueRelException { } }
|
// Delegate to the non-throwing fetch variant first.
CPDefinitionOptionValueRel cpDefinitionOptionValueRel = fetchByGroupId_Last(groupId, orderByComparator);
if (cpDefinitionOptionValueRel != null) {
    return cpDefinitionOptionValueRel;
}
// No match: build the standard "no such entity" message, e.g. "...{groupId=42}".
// (_NO_SUCH_ENTITY_WITH_KEY presumably supplies the opening brace — generated pattern.)
StringBundler msg = new StringBundler(4);
msg.append(_NO_SUCH_ENTITY_WITH_KEY);
msg.append("groupId=");
msg.append(groupId);
msg.append("}");
throw new NoSuchCPDefinitionOptionValueRelException(msg.toString());
|
public class ModelModifier { /** * Converts { @ link Type } to { @ link JavaType } .
* @ param type object to convert
* @ return object converted to { @ link JavaType } */
private JavaType toJavaType ( Type type ) { } }
|
JavaType typeToFind ; if ( type instanceof JavaType ) { typeToFind = ( JavaType ) type ; } else { typeToFind = _mapper . constructType ( type ) ; } return typeToFind ;
|
public class BackupRuleMarshaller {
    /**
     * Marshall the given parameter object: writes each BackupRule field to the
     * protocol marshaller using its generated binding.
     *
     * @param backupRule the object to marshall; must not be null
     * @param protocolMarshaller the marshaller to write the fields to
     * @throws SdkClientException if backupRule is null or marshalling fails
     */
    public void marshall(BackupRule backupRule, ProtocolMarshaller protocolMarshaller) { } }
|
if (backupRule == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // One marshall call per field, each paired with its generated binding constant.
    protocolMarshaller.marshall(backupRule.getRuleName(), RULENAME_BINDING);
    protocolMarshaller.marshall(backupRule.getTargetBackupVaultName(), TARGETBACKUPVAULTNAME_BINDING);
    protocolMarshaller.marshall(backupRule.getScheduleExpression(), SCHEDULEEXPRESSION_BINDING);
    protocolMarshaller.marshall(backupRule.getStartWindowMinutes(), STARTWINDOWMINUTES_BINDING);
    protocolMarshaller.marshall(backupRule.getCompletionWindowMinutes(), COMPLETIONWINDOWMINUTES_BINDING);
    protocolMarshaller.marshall(backupRule.getLifecycle(), LIFECYCLE_BINDING);
    protocolMarshaller.marshall(backupRule.getRecoveryPointTags(), RECOVERYPOINTTAGS_BINDING);
    protocolMarshaller.marshall(backupRule.getRuleId(), RULEID_BINDING);
} catch (Exception e) {
    // Preserve the cause when rewrapping as the SDK's client exception.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.