signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class InteropFramework { /** * Determines whether this format received as argument is an output format .
* @ param format a { @ link ProvFormat }
* @ return true if format is an output format */
public Boolean isOutputFormat ( ProvFormat format ) { } } | ProvFormatType t = provTypeMap . get ( format ) ; return ( t . equals ( ProvFormatType . OUTPUT ) || t . equals ( ProvFormatType . INPUTOUTPUT ) ) ; |
public class HostName { /** * Returns whether this represents a valid host name or address format .
* @ return */
public boolean isValid ( ) { } } | if ( parsedHost != null ) { return true ; } if ( validationException != null ) { return false ; } try { validate ( ) ; return true ; } catch ( HostNameException e ) { return false ; } |
public class DOMConfigurator {
    /**
     * Used internally to parse a layout element.
     *
     * Instantiates the layout class named by the element's class attribute
     * (after variable substitution), applies its nested param children via a
     * PropertySetter, forwards any other child elements to
     * parseUnrecognizedElement, and activates the configured properties.
     *
     * @param layout_element the DOM element describing the layout
     * @return the configured Layout, or null if instantiation/configuration failed
     */
    protected Layout parseLayout(Element layout_element) {
        // Resolve the concrete layout class, applying ${...} substitution.
        String className = subst(layout_element.getAttribute(CLASS_ATTR));
        LogLog.debug("Parsing layout of class: \"" + className + "\"");
        try {
            Object instance = Loader.loadClass(className).newInstance();
            Layout layout = (Layout) instance;
            PropertySetter propSetter = new PropertySetter(layout);
            NodeList params = layout_element.getChildNodes();
            final int length = params.getLength();
            for (int loop = 0; loop < length; loop++) {
                Node currentNode = (Node) params.item(loop);
                // Skip text/comment nodes; only element children carry configuration.
                if (currentNode.getNodeType() == Node.ELEMENT_NODE) {
                    Element currentElement = (Element) currentNode;
                    String tagName = currentElement.getTagName();
                    if (tagName.equals(PARAM_TAG)) {
                        setParameter(currentElement, propSetter);
                    } else {
                        // Unknown child elements are delegated for custom handling.
                        parseUnrecognizedElement(instance, currentElement, props);
                    }
                }
            }
            // Commit the collected properties onto the layout instance.
            propSetter.activate();
            return layout;
        } catch (Exception oops) {
            // Any failure (class not found, cast, reflection) is logged and
            // reported to the caller as a null layout.
            LogLog.error("Could not create the Layout. Reported error follows.", oops);
            return null;
        }
    }
}
public class CmsSubscriptionManager {
    /**
     * Returns the subscribed history resources that were deleted.<p>
     *
     * @param cms the current users context
     * @param user the user that subscribed to the resource
     * @param includeGroups indicates if the users groups should also be checked for subscribed deleted resources
     * @param folderPath the folder path of the deleted resources, if <code>null</code> all deleted resources will be returned
     * @param includeSubFolders indicates if the sub folders of the specified folder path should be considered, too
     * @param deletedFrom the time stamp from which the resources should have been deleted
     * @return the subscribed history resources that were deleted
     * @throws CmsException if something goes wrong
     */
    public List<I_CmsHistoryResource> readSubscribedDeletedResources(
            CmsObject cms,
            CmsUser user,
            boolean includeGroups,
            String folderPath,
            boolean includeSubFolders,
            long deletedFrom) throws CmsException {
        List<CmsGroup> groups = null;
        if (includeGroups) {
            try {
                groups = cms.getGroupsOfUser(user.getName(), false);
            } catch (CmsException e) {
                // failed to set user groups
                // NOTE(review): group resolution is deliberately best-effort here:
                // on failure the lookup proceeds with groups == null, i.e. the
                // user's groups are simply not considered.
            }
        }
        // Resolve the optional folder filter; null/empty path means "all resources".
        CmsResource resource = null;
        if (CmsStringUtil.isNotEmpty(folderPath)) {
            resource = cms.readResource(folderPath, CmsResourceFilter.ALL);
        }
        // Delegate the actual query to the security manager.
        return m_securityManager.readSubscribedDeletedResources(
                cms.getRequestContext(),
                getPoolName(),
                user,
                groups,
                resource,
                includeSubFolders,
                deletedFrom);
    }
}
public class SslPolicyClient { /** * Lists all features that can be specified in the SSL policy when using custom profile .
* < p > Sample code :
* < pre > < code >
* try ( SslPolicyClient sslPolicyClient = SslPolicyClient . create ( ) ) {
* ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ;
* SslPoliciesListAvailableFeaturesResponse response = sslPolicyClient . listAvailableFeaturesSslPolicies ( project ) ;
* < / code > < / pre >
* @ param project Project ID for this request .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final SslPoliciesListAvailableFeaturesResponse listAvailableFeaturesSslPolicies ( ProjectName project ) { } } | ListAvailableFeaturesSslPoliciesHttpRequest request = ListAvailableFeaturesSslPoliciesHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . build ( ) ; return listAvailableFeaturesSslPolicies ( request ) ; |
public class Environment { /** * Environment variable key - value pairs .
* @ return Environment variable key - value pairs . */
public java . util . Map < String , String > getVariables ( ) { } } | if ( variables == null ) { variables = new com . amazonaws . internal . SdkInternalMap < String , String > ( ) ; } return variables ; |
public class CharsetToolkit {
    /**
     * Retrieves all the available <code>Charset</code>s on the platform,
     * among which the default <code>charset</code>.
     *
     * @return an array of <code>Charset</code>s.
     */
    public static Charset[] getAvailableCharsets() {
        // Use the generic view instead of a raw Collection plus an unchecked
        // cast; toArray with a zero-length array is type-safe and lets the
        // collection allocate an exactly-sized result.
        Collection<Charset> charsets = Charset.availableCharsets().values();
        return charsets.toArray(new Charset[0]);
    }
}
import java.util.ArrayList;
import java.util.stream.Collectors;

class CountUppercaseChars {
    /**
     * This function sums up the total number of characters in a list of names,
     * excluding names that start with a lowercase letter.
     *
     * Examples:
     * -> count_uppercase_chars(['sally', 'Dylan', 'rebecca', 'Diana', 'Joanne', 'keith']) returns 16
     * -> count_uppercase_chars(['php', 'res', 'Python', 'abcd', 'Java', 'aaa']) returns 10
     * -> count_uppercase_chars(['abcd', 'Python', 'abba', 'aba']) returns 6
     *
     * @param namesList : A list of strings.
     * @return : Sum of the length of names starting with an uppercase letter.
     */
    public static Integer countUppercaseChars(ArrayList<String> namesList) {
        // Single pass: keep names whose first character is uppercase and sum
        // their lengths. Empty names are skipped explicitly — the original
        // called charAt(0) unconditionally and threw
        // StringIndexOutOfBoundsException on an empty string. Also avoids
        // reassigning the parameter and materializing an intermediate list.
        return namesList.stream()
                .filter(name -> !name.isEmpty() && Character.isUpperCase(name.charAt(0)))
                .mapToInt(String::length)
                .sum();
    }
}
public class CertificateLoader { /** * Build TrustManagerFactory .
* @ param trustStoreStream Truststore input stream
* @ param storeProperties store properties
* @ return TrustManagerFactory
* @ throws IOException
* @ throws GeneralSecurityException */
private static TrustManagerFactory getTrustManagerFactory ( InputStream trustStoreStream , StoreProperties storeProperties ) throws IOException , GeneralSecurityException { } } | // use provider if given , otherwise use the first matching security provider
final KeyStore ks ; if ( StringUtils . isNotBlank ( storeProperties . getProvider ( ) ) ) { ks = KeyStore . getInstance ( storeProperties . getType ( ) , storeProperties . getProvider ( ) ) ; } else { ks = KeyStore . getInstance ( storeProperties . getType ( ) ) ; } ks . load ( trustStoreStream , storeProperties . getPassword ( ) . toCharArray ( ) ) ; TrustManagerFactory tmf = TrustManagerFactory . getInstance ( storeProperties . getManagerType ( ) ) ; tmf . init ( ks ) ; return tmf ; |
public class MultipartStream {
    /**
     * Reads a byte from the <code>buffer</code>, and refills it as
     * necessary.
     *
     * @return The next byte from the input stream.
     * @throws IOException if there is no more data available.
     */
    public byte readByte() throws IOException {
        // Buffer depleted?
        if (head == tail) {
            head = 0;
            // Refill.
            tail = input.read(buffer, head, bufSize);
            if (tail == -1) {
                // No more data available.
                throw new IOException("No more data is available");
            }
            // Report progress to the optional notifier.
            if (notifier != null) {
                notifier.noteBytesRead(tail);
            }
        }
        // head points at the next unconsumed byte; tail at the end of valid data.
        return buffer[head++];
    }
}
public class SpringApplicationContextRegistry {
    /**
     * Looks up all beans of the given type via the Spring utility class,
     * including beans defined in ancestor bean factories.
     *
     * @param type the class of the beans to look up
     * @return a map of bean name to container-managed bean instance
     */
    public <T> Map<String, T> findByTypeWithName(Class<T> type) {
        return BeanFactoryUtils.beansOfTypeIncludingAncestors(applicationContext, type);
    }
}
public class Tile { /** * Defines if the smoothing property should be enabled / disabled .
* At the moment this is only used in the SparkLineTileSkin and
* RadarChartTileSkin .
* @ param SMOOTHING */
public void setSmoothing ( final boolean SMOOTHING ) { } } | if ( null == smoothing ) { _smoothing = SMOOTHING ; fireTileEvent ( REDRAW_EVENT ) ; } else { smoothing . set ( SMOOTHING ) ; } |
public class SQLOperation {
    /**
     * If there are query specific settings to overlay, then create a copy of config
     * There are two cases we need to clone the session config that's being passed to hive driver
     * 1. Async query -
     *    If the client changes a config setting, that shouldn't reflect in the execution already underway
     * 2. confOverlay -
     *    The query specific settings should only be applied to the query config and not session
     *
     * @return new configuration
     * @throws HiveSQLException if an overlay setting is rejected by verifyAndSet
     */
    private HiveConf getConfigForOperation() throws HiveSQLException {
        HiveConf sqlOperationConf = getParentSession().getHiveConf();
        // Only clone when isolation is actually needed; otherwise the parent
        // session config is shared as-is.
        if (!getConfOverlay().isEmpty() || shouldRunAsync()) {
            // clone the parent session config for this query
            sqlOperationConf = new HiveConf(sqlOperationConf);
            // apply overlay query specific settings, if any
            for (Map.Entry<String, String> confEntry : getConfOverlay().entrySet()) {
                try {
                    sqlOperationConf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
                } catch (IllegalArgumentException e) {
                    // Invalid/restricted settings are surfaced to the caller.
                    throw new HiveSQLException("Error applying statement specific settings", e);
                }
            }
        }
        return sqlOperationConf;
    }
}
public class DeleteChannelRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteChannelRequest deleteChannelRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( deleteChannelRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteChannelRequest . getId ( ) , ID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class Shell {
    /**
     * Evaluate JavaScript source.
     *
     * In interactive mode (filename == null) this runs a read-eval-print loop
     * over stdin, accumulating lines until they form a compilable unit; in
     * batch mode the whole file is evaluated as a script. Prompts and results
     * go to System.err.
     *
     * @param cx the current context
     * @param filename the name of the file to compile, or null
     *                 for interactive mode.
     */
    private void processSource(Context cx, String filename) {
        if (filename == null) {
            // Interactive mode: read statements from stdin until EOF or quit().
            BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
            String sourceName = "<stdin>";
            int lineno = 1;
            boolean hitEOF = false;
            do {
                int startline = lineno;
                System.err.print("js> ");
                System.err.flush();
                try {
                    String source = "";
                    // Collect lines of source to compile.
                    while (true) {
                        String newline;
                        newline = in.readLine();
                        if (newline == null) {
                            hitEOF = true;
                            break;
                        }
                        source = source + newline + "\n";
                        lineno++;
                        // Continue collecting as long as more lines
                        // are needed to complete the current
                        // statement. stringIsCompilableUnit is also
                        // true if the source statement will result in
                        // any error other than one that might be
                        // resolved by appending more source.
                        if (cx.stringIsCompilableUnit(source))
                            break;
                    }
                    Object result = cx.evaluateString(this, source, sourceName, startline, null);
                    // Only print non-undefined results.
                    if (result != Context.getUndefinedValue()) {
                        System.err.println(Context.toString(result));
                    }
                } catch (WrappedException we) {
                    // Some form of exception was caught by JavaScript and
                    // propagated up.
                    System.err.println(we.getWrappedException().toString());
                    we.printStackTrace();
                } catch (EvaluatorException ee) {
                    // Some form of JavaScript error.
                    System.err.println("js: " + ee.getMessage());
                } catch (JavaScriptException jse) {
                    // Some form of JavaScript error.
                    System.err.println("js: " + jse.getMessage());
                } catch (IOException ioe) {
                    System.err.println(ioe.toString());
                }
                if (quitting) {
                    // The user executed the quit() function.
                    break;
                }
            } while (!hitEOF);
            System.err.println();
        } else {
            // Batch mode: evaluate the whole file.
            FileReader in = null;
            try {
                in = new FileReader(filename);
            } catch (FileNotFoundException ex) {
                Context.reportError("Couldn't open file \"" + filename + "\".");
                return;
            }
            try {
                // Here we evaluate the entire contents of the file as
                // a script. Text is printed only if the print() function
                // is called.
                cx.evaluateReader(this, in, filename, 1, null);
            } catch (WrappedException we) {
                System.err.println(we.getWrappedException().toString());
                we.printStackTrace();
            } catch (EvaluatorException ee) {
                System.err.println("js: " + ee.getMessage());
            } catch (JavaScriptException jse) {
                System.err.println("js: " + jse.getMessage());
            } catch (IOException ioe) {
                System.err.println(ioe.toString());
            } finally {
                // Always close the reader, reporting (not propagating) close failures.
                try {
                    in.close();
                } catch (IOException ioe) {
                    System.err.println(ioe.toString());
                }
            }
        }
    }
}
public class PositionAlignmentOptions { /** * Set the horizontalAlignment property with an offset . One of LEFT , CENTER , RIGHT .
* @ param horizontalAlignment
* @ param offsetLeft
* @ return the instance */
public PositionAlignmentOptions setHorizontalAlignment ( PositionRelation horizontalAlignment , int offsetLeft ) { } } | switch ( horizontalAlignment ) { case LEFT : case CENTER : case RIGHT : break ; default : throw new IllegalArgumentException ( "Illegal value for the horizontal alignment property" ) ; } this . horizontalAlignment = horizontalAlignment ; this . offsetLeft = offsetLeft ; return this ; |
public class ContentStoreServiceImpl {
    /**
     * {@inheritDoc}
     */
    @Override
    public Content getContent(Context context, String url) throws InvalidScopeException, PathNotFoundException, StoreException {
        // Delegates to the three-argument overload with null for the middle
        // argument (its semantics are defined by that overload — presumably an
        // optional qualifier such as a caching/site parameter; confirm there).
        return getContent(context, null, url);
    }
}
public class ForcedErrorResponse {
    /**
     * Returns the error response status that should be returned for the current
     * response. If the value is zero, then the normal response should be returned
     * and this method should be called again for the next response. If the value
     * is less than 0, then this error response status object is spent and may be
     * discarded.
     *
     * @return the error response status
     */
    public int getStatus() {
        int result = status;
        if (status > 0) {
            // Consume one "skip" credit; while skips remain, report 0 so the
            // normal response is used for this call.
            if (skip.getAndDecrement() <= 0) {
                // Skips exhausted: consume one "count" credit; once the forced
                // error has been issued the configured number of times, mark
                // this object spent by flipping status (and the result) to -1.
                if (count.getAndDecrement() <= 0) {
                    result = status = -1;
                }
            } else {
                result = 0;
            }
        }
        return result;
    }
}
public class AbstractOperationContext {
    /**
     * Perform the work of processing the various OperationContext.Stage queues, and then the DONE stage.
     *
     * Drains each stage's step queue in order, advancing to the next stage when
     * a stage is empty and approved; rolls back and runs the result-handler
     * phase on failure, cancellation, or stability timeout.
     */
    private void processStages() {
        // Locate the next step to execute.
        ModelNode primaryResponse = null;
        Step step;
        do {
            step = steps.get(currentStage).pollFirst();
            if (step == null) {
                // MODEL stage may lazily append validation steps; if it did,
                // loop again to pick them up before advancing.
                if (currentStage == Stage.MODEL && addModelValidationSteps()) {
                    continue;
                }
                // No steps remain in this stage; give subclasses a chance to check status
                // and approve moving to the next stage
                if (!tryStageCompleted(currentStage)) {
                    // Can't continue
                    resultAction = ResultAction.ROLLBACK;
                    executeResultHandlerPhase(null);
                    return;
                }
                // Proceed to the next stage
                if (currentStage.hasNext()) {
                    currentStage = currentStage.next();
                    if (currentStage == Stage.VERIFY) {
                        // a change was made to the runtime. Thus, we must wait
                        // for stability before resuming in to verify.
                        try {
                            awaitServiceContainerStability();
                        } catch (InterruptedException e) {
                            cancelled = true;
                            handleContainerStabilityFailure(primaryResponse, e);
                            executeResultHandlerPhase(null);
                            return;
                        } catch (TimeoutException te) {
                            // The service container is in an unknown state; but we don't require restart
                            // because rollback may allow the container to stabilize. We force require-restart
                            // in the rollback handling if the container cannot stabilize (see OperationContextImpl.releaseStepLocks)
                            // processState.setRestartRequired(); // don't use our restartRequired() method as this is not reversible in rollback
                            handleContainerStabilityFailure(primaryResponse, te);
                            executeResultHandlerPhase(null);
                            return;
                        }
                    }
                }
            } else {
                // The response to the first step is what goes to the outside caller
                if (primaryResponse == null) {
                    primaryResponse = step.response;
                }
                // Execute the step, but make sure we always finalize any steps
                Throwable toThrow = null;
                // Whether to return after try/finally
                boolean exit = false;
                try {
                    CapabilityRegistry.RuntimeStatus stepStatus = getStepExecutionStatus(step);
                    if (stepStatus == RuntimeCapabilityRegistry.RuntimeStatus.NORMAL) {
                        executeStep(step);
                    } else {
                        // Step cannot run in the current runtime state; record
                        // the required restart/reload in the response headers.
                        String header = stepStatus == RuntimeCapabilityRegistry.RuntimeStatus.RESTART_REQUIRED
                                ? OPERATION_REQUIRES_RESTART
                                : OPERATION_REQUIRES_RELOAD;
                        step.response.get(RESPONSE_HEADERS, header).set(true);
                    }
                } catch (RuntimeException | Error re) {
                    resultAction = ResultAction.ROLLBACK;
                    toThrow = re;
                } finally {
                    // See if executeStep put us in a state where we shouldn't do any more
                    if (toThrow != null || !canContinueProcessing()) {
                        // We're done.
                        executeResultHandlerPhase(toThrow);
                        exit = true; // we're on the return path
                    }
                }
                if (exit) {
                    return;
                }
            }
        } while (currentStage != Stage.DONE);
        assert primaryResponse != null; // else ModelControllerImpl executed an op with no steps
        // All steps ran and canContinueProcessing returned true for the last one, so...
        executeDoneStage(primaryResponse);
    }
}
public class ChatApi {
    /**
     * Consult with another agent via a queue.
     *
     * Consult with another agent during a chat by sending a consult invitation to the
     * specified queue. A consult occurs in the context of the specified chat, but the
     * customer is not aware of the consulting agent. Messages and notifications from
     * the consulting agent are only visible to other agents in the chat, not to the
     * customer. After the consulting agent accepts, the originating agent can transfer
     * the chat, add the consulting agent to a conference, or the consulting agent can
     * leave the chat.
     *
     * @param id The ID of the chat interaction. (required)
     * @param consultData (required)
     * @return ApiResponse&lt;ApiSuccessResponse&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<ApiSuccessResponse> consultByQueueWithHttpInfo(String id, ConsultData1 consultData) throws ApiException {
        // Validate arguments and build the HTTP call (no progress listeners).
        com.squareup.okhttp.Call call = consultByQueueValidateBeforeCall(id, consultData, null, null);
        // The generic return type token tells the client how to deserialize the body.
        Type localVarReturnType = new TypeToken<ApiSuccessResponse>() { }.getType();
        return apiClient.execute(call, localVarReturnType);
    }
}
public class MessageVersion { /** * Set up the screen input fields . */
public void setupFields ( ) { } } | FieldInfo field = null ; field = new FieldInfo ( this , ID , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; field . setDataClass ( Integer . class ) ; field . setHidden ( true ) ; field = new FieldInfo ( this , LAST_CHANGED , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; field . setDataClass ( Date . class ) ; field . setHidden ( true ) ; field = new FieldInfo ( this , DELETED , 10 , null , new Boolean ( false ) ) ; field . setDataClass ( Boolean . class ) ; field . setHidden ( true ) ; field = new FieldInfo ( this , DESCRIPTION , 30 , null , null ) ; field = new FieldInfo ( this , CODE , 20 , null , null ) ; field = new FieldInfo ( this , NAMESPACE , 128 , null , null ) ; field = new FieldInfo ( this , SCHEMA_LOCATION , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; field = new FieldInfo ( this , NUMERIC_VERSION , 20 , null , null ) ; field = new FieldInfo ( this , VERSION_ID , 20 , null , null ) ; field = new FieldInfo ( this , PROPERTIES , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; |
public class JsHdrsImpl {
    /**
     * Set the unique StreamId used by the flush protocol to determine
     * whether a stream is active or flushed.
     * Javadoc description supplied by CommonMessageHeaders interface.
     */
    public final void setGuaranteedStreamUUID(SIBUuid12 value) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "setGuaranteedStreamUUID", value);
        // Store the UUID's byte form in the header field; null clears it.
        if (value != null)
            getHdr2().setField(JsHdr2Access.GUARANTEED_SET_STREAMUUID, value.toByteArray());
        else
            getHdr2().setField(JsHdr2Access.GUARANTEED_SET_STREAMUUID, null);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "setGuaranteedStreamUUID");
    }
}
public class CmsAfterPublishStaticExportHandler { /** * Returns all resources within the current OpenCms site that are not marked as internal . < p >
* The result list contains objects of type { @ link CmsPublishedResource } . < p >
* @ param cms the cms context
* @ return all resources within the current OpenCms site that are not marked as internal
* @ throws CmsException if something goes wrong */
public List < CmsPublishedResource > getAllResources ( CmsObject cms ) throws CmsException { } } | if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_GET_ALL_RESOURCES_0 ) ) ; } // TODO : to improve performance , get here only the resources to render from the configuration
// read all from the root path , exclude resources flagged as internal
List < CmsResource > vfsResources = cms . readResources ( "/" , CmsResourceFilter . ALL . addExcludeFlags ( CmsResource . FLAG_INTERNAL ) ) ; CmsExportFolderMatcher matcher = OpenCms . getStaticExportManager ( ) . getExportFolderMatcher ( ) ; // loop through the list and create the list of CmsPublishedResources
List < CmsPublishedResource > resources = new ArrayList < CmsPublishedResource > ( vfsResources . size ( ) ) ; Iterator < CmsResource > i = vfsResources . iterator ( ) ; while ( i . hasNext ( ) ) { CmsResource resource = i . next ( ) ; if ( ! matcher . match ( resource . getRootPath ( ) ) ) { // filter files that do not match the resources to render
continue ; } CmsPublishedResource pubRes = new CmsPublishedResource ( resource ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_PROCESSING_1 , resource . getRootPath ( ) ) ) ; } resources . add ( pubRes ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_NUM_RESOURCES_1 , new Integer ( resources . size ( ) ) ) ) ; } return resources ; |
public class Strman {
    /**
     * Converts the first character of string to upper case.
     *
     * @param input The string to convert.
     * @return Returns the converted string. An empty string is returned unchanged.
     * @throws IllegalArgumentException if input is null
     */
    public static String upperFirst(String input) {
        if (input == null) {
            throw new IllegalArgumentException("input can't be null");
        }
        // An empty string has no first character; the previous implementation
        // threw NoSuchElementException via Optional.get() in this case.
        if (input.isEmpty()) {
            return input;
        }
        // substring(0, 1).toUpperCase() mirrors the original String::toUpperCase
        // on the head character (locale-default, handles multi-char mappings).
        return input.substring(0, 1).toUpperCase() + input.substring(1);
    }
}
public class JmsJcaActivationSpecImpl {
    /**
     * Stores the destination type and recreates the destination to match.
     *
     * (non-Javadoc)
     * @see com.ibm.ws.sib.api.jmsra.JmsJcaActivationSpec#setDesinationType(java.lang.String)
     */
    @Override
    public void setDestinationType(final String destinationType) {
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) {
            SibTr.debug(this, TRACE, "setDestinationType", destinationType);
        }
        _destinationType = destinationType;
        // Recreate the destination so it reflects the newly set type.
        dynamicallyCreateDestination();
    }
}
public class JobsInner {
    /**
     * Get Job.
     * Gets a Job.
     *
     * @param resourceGroupName The name of the resource group within the Azure subscription.
     * @param accountName The Media Services account name.
     * @param transformName The Transform name.
     * @param jobName The Job name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ApiErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the JobInner object if successful.
     */
    public JobInner get(String resourceGroupName, String accountName, String transformName, String jobName) {
        // Synchronous facade: block on the async call and unwrap the response body.
        return getWithServiceResponseAsync(resourceGroupName, accountName, transformName, jobName)
                .toBlocking().single().body();
    }
}
public class CommerceUserSegmentEntryUtil {
    /**
     * Returns the first commerce user segment entry in the ordered set where groupId = &#63;.
     *
     * @param groupId the group ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce user segment entry, or <code>null</code> if a matching commerce user segment entry could not be found
     */
    public static CommerceUserSegmentEntry fetchByGroupId_First(
            long groupId, OrderByComparator<CommerceUserSegmentEntry> orderByComparator) {
        // Static utility facade: delegate to the persistence implementation.
        return getPersistence().fetchByGroupId_First(groupId, orderByComparator);
    }
}
public class SearchProvisionedProductsResult { /** * Information about the provisioned products .
* @ param provisionedProducts
* Information about the provisioned products . */
public void setProvisionedProducts ( java . util . Collection < ProvisionedProductAttribute > provisionedProducts ) { } } | if ( provisionedProducts == null ) { this . provisionedProducts = null ; return ; } this . provisionedProducts = new java . util . ArrayList < ProvisionedProductAttribute > ( provisionedProducts ) ; |
public class ICUService {
    /**
     * Convenience override of getDisplayNames(ULocale, Comparator, String) that
     * uses null for the matchID, thus returning all display names.
     *
     * @param locale the locale in which to localize the names
     * @param com the comparator used to order the returned map
     * @return a sorted map of display names
     */
    public SortedMap<String, String> getDisplayNames(ULocale locale, Comparator<Object> com) {
        return getDisplayNames(locale, com, null);
    }
}
public class ObjectArrayList {
    /**
     * Returns an array containing all of the elements in the receiver in the
     * correct order. The runtime type of the returned array is that of the
     * specified array. If the receiver fits in the specified array, it is
     * returned therein. Otherwise, a new array is allocated with the runtime
     * type of the specified array and the size of the receiver.
     *
     * If the receiver fits in the specified array with room to spare
     * (i.e., the array has more elements than the receiver),
     * the element in the array immediately following the end of the
     * receiver is set to null. This is useful in determining the length
     * of the receiver <em>only</em> if the caller knows that the receiver
     * does not contain any null elements.
     *
     * @param array the array into which the elements of the receiver are to
     *              be stored, if it is big enough; otherwise, a new array of the
     *              same runtime type is allocated for this purpose.
     * @return an array containing the elements of the receiver.
     * @exception ArrayStoreException the runtime type of <tt>array</tt> is not a supertype
     *            of the runtime type of every element in the receiver.
     */
    public Object[] toArray(Object array[]) {
        // Allocate a bigger array of the same runtime component type if needed.
        if (array.length < size)
            array = (Object[]) java.lang.reflect.Array.newInstance(array.getClass().getComponentType(), size);
        Object[] theElements = elements;
        // Copy the first `size` elements (backwards iteration, same result).
        for (int i = size; --i >= 0;)
            array[i] = theElements[i];
        // Null-terminate when there is spare room, per the contract above.
        if (array.length > size)
            array[size] = null;
        return array;
    }
}
public class MappingRules {
    /**
     * The default rule set for general import and export features.
     *
     * Uses the standard node/export/import filters, value-scoped properties
     * with base64-encoded binaries, and the "update" change rule.
     * NOTE(review): the meaning of the 0 argument is defined by the
     * MappingRules constructor (depth/limit?) — confirm there.
     */
    public static MappingRules getDefaultMappingRules() {
        return new MappingRules(
                MAPPING_NODE_FILTER,
                MAPPING_EXPORT_FILTER,
                MAPPING_IMPORT_FILTER,
                new PropertyFormat(PropertyFormat.Scope.value, PropertyFormat.Binary.base64),
                0,
                MappingRules.ChangeRule.update);
    }
}
public class DumpResultSqlFilter { /** * { @ inheritDoc }
* @ see jp . co . future . uroborosql . filter . AbstractSqlFilter # doQuery ( jp . co . future . uroborosql . context . SqlContext , java . sql . PreparedStatement , java . sql . ResultSet ) */
@ Override public ResultSet doQuery ( final SqlContext sqlContext , final PreparedStatement preparedStatement , final ResultSet resultSet ) { } } | try { if ( resultSet . getType ( ) == ResultSet . TYPE_FORWARD_ONLY ) { LOG . warn ( "ResultSet type is TYPE_FORWARD_ONLY. DumpResultSqlFilter use ResultSet#beforeFirst(). Please Set TYPE_SCROLL_INSENSITIVE or TYPE_SCROLL_SENSITIVE." ) ; } StringBuilder builder = displayResult ( resultSet ) ; LOG . info ( builder . toString ( ) ) ; } catch ( SQLException e ) { e . printStackTrace ( ) ; } return resultSet ; |
public class ListMetricsRequest { /** * The dimensions to filter against .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setDimensions ( java . util . Collection ) } or { @ link # withDimensions ( java . util . Collection ) } if you want to
* override the existing values .
* @ param dimensions
* The dimensions to filter against .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListMetricsRequest withDimensions ( DimensionFilter ... dimensions ) { } } | if ( this . dimensions == null ) { setDimensions ( new com . amazonaws . internal . SdkInternalList < DimensionFilter > ( dimensions . length ) ) ; } for ( DimensionFilter ele : dimensions ) { this . dimensions . add ( ele ) ; } return this ; |
public class BureauSender {
    /**
     * Issues a notification that will result in a call to {@link
     * BureauReceiver#destroyAgent} on a client.
     *
     * @param target the client to notify
     * @param arg1 the argument forwarded to destroyAgent on the receiver
     */
    public static void destroyAgent(ClientObject target, int arg1) {
        // Dispatch via the receiver/decoder codes with the boxed argument.
        sendNotification(
                target,
                BureauDecoder.RECEIVER_CODE,
                BureauDecoder.DESTROY_AGENT,
                new Object[] { Integer.valueOf(arg1) });
    }
}
public class AcceptableUsagePolicyStatus { /** * Gets property or default .
* @ param name the name
* @ param defaultValues the default values
* @ return the property or default */
public Collection < Object > getPropertyOrDefault ( final String name , final Object ... defaultValues ) { } } | if ( this . properties . containsKey ( name ) ) { return this . properties . get ( name ) ; } return Arrays . stream ( defaultValues ) . collect ( Collectors . toList ( ) ) ; |
public class DeviceProxy {
    /**
     * Retrieves the reply to an asynchronous command_inout call identified by id,
     * waiting up to the given timeout.
     * NOTE(review): exact timeout units and blocking semantics are defined by
     * the DAO implementation — confirm there.
     *
     * @param id the asynchronous call identifier
     * @param timeout how long to wait for the reply
     * @return the command reply data
     * @throws DevFailed if the device call failed
     * @throws AsynReplyNotArrived if the reply has not arrived within the timeout
     */
    public DeviceData command_inout_reply(int id, int timeout) throws DevFailed, AsynReplyNotArrived {
        return deviceProxyDAO.command_inout_reply(this, id, timeout);
    }
}
public class ServiceActionAssociationMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ServiceActionAssociation serviceActionAssociation , ProtocolMarshaller protocolMarshaller ) { } } | if ( serviceActionAssociation == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( serviceActionAssociation . getServiceActionId ( ) , SERVICEACTIONID_BINDING ) ; protocolMarshaller . marshall ( serviceActionAssociation . getProductId ( ) , PRODUCTID_BINDING ) ; protocolMarshaller . marshall ( serviceActionAssociation . getProvisioningArtifactId ( ) , PROVISIONINGARTIFACTID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class RxUtil {
    /**
     * Logs errors and onNext at info level using slf4j {@link Logger}.
     *
     * @param <T>
     *            the return generic type
     * @return a logging {@link Observer}
     */
    public static <T> Observer<? super T> log() {
        return new Observer<T>() {
            @Override
            public void onCompleted() {
                // do nothing
            }

            @Override
            public void onError(Throwable e) {
                // Errors go to the error level with the full stack trace.
                log.error(e.getMessage(), e);
            }

            @Override
            public void onNext(T t) {
                // String-concatenation also handles null items safely.
                log.info(t + "");
            }
        };
    }
}
public class ZipInputStream { /** * Reads local file ( LOC ) header for next entry . */
private ZipEntry readLOC ( ) throws IOException { } } | try { readFully ( tmpbuf , 0 , LOCHDR ) ; } catch ( EOFException e ) { return null ; } if ( get32 ( tmpbuf , 0 ) != LOCSIG ) { return null ; } // get flag first , we need check EFS .
flag = get16 ( tmpbuf , LOCFLG ) ; // get the entry name and create the ZipEntry first
int len = get16 ( tmpbuf , LOCNAM ) ; int blen = b . length ; if ( len > blen ) { do blen = blen * 2 ; while ( len > blen ) ; b = new byte [ blen ] ; } readFully ( b , 0 , len ) ; // Force to use UTF - 8 if the EFS bit is ON , even the cs is NOT UTF - 8
ZipEntry e = createZipEntry ( ( ( flag & EFS ) != 0 ) ? zc . toStringUTF8 ( b , len ) : zc . toString ( b , len ) ) ; // now get the remaining fields for the entry
if ( ( flag & 1 ) == 1 ) { throw new ZipException ( "encrypted ZIP entry not supported" ) ; } e . method = get16 ( tmpbuf , LOCHOW ) ; e . time = get32 ( tmpbuf , LOCTIM ) ; if ( ( flag & 8 ) == 8 ) { /* " Data Descriptor " present */
if ( e . method != DEFLATED ) { throw new ZipException ( "only DEFLATED entries can have EXT descriptor" ) ; } } else { e . crc = get32 ( tmpbuf , LOCCRC ) ; e . csize = get32 ( tmpbuf , LOCSIZ ) ; e . size = get32 ( tmpbuf , LOCLEN ) ; } len = get16 ( tmpbuf , LOCEXT ) ; if ( len > 0 ) { byte [ ] bb = new byte [ len ] ; readFully ( bb , 0 , len ) ; e . setExtra ( bb ) ; // extra fields are in " HeaderID ( 2 ) DataSize ( 2 ) Data . . . format
if ( e . csize == ZIP64_MAGICVAL || e . size == ZIP64_MAGICVAL ) { int off = 0 ; while ( off + 4 < len ) { int sz = get16 ( bb , off + 2 ) ; if ( get16 ( bb , off ) == ZIP64_EXTID ) { off += 4 ; // LOC extra zip64 entry MUST include BOTH original and
// compressed file size fields
if ( sz < 16 || ( off + sz ) > len ) { // Invalid zip64 extra fields , simply skip . Even it ' s
// rare , it ' s possible the entry size happens to be
// the magic value and it " accidnetly " has some bytes
// in extra match the id .
return e ; } e . size = get64 ( bb , off ) ; e . csize = get64 ( bb , off + 8 ) ; break ; } off += ( sz + 4 ) ; } } } return e ; |
public class EventScope { /** * Analyzes the receiver for Methods that satisfy to receive events of type
* < code > eventClass < / code > .
* @ param receiver
* the receiver
* @ param eventClass
* the event class
* @ param namePattern
* Optional . A method name regex pattern to better select the
* handler method .
* @ return */
private Method detectReceiverMethod ( final Object receiver , final Class < ? extends T > eventClass , final Pattern namePattern ) { } } | final Method [ ] methods = receiver . getClass ( ) . getMethods ( ) ; final List < Method > results = new LinkedList < Method > ( ) ; for ( final Method method : methods ) { if ( method . getParameterTypes ( ) == null || method . getParameterTypes ( ) . length != 1 ) { // wrong amount of parameters - > try next
continue ; } if ( namePattern != null && ! namePattern . matcher ( method . getName ( ) ) . find ( ) ) { // wrong method name - try next
continue ; } if ( this . canHandle ( method , eventClass ) ) { // hit !
results . add ( method ) ; } } if ( results . size ( ) == 1 ) { // only one hit - perfect
return results . get ( 0 ) ; } if ( results . size ( ) > 1 ) { throw new IllegalArgumentException ( "receiver '" + receiver + "' has more than one accessible method with a single parameter of type '" + eventClass . getName ( ) + "' " + ( namePattern != null ? " and name pattern '" + namePattern . pattern ( ) + "' " : "" ) + ": " + results . toString ( ) ) ; } // no hit _ > exception
throw new IllegalArgumentException ( "receiver '" + receiver + "' does not have an accessible method with a single parameter of type '" + eventClass . getName ( ) + "' " + ( namePattern != null ? " and name pattern '" + namePattern . pattern ( ) + "' " : "" ) ) ; |
public class InternalCallContextFactory { /** * Create an internal call callcontext
* This is used by notification queue and persistent bus - accountRecordId is expected to be non null
* @ param tenantRecordId tenant record id - if null , the default tenant record id value will be used
* @ param accountRecordId account record id ( can be null in specific use - cases , e . g . config change events in BeatrixListener )
* @ param userName user name
* @ param callOrigin call origin
* @ param userType user type
* @ param userToken user token , if any
* @ return internal call callcontext */
public InternalCallContext createInternalCallContext ( @ Nullable final Long tenantRecordId , @ Nullable final Long accountRecordId , final String userName , final CallOrigin callOrigin , final UserType userType , @ Nullable final UUID userToken ) { } } | return createInternalCallContext ( tenantRecordId , accountRecordId , userName , callOrigin , userType , userToken , null , null , null , null ) ; |
public class ScriptPluginFileCopier { /** * Copy string content */
public String copyScriptContent ( final ExecutionContext executionContext , final String s , final INodeEntry node , final String destination ) throws FileCopierException { } } | return copyFile ( executionContext , null , null , s , node , destination ) ; |
public class API { /** * Returns true if the API call has a valid key
* @ param reqHeader the request header
* @ param params the parameters
* @ return true if the API call has a valid key
* @ since 2.6.0 */
public boolean hasValidKey ( HttpRequestHeader reqHeader , JSONObject params ) { } } | try { String apiPath ; try { apiPath = reqHeader . getURI ( ) . getPath ( ) ; } catch ( URIException e ) { logger . error ( e . getMessage ( ) , e ) ; return false ; } String nonceParam = reqHeader . getHeader ( HttpHeader . X_ZAP_API_NONCE ) ; if ( nonceParam == null && params . has ( API_NONCE_PARAM ) ) { nonceParam = params . getString ( API_NONCE_PARAM ) ; } if ( nonceParam != null ) { Nonce nonce = nonces . get ( nonceParam ) ; if ( nonce == null ) { logger . warn ( "API nonce " + nonceParam + " not found in request from " + reqHeader . getSenderAddress ( ) . getHostAddress ( ) ) ; return false ; } else if ( nonce . isOneTime ( ) ) { nonces . remove ( nonceParam ) ; } if ( ! nonce . isValid ( ) ) { logger . warn ( "API nonce " + nonce . getNonceKey ( ) + " expired at " + nonce . getExpires ( ) . toString ( ) + " in request from " + reqHeader . getSenderAddress ( ) . getHostAddress ( ) ) ; return false ; } if ( ! apiPath . equals ( nonce . getApiPath ( ) ) ) { logger . warn ( "API nonce path was " + nonce . getApiPath ( ) + " but call was for " + apiPath + " in request from " + reqHeader . getSenderAddress ( ) . getHostAddress ( ) ) ; return false ; } } else { String keyParam = reqHeader . getHeader ( HttpHeader . X_ZAP_API_KEY ) ; if ( keyParam == null && params . has ( API_KEY_PARAM ) ) { keyParam = params . getString ( API_KEY_PARAM ) ; } if ( ! getOptionsParamApi ( ) . getKey ( ) . equals ( keyParam ) ) { logger . warn ( "API key incorrect or not supplied: " + keyParam + " in request from " + reqHeader . getSenderAddress ( ) . getHostAddress ( ) ) ; return false ; } } return true ; } finally { synchronized ( nonces ) { for ( Entry < String , Nonce > entry : nonces . entrySet ( ) ) { if ( ! entry . getValue ( ) . isValid ( ) ) { nonces . remove ( entry . getKey ( ) ) ; } } } } |
public class WeldCollections { /** * Returns an immutable view of a given list . If the given list is empty , a shared instance is returned . If the given list
* is an instance of { @ link ArrayList } , it is trimmed . */
public static < T > List < T > immutableListView ( List < T > list ) { } } | if ( list instanceof ImmutableList < ? > ) { return list ; } if ( list instanceof ArrayList < ? > ) { ArrayList . class . cast ( list ) . trimToSize ( ) ; } return Collections . unmodifiableList ( list ) ; |
public class Executable { /** * Opens a PDF document .
* @ param file
* @ param waitForTermination
* @ return a process
* @ throws IOException */
public static final Process openDocument ( File file , boolean waitForTermination ) throws IOException { } } | return openDocument ( file . getAbsolutePath ( ) , waitForTermination ) ; |
public class TreeControlTag { /** * level是当前层 */
protected void render ( JspWriter out , ViewNode node , int level , int width , boolean last ) throws IOException { } } | HttpServletRequest request = ( HttpServletRequest ) pageContext . getRequest ( ) ; String url_Path = request . getContextPath ( ) ; // Debug . logVerbose ( " [ JdonFramework ] node key = " + node . getKey ( ) +
// " level = " + level +
// " width = " + width + " last = " + last , module ) ;
// 如果是根节点或无标签 , 不显示该节点
if ( node . isRoot ( ) ) { // Render the children of this node
ViewNode children [ ] = node . findChildren ( ) ; // Debug . logVerbose ( " [ JdonFramework ] children count = " +
// children . length ) ;
int lastIndex = children . length - 1 ; int newLevel = level + 1 ; for ( int i = 0 ; i < children . length ; i ++ ) { render ( out , children [ i ] , newLevel , width , i == lastIndex ) ; } return ; } // Render the beginning of this node
out . println ( " <tr valign=\"middle\">" ) ; for ( int i = 0 ; i < level ; i ++ ) { int levels = level - i ; ViewNode parent = node ; for ( int j = 1 ; j <= levels ; j ++ ) parent = parent . getParent ( ) ; if ( parent . isLast ( ) ) out . print ( " <td></td>" ) ; else { // 显示竖线
out . print ( " <td><img src=\"" ) ; out . print ( images ) ; out . print ( "/" ) ; out . print ( IMAGE_LINE_VERTICAL ) ; out . print ( "\" alt=\"\" border=\"0\"></td>" ) ; } out . println ( ) ; } displayNodePic ( out , node , url_Path ) ; displayNode ( out , node , url_Path , level , width ) ; // Render the end of this node
out . println ( " </tr>" ) ; // Render the children of this node
if ( node . isExpanded ( ) ) { ViewNode children [ ] = node . findChildren ( ) ; int lastIndex = children . length - 1 ; int newLevel = level + 1 ; for ( int i = 0 ; i < children . length ; i ++ ) { render ( out , children [ i ] , newLevel , width , i == lastIndex ) ; } } |
public class InstanceAssociationStatusInfoMarshaller { /** * Marshall the given parameter object . */
public void marshall ( InstanceAssociationStatusInfo instanceAssociationStatusInfo , ProtocolMarshaller protocolMarshaller ) { } } | if ( instanceAssociationStatusInfo == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( instanceAssociationStatusInfo . getAssociationId ( ) , ASSOCIATIONID_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getDocumentVersion ( ) , DOCUMENTVERSION_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getAssociationVersion ( ) , ASSOCIATIONVERSION_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getInstanceId ( ) , INSTANCEID_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getExecutionDate ( ) , EXECUTIONDATE_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getStatus ( ) , STATUS_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getDetailedStatus ( ) , DETAILEDSTATUS_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getExecutionSummary ( ) , EXECUTIONSUMMARY_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getErrorCode ( ) , ERRORCODE_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getOutputUrl ( ) , OUTPUTURL_BINDING ) ; protocolMarshaller . marshall ( instanceAssociationStatusInfo . getAssociationName ( ) , ASSOCIATIONNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class RegionDiskClient { /** * Resizes the specified regional persistent disk .
* < p > Sample code :
* < pre > < code >
* try ( RegionDiskClient regionDiskClient = RegionDiskClient . create ( ) ) {
* ProjectRegionDiskName disk = ProjectRegionDiskName . of ( " [ PROJECT ] " , " [ REGION ] " , " [ DISK ] " ) ;
* RegionDisksResizeRequest regionDisksResizeRequestResource = RegionDisksResizeRequest . newBuilder ( ) . build ( ) ;
* Operation response = regionDiskClient . resizeRegionDisk ( disk . toString ( ) , regionDisksResizeRequestResource ) ;
* < / code > < / pre >
* @ param disk Name of the regional persistent disk .
* @ param regionDisksResizeRequestResource
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation resizeRegionDisk ( String disk , RegionDisksResizeRequest regionDisksResizeRequestResource ) { } } | ResizeRegionDiskHttpRequest request = ResizeRegionDiskHttpRequest . newBuilder ( ) . setDisk ( disk ) . setRegionDisksResizeRequestResource ( regionDisksResizeRequestResource ) . build ( ) ; return resizeRegionDisk ( request ) ; |
public class HawkWwwAuthenticateContext { /** * Generate an HMAC from the context ts parameter .
* @ return
* @ throws HawkException */
private String generateHmac ( ) throws HawkException { } } | String baseString = getBaseString ( ) ; Mac mac ; try { mac = Mac . getInstance ( getAlgorithm ( ) . getMacName ( ) ) ; } catch ( NoSuchAlgorithmException e ) { throw new HawkException ( "Unknown algorithm " + getAlgorithm ( ) . getMacName ( ) , e ) ; } SecretKeySpec secret_key = new SecretKeySpec ( getKey ( ) . getBytes ( Charsets . UTF_8 ) , getAlgorithm ( ) . getMacName ( ) ) ; try { mac . init ( secret_key ) ; } catch ( InvalidKeyException e ) { throw new HawkException ( "Key is invalid " , e ) ; } return new String ( Base64 . encodeBase64 ( mac . doFinal ( baseString . getBytes ( Charsets . UTF_8 ) ) ) , Charsets . UTF_8 ) ; |
public class PermutationChromosome { /** * Create a new , random chromosome with the given valid alleles .
* @ param < T > the gene type of the chromosome
* @ param alleles the valid alleles used for this permutation arrays .
* @ return a new chromosome with the given alleles
* @ throws IllegalArgumentException if the given allele sequence is empty . */
public static < T > PermutationChromosome < T > of ( final ISeq < ? extends T > alleles ) { } } | return of ( alleles , alleles . size ( ) ) ; |
public class ActionUtil { /** * Returns the listener associated with the given component and event .
* @ param component The component .
* @ param eventName The event name .
* @ return A DeferredEventListener , or null if not found . */
public static ActionListener getListener ( BaseComponent component , String eventName ) { } } | return ( ActionListener ) component . getAttribute ( ActionListener . getAttrName ( eventName ) ) ; |
public class ContextAnalyzer { /** * Get the last tense used in the sentence
* @ param timex timex construct to discover tense data for
* @ return string that contains the tense */
public static String getLastTense ( Timex3 timex , JCas jcas , Language language ) { } } | RePatternManager rpm = RePatternManager . getInstance ( language , false ) ; String lastTense = "" ; // Get the sentence
FSIterator iterSentence = jcas . getAnnotationIndex ( Sentence . type ) . iterator ( ) ; Sentence s = new Sentence ( jcas ) ; while ( iterSentence . hasNext ( ) ) { s = ( Sentence ) iterSentence . next ( ) ; if ( ( s . getBegin ( ) <= timex . getBegin ( ) ) && ( s . getEnd ( ) >= timex . getEnd ( ) ) ) { break ; } } // Get the tokens
TreeMap < Integer , Token > tmToken = new TreeMap < Integer , Token > ( ) ; FSIterator iterToken = jcas . getAnnotationIndex ( Token . type ) . subiterator ( s ) ; while ( iterToken . hasNext ( ) ) { Token token = ( Token ) iterToken . next ( ) ; tmToken . put ( token . getEnd ( ) , token ) ; } // Get the last VERB token
for ( Integer tokEnd : tmToken . keySet ( ) ) { if ( tokEnd < timex . getBegin ( ) ) { Token token = tmToken . get ( tokEnd ) ; Logger . printDetail ( "GET LAST TENSE: string:" + token . getCoveredText ( ) + " pos:" + token . getPos ( ) ) ; Logger . printDetail ( "hmAllRePattern.containsKey(tensePos4PresentFuture):" + rpm . get ( "tensePos4PresentFuture" ) ) ; Logger . printDetail ( "hmAllRePattern.containsKey(tensePos4Future):" + rpm . get ( "tensePos4Future" ) ) ; Logger . printDetail ( "hmAllRePattern.containsKey(tensePos4Past):" + rpm . get ( "tensePos4Past" ) ) ; Logger . printDetail ( "CHECK TOKEN:" + token . getPos ( ) ) ; if ( token . getPos ( ) == null ) { } else if ( ( rpm . containsKey ( "tensePos4PresentFuture" ) ) && ( token . getPos ( ) . matches ( rpm . get ( "tensePos4PresentFuture" ) ) ) ) { lastTense = "PRESENTFUTURE" ; Logger . printDetail ( "this tense:" + lastTense ) ; } else if ( ( rpm . containsKey ( "tensePos4Past" ) ) && ( token . getPos ( ) . matches ( rpm . get ( "tensePos4Past" ) ) ) ) { lastTense = "PAST" ; Logger . printDetail ( "this tense:" + lastTense ) ; } else if ( ( rpm . containsKey ( "tensePos4Future" ) ) && ( token . getPos ( ) . matches ( rpm . get ( "tensePos4Future" ) ) ) ) { if ( token . getCoveredText ( ) . matches ( rpm . get ( "tenseWord4Future" ) ) ) { lastTense = "FUTURE" ; Logger . printDetail ( "this tense:" + lastTense ) ; } } if ( token . getCoveredText ( ) . equals ( "since" ) ) { lastTense = "PAST" ; Logger . printDetail ( "this tense:" + lastTense ) ; } if ( token . getCoveredText ( ) . equals ( "depuis" ) ) { // French
lastTense = "PAST" ; Logger . printDetail ( "this tense:" + lastTense ) ; } } if ( lastTense . equals ( "" ) ) { if ( tokEnd > timex . getEnd ( ) ) { Token token = tmToken . get ( tokEnd ) ; Logger . printDetail ( "GET NEXT TENSE: string:" + token . getCoveredText ( ) + " pos:" + token . getPos ( ) ) ; Logger . printDetail ( "hmAllRePattern.containsKey(tensePos4PresentFuture):" + rpm . get ( "tensePos4PresentFuture" ) ) ; Logger . printDetail ( "hmAllRePattern.containsKey(tensePos4Future):" + rpm . get ( "tensePos4Future" ) ) ; Logger . printDetail ( "hmAllRePattern.containsKey(tensePos4Past):" + rpm . get ( "tensePos4Past" ) ) ; Logger . printDetail ( "CHECK TOKEN:" + token . getPos ( ) ) ; if ( token . getPos ( ) == null ) { } else if ( ( rpm . containsKey ( "tensePos4PresentFuture" ) ) && ( token . getPos ( ) . matches ( rpm . get ( "tensePos4PresentFuture" ) ) ) ) { lastTense = "PRESENTFUTURE" ; Logger . printDetail ( "this tense:" + lastTense ) ; } else if ( ( rpm . containsKey ( "tensePos4Past" ) ) && ( token . getPos ( ) . matches ( rpm . get ( "tensePos4Past" ) ) ) ) { lastTense = "PAST" ; Logger . printDetail ( "this tense:" + lastTense ) ; } else if ( ( rpm . containsKey ( "tensePos4Future" ) ) && ( token . getPos ( ) . matches ( rpm . get ( "tensePos4Future" ) ) ) ) { if ( token . getCoveredText ( ) . matches ( rpm . get ( "tenseWord4Future" ) ) ) { lastTense = "FUTURE" ; Logger . printDetail ( "this tense:" + lastTense ) ; } } } } } // check for double POS Constraints ( not included in the rule language , yet ) TODO
// VHZ VNN and VHZ VNN and VHP VNN and VBP VVN
String prevPos = "" ; String longTense = "" ; if ( lastTense . equals ( "PRESENTFUTURE" ) ) { for ( Integer tokEnd : tmToken . keySet ( ) ) { if ( tokEnd < timex . getBegin ( ) ) { Token token = tmToken . get ( tokEnd ) ; if ( ( "VHZ" . equals ( prevPos ) ) || ( "VBZ" . equals ( prevPos ) ) || ( "VHP" . equals ( prevPos ) ) || ( "VBP" . equals ( prevPos ) ) || ( prevPos . equals ( "VER:pres" ) ) ) { if ( "VVN" . equals ( token . getPos ( ) ) || "VER:pper" . equals ( token . getPos ( ) ) ) { if ( ( ! ( token . getCoveredText ( ) . equals ( "expected" ) ) ) && ( ! ( token . getCoveredText ( ) . equals ( "scheduled" ) ) ) ) { lastTense = "PAST" ; longTense = "PAST" ; Logger . printDetail ( "this tense:" + lastTense ) ; } } } prevPos = token . getPos ( ) ; } if ( longTense . equals ( "" ) ) { if ( tokEnd > timex . getEnd ( ) ) { Token token = tmToken . get ( tokEnd ) ; if ( ( "VHZ" . equals ( prevPos ) ) || ( "VBZ" . equals ( prevPos ) ) || ( "VHP" . equals ( prevPos ) ) || ( "VBP" . equals ( prevPos ) ) || ( "VER:pres" . equals ( prevPos ) ) ) { if ( "VVN" . equals ( token . getPos ( ) ) || "VER:pper" . equals ( token . getPos ( ) ) ) { if ( ( ! ( token . getCoveredText ( ) . equals ( "expected" ) ) ) && ( ! ( token . getCoveredText ( ) . equals ( "scheduled" ) ) ) ) { lastTense = "PAST" ; longTense = "PAST" ; Logger . printDetail ( "this tense:" + lastTense ) ; } } } prevPos = token . getPos ( ) ; } } } } // French : VER : pres VER : pper
if ( lastTense . equals ( "PAST" ) ) { for ( Integer tokEnd : tmToken . keySet ( ) ) { if ( tokEnd < timex . getBegin ( ) ) { Token token = tmToken . get ( tokEnd ) ; if ( ( "VER:pres" . equals ( prevPos ) ) && ( "VER:pper" . equals ( token . getPos ( ) ) ) ) { if ( ( ( token . getCoveredText ( ) . matches ( "^prévue?s?$" ) ) ) || ( ( token . getCoveredText ( ) . equals ( "^envisagée?s?$" ) ) ) ) { lastTense = "FUTURE" ; longTense = "FUTURE" ; Logger . printDetail ( "this tense:" + lastTense ) ; } } prevPos = token . getPos ( ) ; } if ( longTense . equals ( "" ) ) { if ( tokEnd > timex . getEnd ( ) ) { Token token = tmToken . get ( tokEnd ) ; if ( ( "VER:pres" . equals ( prevPos ) ) && ( "VER:pper" . equals ( token . getPos ( ) ) ) ) { if ( ( ( token . getCoveredText ( ) . matches ( "^prévue?s?$" ) ) ) || ( ( token . getCoveredText ( ) . equals ( "^envisagée?s?$" ) ) ) ) { lastTense = "FUTURE" ; longTense = "FUTURE" ; Logger . printDetail ( "this tense:" + lastTense ) ; } } prevPos = token . getPos ( ) ; } } } } Logger . printDetail ( "TENSE: " + lastTense ) ; return lastTense ; |
public class Ocpp12RequestHandler { /** * { @ inheritDoc } */
@ Override public void handle ( SoftResetChargingStationRequestedEvent event , CorrelationToken correlationToken ) { } } | LOG . info ( "OCPP 1.2 SoftResetChargingStationRequestedEvent" ) ; chargingStationOcpp12Client . softReset ( event . getChargingStationId ( ) ) ; |
public class JCudaDriver { /** * Map graphics resources for access by CUDA .
* < pre >
* CUresult cuGraphicsMapResources (
* unsigned int count ,
* CUgraphicsResource * resources ,
* CUstream hStream )
* < / pre >
* < div >
* < p > Map graphics resources for access by
* CUDA . Maps the < tt > count < / tt > graphics resources in < tt > resources < / tt >
* for access by CUDA .
* < p > The resources in < tt > resources < / tt >
* may be accessed by CUDA until they are unmapped . The graphics API from
* which < tt > resources < / tt > were registered should not access any
* resources while they are mapped by CUDA . If an application does so ,
* the results are
* undefined .
* < p > This function provides the synchronization
* guarantee that any graphics calls issued before cuGraphicsMapResources ( )
* will complete before any subsequent CUDA work issued in < tt > stream < / tt >
* begins .
* < p > If < tt > resources < / tt > includes any
* duplicate entries then CUDA _ ERROR _ INVALID _ HANDLE is returned . If any
* of < tt > resources < / tt > are presently mapped for access by CUDA then
* CUDA _ ERROR _ ALREADY _ MAPPED is returned .
* < div >
* < span > Note : < / span >
* < p > Note that this
* function may also return error codes from previous , asynchronous
* launches .
* < / div >
* < / div >
* @ param count Number of resources to map
* @ param resources Resources to map for CUDA usage
* @ param hStream Stream with which to synchronize
* @ return CUDA _ SUCCESS , CUDA _ ERROR _ DEINITIALIZED , CUDA _ ERROR _ NOT _ INITIALIZED ,
* CUDA _ ERROR _ INVALID _ CONTEXT , CUDA _ ERROR _ INVALID _ HANDLE ,
* CUDA _ ERROR _ ALREADY _ MAPPED , CUDA _ ERROR _ UNKNOWN
* @ see JCudaDriver # cuGraphicsResourceGetMappedPointer
* @ see JCudaDriver # cuGraphicsSubResourceGetMappedArray
* @ see JCudaDriver # cuGraphicsUnmapResources */
public static int cuGraphicsMapResources ( int count , CUgraphicsResource resources [ ] , CUstream hStream ) { } } | return checkResult ( cuGraphicsMapResourcesNative ( count , resources , hStream ) ) ; |
public class PubSubOutputHandler { /** * to be sent downstream */
public List sendValueMessages ( List msgList , long completedPrefix , boolean requestedOnly , int priority , Reliability reliability , SIBUuid12 stream ) throws SIResourceException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "sendValueMessages" , new Object [ ] { msgList , new Long ( completedPrefix ) , new Boolean ( requestedOnly ) } ) ; // We will send the messages using MPIO
// Work out the cellule pair needed by the
// messageTransmitter
SIBUuid8 [ ] fromTo = new SIBUuid8 [ 1 ] ; fromTo [ 0 ] = _targetMEUuid ; JsMessage jsMsg = null ; TickRange tickRange = null ; MessageItem msgItem = null ; long msgId = - 1 ; List < TickRange > expiredMsgs = null ; for ( int i = 0 ; i < msgList . size ( ) ; i ++ ) { tickRange = ( TickRange ) msgList . get ( i ) ; // Get the messageStore Id from the stream
msgId = tickRange . itemStreamIndex ; // Retrieve the message from the non - persistent ItemStream
msgItem = _destinationHandler . getPubSubRealization ( ) . retrieveMessageFromItemStream ( msgId ) ; // If the item wasn ' t found it has expired
if ( msgItem == null ) { if ( expiredMsgs == null ) expiredMsgs = new ArrayList < TickRange > ( ) ; expiredMsgs . add ( tickRange ) ; // In this case send Silence instead
ControlMessage cMsg = createSilenceMessage ( tickRange . valuestamp , completedPrefix , priority , reliability , stream ) ; ( ( ControlSilence ) cMsg ) . setRequestedOnly ( requestedOnly ) ; // If the destination in a Link add Link specific properties to message
if ( _isLink ) { cMsg = addLinkProps ( cMsg ) ; cMsg . setRoutingDestination ( _routingDestination ) ; } // Send the message to the MessageTransmitter
// Send at priority + 1 if this is a response to a Nack
if ( requestedOnly ) _mpio . sendDownTree ( fromTo , priority + 1 , cMsg ) ; else _mpio . sendDownTree ( fromTo , priority , cMsg ) ; } else { try { // PM34074
// Retrieve a copy of the message from MessageItem
jsMsg = msgItem . getMessage ( ) . getReceived ( ) ; } catch ( MessageCopyFailedException e ) { FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.PubSubOutputHandler.sendValueMessages" , "1:960:1.164.1.5" , this ) ; if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "sendValueMessages" , "SIErrorException: " + e ) ; throw new SIErrorException ( e ) ; } // Modify the streamId if necessary
if ( jsMsg . getGuaranteedStreamUUID ( ) != stream ) { jsMsg . setGuaranteedStreamUUID ( stream ) ; } // Are there Completed ticks after this Value
// If so we need to adjust the message to reflect this
if ( tickRange . endstamp > tickRange . valuestamp ) { jsMsg . setGuaranteedValueEndTick ( tickRange . endstamp ) ; } // Update the completedPrefix to current value
jsMsg . setGuaranteedValueCompletedPrefix ( completedPrefix ) ; // Set the requestedOnly flag
jsMsg . setGuaranteedValueRequestedOnly ( requestedOnly ) ; // If the destination in a Link add Link specific properties to message
if ( _isLink ) { jsMsg = addLinkProps ( jsMsg ) ; jsMsg . setRoutingDestination ( _routingDestination ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && UserTrace . tc_mt . isDebugEnabled ( ) ) { DestinationHandler dest = _messageProcessor . getDestinationManager ( ) . getDestinationInternal ( _destinationHandler . getUuid ( ) , false ) ; if ( dest != null ) UserTrace . traceOutboundSend ( jsMsg , _neighbour . getUUID ( ) , dest . getName ( ) , dest . isForeignBus ( ) || dest . isLink ( ) , dest . isMQLink ( ) , dest . isTemporary ( ) ) ; else UserTrace . traceOutboundSend ( jsMsg , _neighbour . getUUID ( ) , _destinationHandler . getUuid ( ) . toString ( ) . toString ( ) , false , false , false ) ; } // Send the message to the MessageTransmitter
// Send at priority + 1 if this is a response to a Nack
if ( requestedOnly ) _mpio . sendDownTree ( fromTo , priority + 1 , jsMsg ) ; else _mpio . sendDownTree ( fromTo , priority , jsMsg ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "sendValueMessages" , expiredMsgs ) ; return expiredMsgs ; |
public class SearchPortletController { /** * Display search results */
@ RequestMapping ( params = { } } | "query" , "queryId" } ) public ModelAndView showSearchResults ( PortletRequest request , @ RequestParam ( value = "query" ) String query , @ RequestParam ( value = "queryId" ) String queryId ) { final Map < String , Object > model = new HashMap < > ( ) ; model . put ( "query" , query ) ; // Determine if the new REST search should be used
if ( isRestSearch ( request ) ) { // we only need query , so the model at this point is sufficient
return new ModelAndView ( "/jsp/Search/searchRest" , model ) ; } ConcurrentMap < String , List < Tuple < SearchResult , String > > > results = new ConcurrentHashMap < > ( ) ; final PortalSearchResults portalSearchResults = this . getPortalSearchResults ( request , queryId ) ; if ( portalSearchResults != null ) { results = portalSearchResults . getResults ( ) ; } results . forEach ( ( key , value ) -> { value . sort ( Comparator . comparingInt ( tuple -> tuple . first . getRank ( ) ) ) ; } ) ; logger . debug ( "Search results for query '{}' are: {}" , query , results ) ; model . put ( "results" , results ) ; model . put ( "defaultTabKey" , this . defaultTabKey ) ; model . put ( "tabKeys" , this . tabKeys ) ; final boolean isMobile = isMobile ( request ) ; String viewName = isMobile ? "/jsp/Search/mobileSearch" : "/jsp/Search/search" ; return new ModelAndView ( viewName , model ) ; |
public class RocksDbWrapper { /** * Open a { @ link RocksDB } with specified options in read - only mode .
* @ param dirPath
* existing { @ link RocksDB } data directory
* @ param dbOptions
* @ param readOptions
* @ return
* @ throws RocksDBException
* @ throws IOException */
public static RocksDbWrapper openReadOnly ( String dirPath , DBOptions dbOptions , ReadOptions readOptions ) throws RocksDbException , IOException { } } | RocksDbWrapper rocksDbWrapper = new RocksDbWrapper ( dirPath , true ) ; rocksDbWrapper . setDbOptions ( dbOptions ) . setReadOptions ( readOptions ) ; rocksDbWrapper . init ( ) ; return rocksDbWrapper ; |
public class NfsFileBase { /** * ( non - Javadoc )
* @ see com . emc . ecs . nfsclient . nfs . NfsFile # makeWriteRequest ( long ,
* java . util . List , int ) */
public NfsWriteRequest makeWriteRequest ( long offset , List < ByteBuffer > payload , int syncType ) throws IOException { } } | return getNfs ( ) . makeWriteRequest ( getFileHandle ( ) , offset , payload , syncType ) ; |
public class A_CmsFormatterWidget { /** * Gets a message string . < p >
* @ param cms the CMS context
* @ param message the message key
* @ param args the message arguments
* @ return the message string */
static String getMessage ( CmsObject cms , String message , Object ... args ) { } } | Locale locale = OpenCms . getWorkplaceManager ( ) . getWorkplaceLocale ( cms ) ; return Messages . get ( ) . getBundle ( locale ) . key ( message , args ) ; |
public class DefaultFsService { /** * find files recursively in specific folder
* @ param filter
* The filter to apply to select files .
* @ param root
* The location in the hierarchy to search from .
* @ return A collection of files that match the filter and have the root as
* a parent . */
private Collection < FsItemEx > findRecursively ( FsItemFilter filter , FsItem root ) { } } | List < FsItemEx > results = new ArrayList < FsItemEx > ( ) ; FsVolume vol = root . getVolume ( ) ; for ( FsItem child : vol . listChildren ( root ) ) { if ( vol . isFolder ( child ) ) { results . addAll ( findRecursively ( filter , child ) ) ; } else { FsItemEx item = new FsItemEx ( child , this ) ; if ( filter . accepts ( item ) ) results . add ( item ) ; } } return results ; |
public class MatrixLogSumRescaler {
    /**
     * Computes symmetry-preserving scale factors for the symmetric matrix A.
     *
     * <p>Implements algorithm 3 of Gajulapalli, Lasdon, "Scaling Sparse Matrices for
     * Optimization Algorithms": column exponents x[j] are computed from last column to
     * first, using only the lower-left (subdiagonal) part of the symmetric matrix, and
     * returned as the per-row/column multipliers base^x[k].
     *
     * @param A symmetric matrix to scale
     * @return vector u with u[k] = base^x[k]
     * @see Gajulapalli, Lasdon "Scaling Sparse Matrices for Optimization Algorithms", algorithm 3
     */
    @Override
    public DoubleMatrix1D getMatrixScalingFactorsSymm(DoubleMatrix2D A) {
        int n = A.rows();
        final double log10_b = Math.log10(base);
        final int[] x = new int[n];
        // single-element arrays act as mutable accumulators captured by the anonymous class:
        // cHolder counts (weighted) entries, tHolder accumulates the log-sum target
        final double[] cHolder = new double[1];
        final double[] tHolder = new double[1];
        final int[] currentColumnIndexHolder = new int[] { -1 };
        IntIntDoubleFunction myFunct = new IntIntDoubleFunction() {
            @Override
            public double apply(int i, int j, double pij) {
                int currentColumnIndex = currentColumnIndexHolder[0];
                // we take into account only the lower left subdiagonal part of Q (that is symmetric)
                if (i == currentColumnIndex) {
                    // diagonal element: weight 1, half contribution
                    tHolder[0] = tHolder[0] - 0.5 * (Math.log10(Math.abs(pij)) / log10_b + 0.5); // log(b,x) = log(k,x) / log(k,b)
                    cHolder[0] = cHolder[0] + 1;
                } else if (i > currentColumnIndex) {
                    // sub-diagonal elements: counted twice (symmetry), corrected by the
                    // already-fixed row exponent x[i]
                    tHolder[0] = tHolder[0] - 2 * (Math.log10(Math.abs(pij)) / log10_b + 0.5) - 2 * x[i]; // log(b,x) = log(k,x) / log(k,b)
                    cHolder[0] = cHolder[0] + 2; // -2 * x[i]
                }
                return pij;
            }
        };
        // view A column by column, from the last column backwards, so x[i] for i > j is
        // already determined when column j is processed
        for (int currentColumnIndex = n - 1; currentColumnIndex >= 0; currentColumnIndex--) {
            cHolder[0] = 0; // reset
            tHolder[0] = 0; // reset
            currentColumnIndexHolder[0] = currentColumnIndex;
            DoubleMatrix2D P = A.viewPart(0, currentColumnIndex, n, 1);
            P.forEachNonZero(myFunct);
            if (cHolder[0] > 0) {
                // average of the accumulated logs, rounded to an integer exponent
                x[currentColumnIndex] = (int) Math.round(tHolder[0] / cHolder[0]);
            }
        }
        // convert integer exponents into multiplicative scaling factors
        DoubleMatrix1D u = new DenseDoubleMatrix1D(n);
        for (int k = 0; k < n; k++) {
            u.setQuick(k, Math.pow(base, x[k]));
        }
        return u;
    }
}
public class FieldScopeUtil {
    /**
     * Returns the singular descriptor used by all non-null messages in the list.
     *
     * <p>If there is no descriptor, or more than one, returns {@code Optional.absent()}.
     */
    static Optional<Descriptor> getSingleDescriptor(Iterable<? extends Message> messages) {
        Optional<Descriptor> optDescriptor = Optional.absent();
        for (Message message : messages) {
            if (message != null) {
                Descriptor descriptor = message.getDescriptorForType();
                if (!optDescriptor.isPresent()) {
                    optDescriptor = Optional.of(descriptor);
                } else if (descriptor != optDescriptor.get()) {
                    // Reference comparison is deliberate here — descriptors are presumably
                    // canonical per message type. Two different descriptors - abandon ship.
                    return Optional.absent();
                }
            }
        }
        return optDescriptor;
    }
}
public class EPTImpl {
    /**
     * <!-- begin-user-doc -->
     * Resets the given structural feature to its default state: the PTdo name is restored
     * to its default value, and the triplets list is cleared. Any other feature id is
     * forwarded to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.EPT__PTDO_NAME:
                setPTdoName(PTDO_NAME_EDEFAULT);
                return;
            case AfplibPackage.EPT__TRIPLETS:
                getTriplets().clear();
                return;
        }
        super.eUnset(featureID);
    }
}
public class NdrBuffer { /** * / * double */
public void enc_ndr_string ( String s ) { } } | align ( 4 ) ; int i = index ; int len = s . length ( ) ; Encdec . enc_uint32le ( len + 1 , buf , i ) ; i += 4 ; Encdec . enc_uint32le ( 0 , buf , i ) ; i += 4 ; Encdec . enc_uint32le ( len + 1 , buf , i ) ; i += 4 ; try { System . arraycopy ( s . getBytes ( "UTF-16LE" ) , 0 , buf , i , len * 2 ) ; } catch ( UnsupportedEncodingException uee ) { } i += len * 2 ; buf [ i ++ ] = ( byte ) '\0' ; buf [ i ++ ] = ( byte ) '\0' ; advance ( i - index ) ; |
public class SnapshotsInner {
    /**
     * Creates or updates a snapshot.
     *
     * @param resourceGroupName The name of the resource group.
     * @param snapshotName The name of the snapshot that is being created. The name can't be changed after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The max name length is 80 characters.
     * @param snapshot Snapshot object supplied in the body of the Put disk operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<SnapshotInner> createOrUpdateAsync(String resourceGroupName, String snapshotName, SnapshotInner snapshot) {
        // Delegate to the ServiceResponse-producing variant and unwrap the response body.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, snapshotName, snapshot).map(new Func1<ServiceResponse<SnapshotInner>, SnapshotInner>() {
            @Override
            public SnapshotInner call(ServiceResponse<SnapshotInner> response) {
                return response.body();
            }
        });
    }
}
public class MailSessionService {
    /**
     * Declarative Services method for setting the mail session registrar.
     * Stores the service reference and then registers the JavaMail MBean.
     *
     * @param ref reference to the MailSessionRegistrar implementation component
     */
    @Reference(service = MailSessionRegistrar.class, policy = ReferencePolicy.DYNAMIC, cardinality = ReferenceCardinality.OPTIONAL, target = "(component.name=com.ibm.ws.javamail.management.j2ee.MailSessionRegistrarImpl)")
    protected void setMailSessionRegistrar(ServiceReference<MailSessionRegistrar> ref) {
        mailSessionRegistrarRef.setReference(ref);
        registerJavaMailMBean();
    }
}
public class ServletClassloaderHack {
    /**
     * Startup hook that inspects the thread context classloader.
     */
    @PostConstruct
    public void init() {
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        // NOTE(review): the cast result is otherwise unused — it presumably serves as a
        // fail-fast sanity check (ClassCastException if the context loader is not a
        // DynamicClassLoader). Confirm before removing.
        DynamicClassLoader dynLoader = (DynamicClassLoader) loader;
        // dynLoader.setServletHack(_isHack);
    }
}
public class MtasBasicParser {
    /**
     * Updates mappings with local references.
     *
     * <p>If {@code currentObject} is a group: for every token that has all four pending
     * local references (position start/end, offset start/end), resolves the referred
     * values from the group's local-reference maps, combining multiple references by
     * min (starts) and max (ends), and applies them to the token. Afterwards any
     * remaining local references are propagated to the enclosing group (if one exists on
     * {@code currentList}) and cleared from {@code currentObject}.
     *
     * @param currentObject the current object
     * @param currentList the current list of open objects per mapping type
     * @param updateList the pending updates, keyed by update type and token id
     */
    private void updateMappingsWithLocalReferences(MtasParserObject currentObject, Map<String, List<MtasParserObject>> currentList, Map<String, Map<Integer, Set<String>>> updateList) {
        if (currentObject.getType().type.equals(MAPPING_TYPE_GROUP)) {
            for (Integer tokenId : updateList.get(UPDATE_TYPE_LOCAL_REF_POSITION_START).keySet()) {
                // a token is only updated when all four reference kinds are pending for it
                if (updateList.get(UPDATE_TYPE_LOCAL_REF_POSITION_END).containsKey(tokenId)
                        && updateList.get(UPDATE_TYPE_LOCAL_REF_OFFSET_START).containsKey(tokenId)
                        && updateList.get(UPDATE_TYPE_LOCAL_REF_OFFSET_END).containsKey(tokenId)) {
                    Iterator<String> startPositionIt = updateList.get(UPDATE_TYPE_LOCAL_REF_POSITION_START).get(tokenId).iterator();
                    Iterator<String> endPositionIt = updateList.get(UPDATE_TYPE_LOCAL_REF_POSITION_END).get(tokenId).iterator();
                    Iterator<String> startOffsetIt = updateList.get(UPDATE_TYPE_LOCAL_REF_OFFSET_START).get(tokenId).iterator();
                    Iterator<String> endOffsetIt = updateList.get(UPDATE_TYPE_LOCAL_REF_OFFSET_END).get(tokenId).iterator();
                    Integer startPosition = null;
                    Integer endPosition = null;
                    Integer startOffset = null;
                    Integer endOffset = null;
                    Integer newValue = null;
                    // starts combine with min, ends combine with max, so the token spans
                    // the union of all referred ranges
                    while (startPositionIt.hasNext()) {
                        String localKey = startPositionIt.next();
                        if (currentObject.referredStartPosition.containsKey(localKey)) {
                            newValue = currentObject.referredStartPosition.get(localKey);
                            startPosition = (startPosition == null) ? newValue : Math.min(startPosition, newValue);
                        }
                    }
                    while (endPositionIt.hasNext()) {
                        String localKey = endPositionIt.next();
                        if (currentObject.referredEndPosition.containsKey(localKey)) {
                            newValue = currentObject.referredEndPosition.get(localKey);
                            endPosition = (endPosition == null) ? newValue : Math.max(endPosition, newValue);
                        }
                    }
                    while (startOffsetIt.hasNext()) {
                        String localKey = startOffsetIt.next();
                        if (currentObject.referredStartOffset.containsKey(localKey)) {
                            newValue = currentObject.referredStartOffset.get(localKey);
                            startOffset = (startOffset == null) ? newValue : Math.min(startOffset, newValue);
                        }
                    }
                    while (endOffsetIt.hasNext()) {
                        String localKey = endOffsetIt.next();
                        if (currentObject.referredEndOffset.containsKey(localKey)) {
                            newValue = currentObject.referredEndOffset.get(localKey);
                            endOffset = (endOffset == null) ? newValue : Math.max(endOffset, newValue);
                        }
                    }
                    // only apply when the full range could be resolved
                    if (startPosition != null && endPosition != null && startOffset != null && endOffset != null) {
                        MtasToken token = tokenCollection.get(tokenId);
                        token.addPositionRange(startPosition, endPosition);
                        token.setOffset(startOffset, endOffset);
                    }
                }
            }
        }
        // hand unresolved local references up to the innermost enclosing group, if any
        if (!currentList.get(MAPPING_TYPE_GROUP).isEmpty()) {
            MtasParserObject parentGroup = currentList.get(MAPPING_TYPE_GROUP).get(currentList.get(MAPPING_TYPE_GROUP).size() - 1);
            parentGroup.referredStartPosition.putAll(currentObject.referredStartPosition);
            parentGroup.referredEndPosition.putAll(currentObject.referredEndPosition);
            parentGroup.referredStartOffset.putAll(currentObject.referredStartOffset);
            parentGroup.referredEndOffset.putAll(currentObject.referredEndOffset);
        }
        // this object's references are now either applied or propagated
        currentObject.referredStartPosition.clear();
        currentObject.referredEndPosition.clear();
        currentObject.referredStartOffset.clear();
        currentObject.referredEndOffset.clear();
    }
}
public class CompositeDialogPage { /** * Sets the active page of this CompositeDialogPage .
* @ param activePage the page to be made active . Must be one of the child
* pages . */
public void setActivePage ( DialogPage activePage ) { } } | DialogPage oldPage = this . activePage ; Assert . isTrue ( activePage == null || pages . contains ( activePage ) ) ; if ( oldPage == activePage ) { return ; } this . activePage = activePage ; updateMessage ( ) ; if ( oldPage != null ) { updatePageLabels ( oldPage ) ; } if ( activePage != null ) { updatePageLabels ( activePage ) ; } |
public class RESTWorkItemHandler {
    /**
     * This method does the actual request, including the setup for authorization.
     * It is <b>not</b> responsible for cleaning up after the last request that it does.
     * It <i>is</i> responsible for cleaning up after all previous requests, such as for
     * form-based authentication, that happen.
     *
     * @param httpclient the {@link HttpClient} instance
     * @param requestBuilder the {@link RequestBuilder} instance
     * @param params the parameters that may be needed for authentication
     * @param type the authentication scheme to use ({@code null} or NONE for no auth)
     * @return a {@link HttpResponse} instance from which we can extract the content
     */
    protected HttpResponse doRequestWithAuthorization(HttpClient httpclient, RequestBuilder requestBuilder, Map<String, Object> params, AuthenticationType type) {
        // no authorization
        if (type == null || type == AuthenticationType.NONE) {
            HttpUriRequest request = requestBuilder.build();
            try {
                return httpclient.execute(request);
            } catch (Exception e) {
                throw new RuntimeException("Could not execute request [" + request.getMethod() + "] " + request.getURI(), e);
            }
        }
        // user/password: per-request parameters win over handler-level defaults
        String u = (String) params.get(PARAM_USERNAME);
        String p = (String) params.get(PARAM_PASSWORD);
        if (u == null || p == null) {
            u = this.username;
            p = this.password;
        }
        if (u == null) {
            throw new IllegalArgumentException("Could not find username");
        }
        if (p == null) {
            throw new IllegalArgumentException("Could not find password");
        }
        if (type == AuthenticationType.BASIC) {
            // basic auth
            URI requestUri = requestBuilder.getUri();
            HttpHost targetHost = new HttpHost(requestUri.getHost(), requestUri.getPort(), requestUri.getScheme());
            // Create AuthCache instance and add it: so that HttpClient thinks that it has already queried (as per the HTTP spec)
            // - generate BASIC scheme object and add it to the local auth cache
            AuthCache authCache = new BasicAuthCache();
            BasicScheme basicAuth = new BasicScheme();
            authCache.put(targetHost, basicAuth);
            // - add AuthCache to the execution context:
            HttpClientContext clientContext = HttpClientContext.create();
            CredentialsProvider credsProvider = new BasicCredentialsProvider();
            credsProvider.setCredentials(
                    // specify host and port, since that is safer/more secure
                    new AuthScope(requestUri.getHost(), requestUri.getPort(), AuthScope.ANY_REALM),
                    new UsernamePasswordCredentials(u, p));
            clientContext.setCredentialsProvider(credsProvider);
            clientContext.setAuthCache(authCache);
            // - execute request
            HttpUriRequest request = requestBuilder.build();
            try {
                return httpclient.execute(targetHost, request, clientContext);
            } catch (Exception e) {
                throw new RuntimeException("Could not execute request with preemptive authentication [" + request.getMethod() + "] " + request.getURI(), e);
            }
        } else if (type == AuthenticationType.FORM_BASED) {
            // form auth
            // 1. do initial request to trigger authentication
            HttpUriRequest request = requestBuilder.build();
            int statusCode = -1;
            try {
                HttpResponse initialResponse = httpclient.execute(request);
                statusCode = initialResponse.getStatusLine().getStatusCode();
            } catch (IOException e) {
                throw new RuntimeException("Could not execute request for form-based authentication", e);
            } finally {
                // weird, but this is the method that releases resources, including the connection
                request.abort();
            }
            // 1b. form authentication requests should have a status of 401
            // See: www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4.2
            if (statusCode != HttpStatus.SC_UNAUTHORIZED) {
                logger.error("Expected form authentication request with status {} but status on response is {}: proceeding anyways", HttpStatus.SC_UNAUTHORIZED, statusCode);
            }
            // 2. do POST form request to authenticate
            String authUrlStr = (String) params.get(PARAM_AUTHURL);
            if (authUrlStr == null) {
                authUrlStr = authUrl;
            }
            if (authUrlStr == null) {
                throw new IllegalArgumentException("Could not find authentication url");
            }
            HttpPost authMethod = new HttpPost(authUrlStr);
            List<NameValuePair> formParams = new ArrayList<NameValuePair>(2);
            formParams.add(new BasicNameValuePair("j_username", u));
            formParams.add(new BasicNameValuePair("j_password", p));
            UrlEncodedFormEntity formEntity;
            try {
                formEntity = new UrlEncodedFormEntity(formParams);
            } catch (UnsupportedEncodingException uee) {
                throw new RuntimeException("Could not encode authentication parameters into request body", uee);
            }
            authMethod.setEntity(formEntity);
            try {
                httpclient.execute(authMethod);
            } catch (IOException e) {
                throw new RuntimeException("Could not initialize form-based authentication", e);
            } finally {
                authMethod.releaseConnection();
            }
            // 3. rebuild request and execute
            request = requestBuilder.build();
            try {
                return httpclient.execute(request);
            } catch (Exception e) {
                throw new RuntimeException("Could not execute request [" + request.getMethod() + "] " + request.getURI(), e);
            }
        } else {
            throw new RuntimeException("Unknown AuthenticationType " + type);
        }
    }
}
public class Misc {
    /**
     * Converts a UTF-8 byte array slice into a string.
     *
     * @param bytes the source bytes; may be {@code null}
     * @param offset index of the first byte to decode
     * @param length number of bytes to decode
     * @return the decoded string, or {@code null} when {@code bytes} is {@code null}
     *     or the offset/length slice is out of bounds
     */
    public static String toStringUTF8(byte[] bytes, int offset, int length) {
        if (bytes == null) {
            return null;
        }
        try {
            // The Charset overload never throws UnsupportedEncodingException, removing
            // the impossible checked-exception branch of the String-name overload.
            return new String(bytes, offset, length, java.nio.charset.StandardCharsets.UTF_8);
        } catch (IndexOutOfBoundsException e) {
            // preserve historical contract: invalid bounds yield null, not an exception
            return null;
        }
    }
}
public class SyncListItem { /** * Create a SyncListItemFetcher to execute fetch .
* @ param pathServiceSid The service _ sid
* @ param pathListSid The list _ sid
* @ param pathIndex The index
* @ return SyncListItemFetcher capable of executing the fetch */
public static SyncListItemFetcher fetcher ( final String pathServiceSid , final String pathListSid , final Integer pathIndex ) { } } | return new SyncListItemFetcher ( pathServiceSid , pathListSid , pathIndex ) ; |
public class UriEscaper {
    /**
     * Escapes a string as a URI fragment.
     *
     * @param fragment the fragment to escape
     * @param strict whether or not to do strict escaping
     * @return the escaped string
     */
    public static String escapeFragment(final String fragment, final boolean strict) {
        // pick the escaper variant matching the requested strictness, then delegate
        return (strict ? STRICT_ESCAPER : ESCAPER).escapeFragment(fragment);
    }
}
public class SasUtils {
    /**
     * Decodes an integer produced by {@code encodeStringToInteger(java.lang.String)}
     * back to its Unicode string form.
     *
     * @param num integer to decode back to string
     * @return the decoded string
     */
    public static String decodeIntegerToString(BigInteger num) {
        byte[] raw = num.toByteArray();
        // BigInteger.toByteArray() may prepend zero (sign) bytes; skip them so they do
        // not end up as NUL characters in the decoded text.
        int start = 0;
        while (start < raw.length && raw[start] == 0) {
            start++;
        }
        return new String(raw, start, raw.length - start, StandardCharsets.UTF_8);
    }
}
public class MockWebServer {
    /**
     * Starts the server, serves all enqueued requests, and shuts the server down.
     *
     * @param port the port to listen to, or 0 for any available port. Automated tests
     *     should always use port 0 to avoid flakiness when a specific port is unavailable.
     * @throws IOException if the server socket cannot be opened
     * @throws IllegalStateException if {@code play()} was already called
     */
    public void play(int port) throws IOException {
        if (acceptExecutor != null) {
            throw new IllegalStateException("play() already called");
        }
        // The acceptExecutor handles the Socket.accept() and hands each request off to the
        // requestExecutor. It also handles shutdown.
        acceptExecutor = Executors.newSingleThreadExecutor();
        // The requestExecutor has a fixed number of worker threads. In order to get strict
        // guarantees that requests are handled in the order in which they are accepted
        // workerThreads should be set to 1.
        requestExecutor = Executors.newFixedThreadPool(workerThreads);
        serverSocket = new ServerSocket(port);
        serverSocket.setReuseAddress(true);
        this.port = serverSocket.getLocalPort();
        acceptExecutor.execute(namedRunnable("MockWebServer-accept-" + port, new Runnable() {
            public void run() {
                try {
                    acceptConnections();
                } catch (Throwable e) {
                    logger.log(Level.WARNING, "MockWebServer connection failed", e);
                }
                /*
                 * This gnarly block of code will release all sockets and all threads,
                 * even if any close fails.
                 */
                try {
                    serverSocket.close();
                } catch (Throwable e) {
                    logger.log(Level.WARNING, "MockWebServer server socket close failed", e);
                }
                for (Iterator<Socket> s = openClientSockets.keySet().iterator(); s.hasNext(); ) {
                    try {
                        s.next().close();
                        s.remove();
                    } catch (Throwable e) {
                        logger.log(Level.WARNING, "MockWebServer socket close failed", e);
                    }
                }
                try {
                    acceptExecutor.shutdown();
                } catch (Throwable e) {
                    logger.log(Level.WARNING, "MockWebServer acceptExecutor shutdown failed", e);
                }
                try {
                    requestExecutor.shutdown();
                } catch (Throwable e) {
                    logger.log(Level.WARNING, "MockWebServer requestExecutor shutdown failed", e);
                }
            }

            private void acceptConnections() throws Exception {
                while (true) {
                    Socket socket;
                    try {
                        socket = serverSocket.accept();
                    } catch (SocketException e) {
                        // closing serverSocket raises this; treat it as shutdown
                        return;
                    }
                    SocketPolicy socketPolicy = dispatcher.peek().getSocketPolicy();
                    if (socketPolicy == DISCONNECT_AT_START) {
                        dispatchBookkeepingRequest(0, socket);
                        socket.close();
                    } else {
                        openClientSockets.put(socket, true);
                        serveConnection(socket);
                    }
                }
            }
        }));
    }
}
public class TypedEntityLinks {
    /**
     * Creates a {@link Link} pointing to the item resource backing the given entity. The
     * relation type of the link will be determined by the implementation class and should
     * be defaulted to {@link IanaLinkRelations#SELF}.
     *
     * @param entity the entity to point to, must not be {@literal null}.
     * @return the {@link Link} pointing to the resource exposed for the given entity.
     *     Will never be {@literal null}.
     * @throws IllegalArgumentException in case the given type is unknown to the entity
     *     links infrastructure.
     */
    public Link linkToItemResource(T entity) {
        // resolve the entity's identifier and delegate the class-based lookup
        return entityLinks.linkToItemResource(entity.getClass(), identifierExtractor.apply(entity));
    }
}
public class CommerceWarehousePersistenceImpl { /** * Returns the first commerce warehouse in the ordered set where groupId = & # 63 ; and active = & # 63 ; and primary = & # 63 ; .
* @ param groupId the group ID
* @ param active the active
* @ param primary the primary
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce warehouse , or < code > null < / code > if a matching commerce warehouse could not be found */
@ Override public CommerceWarehouse fetchByG_A_P_First ( long groupId , boolean active , boolean primary , OrderByComparator < CommerceWarehouse > orderByComparator ) { } } | List < CommerceWarehouse > list = findByG_A_P ( groupId , active , primary , 0 , 1 , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ; |
public class SubProcessIOFiles { /** * Will copy the output files to the GCS path setup via the configuration .
* @ param configuration
* @ param params */
public void copyOutPutFilesToBucket ( SubProcessConfiguration configuration , String params ) { } } | if ( Files . exists ( outFile ) || Files . exists ( errFile ) ) { try { outFileLocation = FileUtils . copyFileFromWorkerToGCS ( configuration , outFile ) ; } catch ( Exception ex ) { LOG . error ( "Error uploading log file to storage " , ex ) ; } try { errFileLocation = FileUtils . copyFileFromWorkerToGCS ( configuration , errFile ) ; } catch ( Exception ex ) { LOG . error ( "Error uploading log file to storage " , ex ) ; } LOG . info ( String . format ( "Log Files for process: %s outFile was: %s errFile was: %s" , params , outFileLocation , errFileLocation ) ) ; } else { LOG . error ( String . format ( "There was no output file or err file for process %s" , params ) ) ; } |
public class Types {
    /**
     * Returns an ordered set of superclasses of {@code clazz}.
     * If {@code clazz} is a class, it begins with {@code clazz}, followed by its
     * superclasses and then its interfaces in breadth-first order; if {@code clazz} is
     * an interface, it begins with {@code Object.class}, followed by {@code clazz} and
     * its super interfaces.
     *
     * @param clazz the class
     * @return ordered set
     */
    public static Set<Class<?>> superclasses(Class<?> clazz) {
        final Queue<Class<?>> pending = new ArrayDeque<>();
        // interfaces have no superclass chain, so seed with Object explicitly
        if (clazz.isInterface()) {
            pending.add(Object.class);
        }
        for (Class<?> c = clazz; c != null; c = c.getSuperclass()) {
            pending.add(c);
        }
        // breadth-first expansion over interfaces; LinkedHashSet keeps discovery order
        final Set<Class<?>> ordered = new LinkedHashSet<>();
        while (!pending.isEmpty()) {
            final Class<?> next = pending.remove();
            if (ordered.add(next)) {
                pending.addAll(Arrays.asList(next.getInterfaces()));
            }
        }
        return ordered;
    }
}
public class PEPUtility { /** * Retrieve a string value .
* @ param data byte array
* @ param offset offset into byte array
* @ return string value */
public static final String getString ( byte [ ] data , int offset ) { } } | return getString ( data , offset , data . length - offset ) ; |
public class AbstractDelegationTokenBinding { /** * Bind to the filesystem . Subclasses can use this to perform their own binding operations - but
* they must always call their superclass implementation . This < i > Must < / i > be called before
* calling { @ code init ( ) } .
* < p > < b > Important : < / b > This binding will happen during FileSystem . initialize ( ) ; the FS is not
* live for actual use and will not yet have interacted with GCS services .
* @ param fs owning FS .
* @ param service name of the service ( i . e . bucket name ) for the FS . */
public void bindToFileSystem ( GoogleHadoopFileSystemBase fs , Text service ) { } } | this . fileSystem = requireNonNull ( fs ) ; this . service = requireNonNull ( service ) ; |
public class GroupService {
    /**
     * Power off groups of servers.
     *
     * @param groups groups references list
     * @return OperationFuture wrapper for Server list
     */
    public OperationFuture<List<Server>> powerOff(Group... groups) {
        // translate the group references into a server search criteria and delegate
        // the power-off to the server service
        return serverService().powerOff(getServerSearchCriteria(groups));
    }
}
public class FileUtil {
    /**
     * Copies a file or directory.<br>
     * Cases:
     * <pre>
     * 1. src and dest are both directories: everything under src (including
     *    subdirectories) is copied into dest
     * 2. src and dest are both files: copied directly, named dest
     * 3. src is a file and dest is a directory: src is copied into the dest directory
     * </pre>
     * NOTE(review): {@code setOnlyCopyFile(true)} presumably means the source directory
     * itself is not recreated at the destination, only its contents — confirm against
     * FileCopier.
     *
     * @param src the source file
     * @param dest the target file or directory; created automatically (both directories
     *     and files) if it does not exist
     * @param isOverride whether to overwrite the target file
     * @return the target directory or file
     * @throws IORuntimeException on I/O error
     * @since 4.1.5
     */
    public static File copyFilesFromDir(File src, File dest, boolean isOverride) throws IORuntimeException {
        return FileCopier.create(src, dest).setCopyContentIfDir(true).setOnlyCopyFile(true).setOverride(isOverride).copy();
    }
}
public class MappingTargetRepositoryImpl { /** * Creates a fully reconstructed MappingProject from an Entity retrieved from the repository .
* @ param mappingTargetEntity Entity with { @ link MappingProjectMetadata } metadata
* @ return fully reconstructed MappingProject */
private MappingTarget toMappingTarget ( Entity mappingTargetEntity ) { } } | List < EntityMapping > entityMappings = Collections . emptyList ( ) ; String identifier = mappingTargetEntity . getString ( MappingTargetMetadata . IDENTIFIER ) ; if ( ! dataService . hasRepository ( mappingTargetEntity . getString ( MappingTargetMetadata . TARGET ) ) ) { return null ; } EntityType target = dataService . getEntityType ( mappingTargetEntity . getString ( MappingTargetMetadata . TARGET ) ) ; if ( mappingTargetEntity . getEntities ( MappingTargetMetadata . ENTITY_MAPPINGS ) != null ) { List < Entity > entityMappingEntities = Lists . newArrayList ( mappingTargetEntity . getEntities ( MappingTargetMetadata . ENTITY_MAPPINGS ) ) ; entityMappings = entityMappingRepository . toEntityMappings ( entityMappingEntities ) ; } return new MappingTarget ( identifier , target , entityMappings ) ; |
public class SeparatorSet {
    /**
     * Processes the separators so that they do not intersect.
     * The vertical separators are left untouched; the horizontal separators are split
     * by the vertical ones when necessary. Repeats whole passes until a fixpoint is
     * reached, since a split may create a new separator that itself intersects another
     * vertical separator.
     */
    protected void processIntersectionsSplitHorizontal() {
        boolean change;
        do {
            Vector<Separator> newsep = new Vector<Separator>(hsep.size());
            change = false;
            for (Separator hs : hsep) {
                boolean split = false;
                for (Separator vs : vsep) {
                    if (hs.intersects(vs)) {
                        // hsplit mutates hs to the left part and returns the right part
                        // (or null when nothing remains on the right)
                        Separator nhs = hs.hsplit(vs);
                        newsep.add(hs);
                        if (nhs != null)
                            newsep.add(nhs);
                        split = true;
                        change = true;
                        break; // do not try other vertical seps
                    }
                }
                if (!split)
                    newsep.add(hs);
            }
            hsep = newsep;
        } while (change);
    }
}
public class NagiosNotifier {
    /**
     * Reports a failure in Nagios plugin style: prints the message prefixed with
     * "Warning: " and terminates the JVM with exit status 1 (WARNING).
     */
    @Override
    public void sendFailure(String message) {
        // NOTE(review): no trailing newline is emitted — confirm that the consumer of
        // this plugin output does not require one.
        System.out.print("Warning: " + message);
        // exit code 1 is the conventional Nagios WARNING status
        System.exit(1);
    }
}
public class Strategies { /** * Given some { @ code Clause } s returns a function that forwards the results of all
* clauses which condition is true , of an empty { @ code List } if no clause
* matches the condition . */
@ SafeVarargs public static < T , R > Function < T , List < R > > allMatches ( Clause < T , R > ... clauses ) { } } | return new AllMatchesStrategy < > ( Arrays . asList ( clauses ) ) ; |
public class GraphHopper {
    /**
     * Specify which vehicles can be read by this GraphHopper instance. An encoding
     * manager defines how data from every vehicle is written (and read) into edges of
     * the graph.
     *
     * @param em the encoding manager; must be set before the graph is loaded
     * @return this instance, for chaining
     */
    public GraphHopper setEncodingManager(EncodingManager em) {
        // changing the encoding manager after loading would mismatch the stored graph
        ensureNotLoaded();
        this.encodingManager = em;
        // turn costs require edge-based traversal in both directions
        if (em.needsTurnCostsSupport())
            traversalMode = TraversalMode.EDGE_BASED_2DIR;
        return this;
    }
}
public class CombinedStringDistanceLearner { /** * Prepare a single StringWrapper for a learner */
private StringWrapper prepareForLearner ( StringWrapper w , StringDistanceLearner learner ) { } } | StringWrapperIterator it = new BasicStringWrapperIterator ( Collections . singleton ( w ) . iterator ( ) ) ; return learner . prepare ( it ) . nextStringWrapper ( ) ; |
public class ObjFileImporter { /** * Creates a new { @ link Vertex } from data and adds it to { @ link # vertexes } .
* @ param data the data */
private void addVertex ( String data ) { } } | String coords [ ] = data . split ( "\\s+" ) ; float x = 0 ; float y = 0 ; float z = 0 ; if ( coords . length != 3 ) { MalisisCore . log . error ( "[ObjFileImporter] Wrong coordinates number {} at line {} : {}" , coords . length , lineNumber , currentLine ) ; } else { x = Float . parseFloat ( coords [ 0 ] ) ; y = Float . parseFloat ( coords [ 1 ] ) ; z = Float . parseFloat ( coords [ 2 ] ) ; } vertexes . add ( new Vertex ( x , y , z ) ) ; |
public class CodedInput {
    /**
     * Creates a new CodedInput wrapping the given byte array slice.
     *
     * @param buf the backing byte array
     * @param off offset of the first readable byte
     * @param len number of readable bytes
     * @return a CodedInput reading from {@code buf[off .. off+len)}
     */
    public static CodedInput newInstance(final byte[] buf, final int off, final int len) {
        // NOTE(review): the trailing boolean flag's meaning is defined by the CodedInput
        // constructor (not visible here) — confirm before changing it
        return new CodedInput(buf, off, len, false);
    }
}
public class StorageRestoreParameters { /** * Set the storageBundleBackup value .
* @ param storageBundleBackup the storageBundleBackup value to set
* @ return the StorageRestoreParameters object itself . */
public StorageRestoreParameters withStorageBundleBackup ( byte [ ] storageBundleBackup ) { } } | if ( storageBundleBackup == null ) { this . storageBundleBackup = null ; } else { this . storageBundleBackup = Base64Url . encode ( storageBundleBackup ) ; } return this ; |
public class AmazonEC2Client {
    /**
     * Creates a static route for the specified transit gateway route table.
     *
     * @param request the CreateTransitGatewayRoute request
     * @return Result of the CreateTransitGatewayRoute operation returned by the service.
     * @sample AmazonEC2.CreateTransitGatewayRoute
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateTransitGatewayRoute" target="_top">AWS
     *      API Documentation</a>
     */
    @Override
    public CreateTransitGatewayRouteResult createTransitGatewayRoute(CreateTransitGatewayRouteRequest request) {
        // run client-side request handlers/marshalling prep, then dispatch
        request = beforeClientExecution(request);
        return executeCreateTransitGatewayRoute(request);
    }
}
public class vpathparam { /** * Use this API to update vpathparam . */
public static base_response update ( nitro_service client , vpathparam resource ) throws Exception { } } | vpathparam updateresource = new vpathparam ( ) ; updateresource . srcip = resource . srcip ; return updateresource . update_resource ( client ) ; |
public class ReteooRuleBuilder {
    /**
     * Creates the corresponding Rete network for the given <code>Rule</code> and adds it
     * to the given rule base.
     *
     * @param rule The rule to add.
     * @param kBase The rulebase to add the rule to.
     * @return a List&lt;TerminalNode&gt; of terminal nodes for the rule
     * @throws InvalidPatternException
     */
    public List<TerminalNode> addRule(final RuleImpl rule, final InternalKnowledgeBase kBase) throws InvalidPatternException {
        // the list of terminal nodes
        final List<TerminalNode> nodes = new ArrayList<TerminalNode>();
        // transform rule and gets the array of subrules
        final GroupElement[] subrules = rule.getTransformedLhs(kBase.getConfiguration().getComponentFactory().getLogicTransformerFactory().getLogicTransformer(), kBase.getGlobals());
        for (int i = 0; i < subrules.length; i++) {
            // creates a clean build context for each subrule
            final BuildContext context = new BuildContext(kBase);
            context.setRule(rule);
            // if running in STREAM mode, calculate temporal distance for events
            if (EventProcessingOption.STREAM.equals(kBase.getConfiguration().getEventProcessingMode())) {
                TemporalDependencyMatrix temporal = this.utils.calculateTemporalDistance(subrules[i]);
                context.setTemporalDistance(temporal);
            }
            // sequential mode disables tuple and object-type-node memories
            if (kBase.getConfiguration().isSequential()) {
                context.setTupleMemoryEnabled(false);
                context.setObjectTypeNodeMemoryEnabled(false);
            } else {
                context.setTupleMemoryEnabled(true);
                context.setObjectTypeNodeMemoryEnabled(true);
            }
            // adds subrule
            final TerminalNode node = this.addSubRule(context, subrules[i], i, rule);
            // adds the terminal node to the list of terminal nodes
            nodes.add(node);
        }
        return nodes;
    }
}
public class AmazonEC2Client { /** * Deletes one or more VPC endpoint service configurations in your account . Before you delete the endpoint service
* configuration , you must reject any < code > Available < / code > or < code > PendingAcceptance < / code > interface endpoint
* connections that are attached to the service .
* @ param deleteVpcEndpointServiceConfigurationsRequest
* @ return Result of the DeleteVpcEndpointServiceConfigurations operation returned by the service .
* @ sample AmazonEC2 . DeleteVpcEndpointServiceConfigurations
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / DeleteVpcEndpointServiceConfigurations "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DeleteVpcEndpointServiceConfigurationsResult deleteVpcEndpointServiceConfigurations ( DeleteVpcEndpointServiceConfigurationsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteVpcEndpointServiceConfigurations ( request ) ; |
public class VirtualHubsInner { /** * Creates a VirtualHub resource if it doesn ' t exist else updates the existing VirtualHub .
* @ param resourceGroupName The resource group name of the VirtualHub .
* @ param virtualHubName The name of the VirtualHub .
* @ param virtualHubParameters Parameters supplied to create or update VirtualHub .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the VirtualHubInner object */
public Observable < VirtualHubInner > beginCreateOrUpdateAsync ( String resourceGroupName , String virtualHubName , VirtualHubInner virtualHubParameters ) { } } | return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , virtualHubName , virtualHubParameters ) . map ( new Func1 < ServiceResponse < VirtualHubInner > , VirtualHubInner > ( ) { @ Override public VirtualHubInner call ( ServiceResponse < VirtualHubInner > response ) { return response . body ( ) ; } } ) ; |
public class LibertyJava8WorkaroundRuntimeTransformer {

    /**
     * Instruments the supplied class bytes with trace injection.
     *
     * @param bytes the raw class file bytes to instrument
     * @param skipIfNotPreprocessed when true, the tracing adapter skips classes
     *        that have not been preprocessed — TODO confirm against
     *        LibertyTracingClassAdapter's constructor contract
     * @return instrumented class file or null if the class has already
     *         been instrumented (i.e. the adapter reports no modification)
     * @throws IOException if an error is encountered while instrumenting the class
     */
    public static byte [ ] transform ( byte [ ] bytes , boolean skipIfNotPreprocessed ) throws IOException {
        if ( detailedTransformTrace && tc . isEntryEnabled ( ) )
            Tr . entry ( tc , "transform" ) ;

        ClassReader reader = new ClassReader ( bytes ) ;
        // COMPUTE_MAXS: let ASM recompute max stack/locals for the rewritten methods.
        ClassWriter writer = new ClassWriter ( reader , ClassWriter . COMPUTE_MAXS ) ;

        StringWriter sw = null ;
        ClassVisitor visitor = writer ;
        // When dump trace is enabled, wrap the writer so the transformed class is
        // verified (CheckClassAdapter) and its textual form captured (TraceClassVisitor).
        if ( tc . isDumpEnabled ( ) ) {
            sw = new StringWriter ( ) ;
            visitor = new CheckClassAdapter ( visitor , false ) ;
            visitor = new TraceClassVisitor ( visitor , new PrintWriter ( sw ) ) ;
        }
        // The tracing adapter sits at the head of the visitor chain and performs
        // the actual trace injection.
        LibertyTracingClassAdapter tracingClassAdapter = new LibertyTracingClassAdapter ( visitor , skipIfNotPreprocessed ) ;
        try {
            // Class reader must maintain all metadata information that's present
            // in the class (debug data is only skipped when skipDebugData is set).
            reader . accept ( tracingClassAdapter , skipDebugData ? ClassReader . SKIP_DEBUG : 0 ) ;
        } catch ( Throwable t ) {
            // Wrap any failure (including ASM errors) as an IOException, preserving the cause.
            IOException ioe = new IOException ( "Unable to instrument class stream with trace: " + t . getMessage ( ) , t ) ;
            throw ioe ;
        }
        // Provide a whole lot of detailed information on the resulting class.
        if ( detailedTransformTrace && tc . isDumpEnabled ( ) && tracingClassAdapter . isClassModified ( ) ) {
            Tr . dump ( tc , "Transformed class" , sw ) ;
        }
        // Short circuit when the class didn't change: return null so the caller
        // can keep the original bytes.
        byte [ ] result = tracingClassAdapter . isClassModified ( ) ? writer . toByteArray ( ) : null ;
        if ( detailedTransformTrace && tc . isEntryEnabled ( ) )
            Tr . exit ( tc , "transform" , result ) ;
        return result ;
    }
}
public class APNSChannelResponseMarshaller {

    /**
     * Marshalls the given {@link APNSChannelResponse} field-by-field into the
     * supplied protocol marshaller, pairing each getter with its binding constant.
     *
     * @param aPNSChannelResponse the model object to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving each (value, binding) pair
     * @throws SdkClientException if the argument is null or any field fails to marshall
     */
    public void marshall ( APNSChannelResponse aPNSChannelResponse , ProtocolMarshaller protocolMarshaller ) {
        // Fail fast on a null model rather than NPE-ing mid-marshall.
        if ( aPNSChannelResponse == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            // Each field is written with its generated binding descriptor.
            protocolMarshaller . marshall ( aPNSChannelResponse . getApplicationId ( ) , APPLICATIONID_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getCreationDate ( ) , CREATIONDATE_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getDefaultAuthenticationMethod ( ) , DEFAULTAUTHENTICATIONMETHOD_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getEnabled ( ) , ENABLED_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getHasCredential ( ) , HASCREDENTIAL_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getHasTokenKey ( ) , HASTOKENKEY_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getId ( ) , ID_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getIsArchived ( ) , ISARCHIVED_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getLastModifiedBy ( ) , LASTMODIFIEDBY_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getLastModifiedDate ( ) , LASTMODIFIEDDATE_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getPlatform ( ) , PLATFORM_BINDING ) ;
            protocolMarshaller . marshall ( aPNSChannelResponse . getVersion ( ) , VERSION_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class AcolyteDSL { /** * Executes | f | using a connection accepting only queries ,
* and answering with | result | to any query .
* < pre >
* { @ code
* import static acolyte . jdbc . AcolyteDSL . withQueryResult ;
* String str = withQueryResult ( queryRes , con - > " str " ) ;
* < / pre > */
public static < A > A withQueryResult ( QueryResult res , Function < java . sql . Connection , A > f ) { } } | return f . apply ( connection ( handleQuery ( ( x , y ) -> res ) ) ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.