signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class AVQuery { /** * Add a constraint to the query that requires a particular key ' s value to be greater or equal to
* than the provided value .
* @ param key The key to check .
* @ param value The value that provides an lower bound .
* @ return Returns the query , so you can chain this call . */
public AVQuery < T > whereGreaterThanOrEqualTo ( String key , Object value ) { } }
|
conditions . whereGreaterThanOrEqualTo ( key , value ) ; return this ;
|
public class CmsSitemapController { /** * Clears the modified clip - board list and commits the change . < p > */
public void clearModifiedList ( ) { } }
|
CmsSitemapClipboardData clipboardData = getData ( ) . getClipboardData ( ) . copy ( ) ; clipboardData . getModifications ( ) . clear ( ) ; CmsSitemapChange change = new CmsSitemapChange ( null , null , ChangeType . clipboardOnly ) ; change . setClipBoardData ( clipboardData ) ; commitChange ( change , null ) ;
|
public class UserUtil { /** * Create users for the given array of addresses . The passwords will be set to the email addresses .
* @ param greenMail Greenmail instance to create users for
* @ param addresses Addresses */
public static void createUsers ( GreenMailOperations greenMail , InternetAddress ... addresses ) { } }
|
for ( InternetAddress address : addresses ) { greenMail . setUser ( address . getAddress ( ) , address . getAddress ( ) ) ; }
|
public class XmlUtils { /** * Returns a default XML document
* @ return The default XML document */
public static synchronized Document getDefaultDocument ( ) { } }
|
if ( defaultDocument == null ) { DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory . newInstance ( ) ; DocumentBuilder documentBuilder = null ; try { documentBuilder = documentBuilderFactory . newDocumentBuilder ( ) ; } catch ( ParserConfigurationException e ) { // Can not happen , since it ' s the default configuration
throw new XmlException ( "Could not create default document" , e ) ; } defaultDocument = documentBuilder . newDocument ( ) ; } return defaultDocument ;
|
public class ChannelServiceImpl { /** * ( non - Javadoc )
* @ see
* io . joynr . messaging . service . ChannelServiceDelegate # openChannel ( java . lang
* . String , java . lang . Integer , java . lang . String ) */
@ Override public Broadcastable openChannel ( String ccid , Integer cacheIndex , String atmosphereTrackingId ) { } }
|
throw new JoynrCommunicationException ( "No channels can't be opened on bounce proxy controller, only on bounce proxies." ) ;
|
public class DataSiftAccount { /** * Fetch a Limit
* @ param identity the ID of the identity
* @ param service the name of the service
* @ return The limit for the service in that identity */
public FutureData < Limit > getLimit ( String identity , String service ) { } }
|
FutureData < Limit > future = new FutureData < > ( ) ; URI uri = newParams ( ) . forURL ( config . newAPIEndpointURI ( IDENTITY + "/" + identity + "/limit/" + service ) ) ; Request request = config . http ( ) . GET ( uri , new PageReader ( newRequestCallback ( future , new Limit ( ) , config ) ) ) ; performRequest ( future , request ) ; return future ;
|
public class CmsOUTable { /** * Updates app . < p >
* @ param itemId of current item */
protected void updateApp ( String itemId ) { } }
|
I_CmsOuTreeType foundType = null ; for ( I_CmsOuTreeType type : m_app . getTreeTypeProvider ( ) . getTreeTypes ( ) ) { if ( itemId . equals ( type . getId ( ) ) ) { foundType = type ; break ; } } if ( foundType != null ) { m_app . update ( m_parentOu , foundType , null ) ; return ; } m_app . update ( itemId , CmsOuTreeType . OU , null , "" ) ;
|
public class HttpUtils { /** * access given action with given parameters by post , if get exception , will
* retry by given retryTimes .
* @ param action action url to access
* @ param parameters parameters to post
* @ param retryTimes retry times when get exception .
* @ return response content of target action url . */
public static String doPost ( final String action , final Map < String , String > parameters , final int retryTimes ) { } }
|
try { return doPostByLoop ( action , parameters , retryTimes ) ; } catch ( HttpException e ) { throw new HttpException ( format ( "Failed to download content for action url: '%s' with parameters. Tried '%s' times" , action , parameters , Math . max ( retryTimes + 1 , 1 ) ) ) ; }
|
public class XRemotingProxyFactory { /** * Creates a proxy for the given interface .
* @ param ifaceinterface for which to create proxy
* @ param proxyLoaderclassloader for proxy
* @ return proxy */
public Object create ( Class < ? > iface , ClassLoader proxyLoader ) { } }
|
return create ( new Class [ ] { iface } , proxyLoader ) ;
|
public class TravellingSalesmanApplet { /** * Helper method for formatting a result as a string for display . */
private String createResultString ( String strategyDescription , List < String > shortestRoute , double distance , long elapsedTime ) { } }
|
StringBuilder buffer = new StringBuilder ( ) ; buffer . append ( '[' ) ; buffer . append ( strategyDescription ) ; buffer . append ( "]\n" ) ; buffer . append ( "ROUTE: " ) ; for ( String s : shortestRoute ) { buffer . append ( s ) ; buffer . append ( " -> " ) ; } buffer . append ( shortestRoute . get ( 0 ) ) ; buffer . append ( '\n' ) ; buffer . append ( "TOTAL DISTANCE: " ) ; buffer . append ( String . valueOf ( distance ) ) ; buffer . append ( "km\n" ) ; buffer . append ( "(Search Time: " ) ; double seconds = ( double ) elapsedTime / 1000 ; buffer . append ( String . valueOf ( seconds ) ) ; buffer . append ( " seconds)\n\n" ) ; return buffer . toString ( ) ;
|
public class CmsJspTagParam { /** * Simply send our name and value to our appropriate ancestor . < p >
* @ throws JspException ( never thrown , required by interface )
* @ return EVAL _ PAGE */
@ Override public int doEndTag ( ) throws JspException { } }
|
Tag t = findAncestorWithClass ( this , I_CmsJspTagParamParent . class ) ; if ( t == null ) { throw new JspTagException ( Messages . get ( ) . getBundle ( pageContext . getRequest ( ) . getLocale ( ) ) . key ( Messages . ERR_PARENTLESS_TAG_1 , new Object [ ] { "param" } ) ) ; } // take no action for null or empty names
if ( CmsStringUtil . isEmpty ( m_name ) ) { return EVAL_PAGE ; } // send the parameter to the appropriate ancestor
I_CmsJspTagParamParent parent = ( I_CmsJspTagParamParent ) t ; String value = m_value ; if ( value == null ) { if ( ( bodyContent == null ) || ( bodyContent . getString ( ) == null ) ) { value = "" ; } else { value = bodyContent . getString ( ) . trim ( ) ; } } if ( m_encode ) { parent . addParameter ( CmsEncoder . encode ( m_name , OpenCms . getSystemInfo ( ) . getDefaultEncoding ( ) ) , CmsEncoder . encode ( value , OpenCms . getSystemInfo ( ) . getDefaultEncoding ( ) ) ) ; } else { parent . addParameter ( m_name , value ) ; } return EVAL_PAGE ;
|
public class Dstream { /** * Iterates through cluster _ list to ensure that all empty clusters have been removed and
* that all cluster IDs match the cluster ' s index in cluster _ list . */
private void cleanClusters ( ) { } }
|
// System . out . println ( " Clean Clusters " ) ;
Iterator < GridCluster > clusIter = this . cluster_list . iterator ( ) ; ArrayList < GridCluster > toRem = new ArrayList < GridCluster > ( ) ; // Check to see if there are any empty clusters
while ( clusIter . hasNext ( ) ) { GridCluster c = clusIter . next ( ) ; if ( c . getWeight ( ) == 0 ) toRem . add ( c ) ; } // Remove empty clusters
if ( ! toRem . isEmpty ( ) ) { clusIter = toRem . iterator ( ) ; while ( clusIter . hasNext ( ) ) { this . cluster_list . remove ( clusIter . next ( ) ) ; } } // Adjust remaining clusters as necessary
clusIter = this . cluster_list . iterator ( ) ; while ( clusIter . hasNext ( ) ) { GridCluster c = clusIter . next ( ) ; int index = this . cluster_list . indexOf ( c ) ; c . setClusterLabel ( index ) ; this . cluster_list . set ( index , c ) ; Iterator < Map . Entry < DensityGrid , Boolean > > gridsOfClus = c . getGrids ( ) . entrySet ( ) . iterator ( ) ; while ( gridsOfClus . hasNext ( ) ) { DensityGrid dg = gridsOfClus . next ( ) . getKey ( ) ; CharacteristicVector cv = this . grid_list . get ( dg ) ; if ( cv == null ) { System . out . println ( "Warning, cv is null for " + dg . toString ( ) + " from cluster " + index + "." ) ; printGridList ( ) ; printGridClusters ( ) ; } // System . out . println ( " Cluster " + index + " : " + dg . toString ( ) + " is here . " ) ;
cv . setLabel ( index ) ; this . grid_list . put ( dg , cv ) ; } }
|
public class CloudDirectoryUtils { /** * Gets list index request .
* @ param attributeName the attribute name
* @ param attributeValue the attribute value
* @ param reference the reference
* @ param cloud the cloud
* @ return the list index request */
public static ListIndexRequest getListIndexRequest ( final String attributeName , final String attributeValue , final ObjectReference reference , final CloudDirectoryProperties cloud ) { } }
|
val range = getObjectAttributeRanges ( cloud . getSchemaArn ( ) , cloud . getFacetName ( ) , attributeName , attributeValue ) ; return new ListIndexRequest ( ) . withDirectoryArn ( cloud . getDirectoryArn ( ) ) . withIndexReference ( reference ) . withRangesOnIndexedValues ( range ) ;
|
public class HTODDynacache { /** * writeTemplateEntry ( )
* This adds a new entry for the specified template . */
public int writeTemplateEntry ( String template , Object entry ) { } }
|
int rc = NO_EXCEPTION ; if ( ! this . disableTemplatesSupport ) { if ( delayOffload ) { ValueSet vs = auxTemplateDependencyTable . getEntries ( template ) ; if ( vs == null ) { Result result = writeValueSetEntry ( TEMPLATE_ID_DATA , template , entry , null , ! ADD_IF_NEW ) ; rc = result . returnCode ; boolean bExist = result . bExist ; returnToResultPool ( result ) ; if ( rc != DISK_EXCEPTION && rc != DISK_SIZE_OVER_LIMIT_EXCEPTION && bExist == ! EXIST ) { rc = auxTemplateDependencyTable . add ( template , entry ) ; } } else { rc = auxTemplateDependencyTable . add ( template , vs , entry ) ; } } else { Result result = writeValueSetEntry ( TEMPLATE_ID_DATA , template , entry , null , ADD_IF_NEW ) ; rc = result . returnCode ; returnToResultPool ( result ) ; } } return rc ;
|
public class Environment { /** * Merges two environments . The properties of the first environment might be overwritten by the second one . */
public static Environment merge ( Environment env1 , Environment env2 ) { } }
|
final Environment mergedEnv = new Environment ( ) ; // merge tables
final Map < String , TableEntry > tables = new LinkedHashMap < > ( env1 . getTables ( ) ) ; tables . putAll ( env2 . getTables ( ) ) ; mergedEnv . tables = tables ; // merge functions
final Map < String , FunctionEntry > functions = new HashMap < > ( env1 . getFunctions ( ) ) ; functions . putAll ( env2 . getFunctions ( ) ) ; mergedEnv . functions = functions ; // merge execution properties
mergedEnv . execution = ExecutionEntry . merge ( env1 . getExecution ( ) , env2 . getExecution ( ) ) ; // merge deployment properties
mergedEnv . deployment = DeploymentEntry . merge ( env1 . getDeployment ( ) , env2 . getDeployment ( ) ) ; return mergedEnv ;
|
public class DeleteVocabularyRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteVocabularyRequest deleteVocabularyRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( deleteVocabularyRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteVocabularyRequest . getVocabularyName ( ) , VOCABULARYNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class ST_AsKml { /** * Generates a KML geometry . Specifies the extrude and altitudeMode .
* Available extrude values are true , false or none .
* Supported altitude mode :
* For KML profil
* CLAMPTOGROUND = 1 ; RELATIVETOGROUND = 2 ; ABSOLUTE = 4;
* For GX profil CLAMPTOSEAFLOOR = 8 ; RELATIVETOSEAFLOOR = 16;
* No altitude : NONE = 0;
* @ param geometry
* @ param altitudeModeEnum
* @ param extrude
* @ return
* @ throws SQLException */
public static String toKml ( Geometry geometry , boolean extrude , int altitudeModeEnum ) throws SQLException { } }
|
StringBuilder sb = new StringBuilder ( ) ; if ( extrude ) { KMLGeometry . toKMLGeometry ( geometry , ExtrudeMode . TRUE , altitudeModeEnum , sb ) ; } else { KMLGeometry . toKMLGeometry ( geometry , ExtrudeMode . FALSE , altitudeModeEnum , sb ) ; } return sb . toString ( ) ;
|
public class MBeanAccessChecker { /** * { @ inheritDoc } */
@ Override public boolean check ( Arg pArg ) { } }
|
if ( pArg . isTypeAllowed ( ) ) { // Its allowed in general , so we only need to check
// the denied section , whether its forbidded
return deny == null || ! matches ( deny , pArg ) ; } else { // Its forbidden by default , so we need to check the
// allowed section
return allow != null && matches ( allow , pArg ) ; }
|
public class JTableSelectedRowCountProperty { /** * Updates the value of this property based on the table ' s selection model and notify the listeners . */
private void updateValue ( ) { } }
|
if ( table != null ) { int oldCount = this . count ; this . count = table . getSelectedRowCount ( ) ; maybeNotifyListeners ( oldCount , count ) ; }
|
public class Neo4JVertex { /** * { @ inheritDoc } */
@ Override public Iterator < Edge > edges ( Direction direction , String ... labels ) { } }
|
Objects . requireNonNull ( direction , "direction cannot be null" ) ; Objects . requireNonNull ( labels , "labels cannot be null" ) ; // transaction should be ready for io operations
graph . tx ( ) . readWrite ( ) ; // load labels in hash set ( remove duplicates )
Set < String > set = new HashSet < > ( Arrays . asList ( labels ) ) ; // parameters
Map < String , Object > parameters = new HashMap < > ( ) ; // vertex id
parameters . put ( "id" , id ( ) ) ; // out edges
if ( direction == Direction . OUT ) { // check we have all edges in memory
if ( ! outEdgesLoaded ) { // labels we need to query for
Set < String > relationshipLabels = set . stream ( ) . filter ( item -> ! outEdgeLabels . contains ( item ) ) . collect ( Collectors . toSet ( ) ) ; // check query is required for labels
if ( set . isEmpty ( ) || ! relationshipLabels . isEmpty ( ) ) { // create string builder
StringBuilder builder = new StringBuilder ( ) ; // match clause
builder . append ( "MATCH " ) . append ( matchPattern ( "n" ) ) . append ( "-[r" ) . append ( relationshipLabels . stream ( ) . map ( label -> ":`" + label + "`" ) . collect ( Collectors . joining ( "|" ) ) ) . append ( "]->(m" ) . append ( processLabels ( Collections . emptySet ( ) , true ) ) . append ( ")" ) . append ( " WHERE " ) . append ( vertexIdProvider . matchPredicateOperand ( "n" ) ) . append ( " = {id}" ) ; // edge ids already in memory
List < Object > identifiers = outEdges . stream ( ) . map ( Neo4JEdge :: id ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; // process where clause
processEdgesWhereClause ( "m" , identifiers , "r" , builder , parameters ) ; // return
builder . append ( " RETURN n, r, m" ) ; // create statement
Statement statement = new Statement ( builder . toString ( ) , parameters ) ; // execute statement
StatementResult result = session . executeStatement ( statement ) ; // execute command
Stream < Edge > query = session . edges ( result ) ; // edges in memory plus the ones in database ( return copy since edges can be deleted in the middle of the loop )
Iterator < Edge > iterator = Stream . concat ( ( labels . length != 0 ? outEdges . stream ( ) . filter ( edge -> set . contains ( edge . label ( ) ) ) : outEdges . stream ( ) ) . map ( edge -> ( Edge ) edge ) , query ) . collect ( Collectors . toList ( ) ) . iterator ( ) ; // process summary ( query has been already consumed by combine )
ResultSummaryLogger . log ( result . consume ( ) ) ; // after this line it is safe to update loaded flag and labels in memory
outEdgesLoaded = labels . length == 0 ; outEdgeLabels . addAll ( set ) ; // return iterator
return iterator ; } } // edges in memory ( return copy since edges can be deleted in the middle of the loop )
return outEdges . stream ( ) . filter ( edge -> labels . length == 0 || set . contains ( edge . label ( ) ) ) . map ( edge -> ( Edge ) edge ) . collect ( Collectors . toList ( ) ) . iterator ( ) ; } // in edges
if ( direction == Direction . IN ) { // check we have all edges in memory
if ( ! inEdgesLoaded ) { // labels we need to query for
Set < String > relationshipLabels = set . stream ( ) . filter ( item -> ! inEdgeLabels . contains ( item ) ) . collect ( Collectors . toSet ( ) ) ; // check query is required for labels
if ( set . isEmpty ( ) || ! relationshipLabels . isEmpty ( ) ) { // create string builder
StringBuilder builder = new StringBuilder ( ) ; // match clause
builder . append ( "MATCH " ) . append ( matchPattern ( "n" ) ) . append ( "<-[r" ) . append ( relationshipLabels . stream ( ) . map ( label -> ":`" + label + "`" ) . collect ( Collectors . joining ( "|" ) ) ) . append ( "]-(m" ) . append ( processLabels ( Collections . emptySet ( ) , true ) ) . append ( ")" ) . append ( " WHERE " ) . append ( vertexIdProvider . matchPredicateOperand ( "n" ) ) . append ( " = {id}" ) ; // edge ids already in memory
List < Object > identifiers = inEdges . stream ( ) . map ( Neo4JEdge :: id ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; // process where clause
processEdgesWhereClause ( "m" , identifiers , "r" , builder , parameters ) ; // return
builder . append ( " RETURN n, r, m" ) ; // create statement
Statement statement = new Statement ( builder . toString ( ) , parameters ) ; // execute statement
StatementResult result = session . executeStatement ( statement ) ; // execute command
Stream < Edge > query = session . edges ( result ) ; // edges in memory plus the ones in database ( return copy since edges can be deleted in the middle of the loop )
Iterator < Edge > iterator = Stream . concat ( ( labels . length != 0 ? inEdges . stream ( ) . filter ( edge -> set . contains ( edge . label ( ) ) ) : inEdges . stream ( ) ) . map ( edge -> ( Edge ) edge ) , query ) . collect ( Collectors . toList ( ) ) . iterator ( ) ; // process summary ( query has been already consumed by combine )
ResultSummaryLogger . log ( result . consume ( ) ) ; // after this line it is safe to update loaded flag and labels in memory
inEdgesLoaded = labels . length == 0 ; inEdgeLabels . addAll ( set ) ; // return iterator
return iterator ; } } // edges in memory ( return copy since edges can be deleted in the middle of the loop )
return inEdges . stream ( ) . filter ( edge -> labels . length == 0 || set . contains ( edge . label ( ) ) ) . map ( edge -> ( Edge ) edge ) . collect ( Collectors . toList ( ) ) . iterator ( ) ; } // check we have all edges in memory
if ( ! outEdgesLoaded || ! inEdgesLoaded ) { // check we have labels already in memory
if ( set . isEmpty ( ) || ! outEdgeLabels . containsAll ( set ) || ! inEdgeLabels . containsAll ( set ) ) { // create string builder
StringBuilder builder = new StringBuilder ( ) ; // match clause
builder . append ( "MATCH " ) . append ( matchPattern ( "n" ) ) . append ( "-[r" ) . append ( set . stream ( ) . map ( label -> ":`" + label + "`" ) . collect ( Collectors . joining ( "|" ) ) ) . append ( "]-(m" ) . append ( processLabels ( Collections . emptySet ( ) , true ) ) . append ( ")" ) . append ( " WHERE " ) . append ( vertexIdProvider . matchPredicateOperand ( "n" ) ) . append ( " = {id}" ) ; // edge ids already in memory
List < Object > identifiers = Stream . concat ( outEdges . stream ( ) , inEdges . stream ( ) ) . map ( Neo4JEdge :: id ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; // process where clause
processEdgesWhereClause ( "m" , identifiers , "r" , builder , parameters ) ; // return
builder . append ( " RETURN n, r, m" ) ; // create statement
Statement statement = new Statement ( builder . toString ( ) , parameters ) ; // execute statement
StatementResult result = session . executeStatement ( statement ) ; // execute command
Stream < Edge > query = session . edges ( result ) ; // edges in memory plus the ones in database ( return copy since edges can be deleted in the middle of the loop )
Iterator < Edge > iterator = Stream . concat ( Stream . concat ( labels . length != 0 ? outEdges . stream ( ) . filter ( edge -> set . contains ( edge . label ( ) ) ) : outEdges . stream ( ) , labels . length != 0 ? inEdges . stream ( ) . filter ( edge -> set . contains ( edge . label ( ) ) ) : inEdges . stream ( ) ) . map ( edge -> ( Edge ) edge ) , query ) . collect ( Collectors . toList ( ) ) . iterator ( ) ; // process summary ( query has been already consumed by combine )
ResultSummaryLogger . log ( result . consume ( ) ) ; // after this line it is safe to update loaded flags
outEdgesLoaded = outEdgesLoaded || labels . length == 0 ; inEdgesLoaded = inEdgesLoaded || labels . length == 0 ; // update labels in memory
outEdgeLabels . addAll ( set ) ; inEdgeLabels . addAll ( set ) ; // return iterator
return iterator ; } } // edges in memory ( return copy since edges can be deleted in the middle of the loop )
return Stream . concat ( labels . length != 0 ? inEdges . stream ( ) . filter ( edge -> set . contains ( edge . label ( ) ) ) : inEdges . stream ( ) , labels . length != 0 ? outEdges . stream ( ) . filter ( edge -> set . contains ( edge . label ( ) ) ) : outEdges . stream ( ) ) . map ( edge -> ( Edge ) edge ) . collect ( Collectors . toList ( ) ) . iterator ( ) ;
|
public class DefaultGroovyMethods { /** * Create a Collection as a union of two iterables . If the left iterable
* is a Set , then the returned collection will be a Set otherwise a List .
* This operation will always create a new object for the result ,
* while the operands remain unchanged .
* < pre class = " groovyTestCase " > assert [ 1,2,3,4 ] = = [ 1,2 ] + [ 3,4 ] < / pre >
* @ param left the left Iterable
* @ param right the right Iterable
* @ return the merged Collection
* @ since 2.4.0 */
public static < T > Collection < T > plus ( Iterable < T > left , Iterable < T > right ) { } }
|
return plus ( asCollection ( left ) , asCollection ( right ) ) ;
|
public class StreamBlockQueue { /** * Poll stream table schema that either represents the first object in the memory dequeue , or
* from the segment that the cursor is currently reading on . */
public BBContainer pollSchema ( ) { } }
|
BBContainer schemaCont = null ; long segmentIndex = - 1 ; if ( m_memoryDeque . peek ( ) != null ) { StreamBlock sb = m_memoryDeque . peek ( ) ; BBContainer cont = sb . getSchemaContainer ( ) ; if ( cont != null ) { return cont ; } segmentIndex = sb . getSegmentIndex ( ) ; } try { schemaCont = m_reader . getExtraHeader ( segmentIndex ) ; } catch ( IOException e ) { exportLog . error ( "Failed to poll schema: " + e ) ; } return schemaCont ;
|
public class PersistenceUtils { /** * Save object to the binary file defined as filename in property " persistence . store . file " .
* @ param obj the object to save , which should be serializable
* @ param storeType the store type */
public static void saveAll ( final Object obj , final PersistenceStoreType storeType ) { } }
|
try { File file = new File ( persistenceStorePath + File . separator + storeType ) ; // create necessary parent directories on file system
File directory = file . getParentFile ( ) ; if ( null != directory && ! directory . exists ( ) ) { directory . mkdirs ( ) ; } log . info ( "aws-mock: saving to {}" , file . getAbsolutePath ( ) ) ; ObjectOutputStream out = new ObjectOutputStream ( new FileOutputStream ( file , false ) ) ; out . writeObject ( obj ) ; out . close ( ) ; } catch ( FileNotFoundException e ) { log . error ( "FileNotFoundException caught during saving object to file: {}" , e . getMessage ( ) ) ; } catch ( IOException e ) { log . error ( "IOException caught during saving object to file: {}" , e . getMessage ( ) ) ; }
|
public class PathOperations { /** * Delete an existing directory including all child objects if it is existing .
* @ param aDir
* The directory to be deleted . May not be < code > null < / code > .
* @ return A non - < code > null < / code > error code . */
@ Nonnull public static FileIOError deleteDirRecursiveIfExisting ( @ Nonnull final Path aDir ) { } }
|
final FileIOError aError = deleteDirRecursive ( aDir ) ; if ( aError . getErrorCode ( ) . equals ( EFileIOErrorCode . SOURCE_DOES_NOT_EXIST ) ) return aError . withoutErrorCode ( ) ; return aError ;
|
public class ClassAccessor { /** * Determines whether T declares a field . This does not include inherited fields .
* @ param field The field that we want to detect .
* @ return True if T declares the field . */
public boolean declaresField ( Field field ) { } }
|
try { type . getDeclaredField ( field . getName ( ) ) ; return true ; } catch ( NoSuchFieldException e ) { return false ; }
|
public class MappingUtils { /** * Invokes class function using Reflection
* @ param object Instance which function would be invoked
* @ param functionName function name
* @ param parameters function parameters ( array of Class )
* @ param values function values ( array of Object )
* @ return function return
* @ throws org . midao . jdbc . core . exception . MjdbcException in case function doesn ' t exists */
public static Object invokeFunction ( Object object , String functionName , Class [ ] parameters , Object [ ] values ) throws MjdbcException { } }
|
Object result = null ; try { Method method = object . getClass ( ) . getMethod ( functionName , parameters ) ; method . setAccessible ( true ) ; result = method . invoke ( object , values ) ; } catch ( Exception ex ) { throw new MjdbcException ( ex ) ; } return result ;
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < }
* { @ link CmisExtensionType } { @ code > } */
@ XmlElementDecl ( namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/" , name = "extension" , scope = CreateFolderResponse . class ) public JAXBElement < CmisExtensionType > createCreateFolderResponseExtension ( CmisExtensionType value ) { } }
|
return new JAXBElement < CmisExtensionType > ( _GetPropertiesExtension_QNAME , CmisExtensionType . class , CreateFolderResponse . class , value ) ;
|
public class DriverFactory { /** * Creates new { @ link LoadBalancer } for the routing driver .
* < b > This method is protected only for testing < / b > */
protected LoadBalancer createLoadBalancer ( BoltServerAddress address , ConnectionPool connectionPool , EventExecutorGroup eventExecutorGroup , Config config , RoutingSettings routingSettings ) { } }
|
LoadBalancingStrategy loadBalancingStrategy = createLoadBalancingStrategy ( config , connectionPool ) ; ServerAddressResolver resolver = createResolver ( config ) ; return new LoadBalancer ( address , routingSettings , connectionPool , eventExecutorGroup , createClock ( ) , config . logging ( ) , loadBalancingStrategy , resolver ) ;
|
public class CmsRequestContext { /** * Sets an attribute in the request context . < p >
* @ param key the attribute name
* @ param value the attribute value */
public void setAttribute ( String key , Object value ) { } }
|
if ( m_attributeMap == null ) { // hash table is still the most efficient form of a synchronized Map
m_attributeMap = new Hashtable < String , Object > ( ) ; } m_attributeMap . put ( key , value ) ;
|
public class GetMasterInfoPOptions { /** * < code > repeated . alluxio . grpc . meta . MasterInfoField filter = 1 ; < / code > */
public alluxio . grpc . MasterInfoField getFilter ( int index ) { } }
|
return filter_converter_ . convert ( filter_ . get ( index ) ) ;
|
public class QueryRendererDelegateImpl { /** * Add field sort criteria .
* @ param collateName optional collation name
* @ param isAscending sort direction */
@ Override public void sortSpecification ( String collateName , boolean isAscending ) { } }
|
// collationName is ignored for now
PropertyPath < TypeDescriptor < TypeMetadata > > property = resolveAlias ( propertyPath ) ; checkAnalyzed ( property , false ) ; // todo [ anistor ] cannot sort on analyzed field ?
if ( sortFields == null ) { sortFields = new ArrayList < > ( ARRAY_INITIAL_LENGTH ) ; } sortFields . add ( new IckleParsingResult . SortFieldImpl < > ( property , isAscending ) ) ;
|
public class PersonDirectoryConfiguration { /** * Session - scoped descriptors object . One of these will exist for each user in their session . It
* will store the attributes from the request set by the requestAttributeSourceFilter . This must
* hold both a session - scoped bean and request - scoped bean . See
* http : / / permalink . gmane . org / gmane . comp . java . jasig . uportal / 10771 for more information . */
@ Bean ( name = "requestAdditionalDescriptors" ) public IAdditionalDescriptors getRequestAdditionalDescriptors ( ) { } }
|
final MediatingAdditionalDescriptors rslt = new MediatingAdditionalDescriptors ( ) ; final List < IAdditionalDescriptors > delegateDescriptors = new ArrayList < > ( ) ; delegateDescriptors . add ( getSessionScopeAdditionalDescriptors ( ) ) ; delegateDescriptors . add ( getRequestScopeAdditionalDescriptors ( ) ) ; rslt . setDelegateDescriptors ( delegateDescriptors ) ; return rslt ;
|
public class RouteImpl { /** * Wraps the route in RouteImpl
* @ param path the path
* @ param route the route
* @ return the wrapped route */
public static RouteImpl create ( final String path , final Route route ) { } }
|
return create ( path , DEFAULT_ACCEPT_TYPE , route ) ;
|
public class PvmExecutionImpl { /** * perform starting behavior but don ' t execute the initial activity
* @ param variables the variables which are used for the start */
public void startWithoutExecuting ( Map < String , Object > variables ) { } }
|
initialize ( ) ; initializeTimerDeclarations ( ) ; fireHistoricProcessStartEvent ( ) ; performOperation ( PvmAtomicOperation . FIRE_PROCESS_START ) ; setActivity ( null ) ; setActivityInstanceId ( getId ( ) ) ; // set variables
setVariables ( variables ) ;
|
public class Registration { /** * Registers a repository and a dataset { @ link URIPattern } using a repository
* { @ link OptionBuilder } .
* Both patterns should produce options used by the builder to produce a
* { @ link DatasetRepository } . The dataset pattern should also produce a
* " dataset " option that will be passed to the repository ' s load method as
* the dataset name .
* @ param repoPattern a { @ code URIPattern } for a { @ code DatasetRepository }
* @ param datasetPattern a { @ code URIPattern } for a { @ code Dataset }
* @ param repoBuilder a { @ code OptionBuilder } valid for both URIs */
public static void register ( URIPattern repoPattern , URIPattern datasetPattern , OptionBuilder < DatasetRepository > repoBuilder ) { } }
|
REPO_PATTERNS . put ( repoPattern , repoBuilder ) ; DATASET_PATTERNS . put ( datasetPattern , repoBuilder ) ; // the first dataset pattern for a repository pattern is always used
if ( ! REPO_TO_DATASET_PATTERNS . containsKey ( repoPattern ) ) { REPO_TO_DATASET_PATTERNS . put ( repoPattern , datasetPattern ) ; }
|
public class SearchIndex { /** * Merges the fulltext indexed fields of the aggregated node states into
* < code > doc < / code > .
* @ param state
* the node state on which < code > doc < / code > was created .
* @ param doc
* the lucene document with index fields from < code > state < / code > .
* @ param loadAllProperties
* Indicates whether all the properties should be loaded using the method
* { @ link ItemDataConsumer # getChildPropertiesData ( org . exoplatform . services . jcr . datamodel . NodeData ) } */
protected void mergeAggregatedNodeIndexes ( NodeData state , Document doc , boolean loadAllProperties , VolatileIndex volatileIndex ) { } }
|
if ( indexingConfig != null ) { AggregateRule [ ] aggregateRules = indexingConfig . getAggregateRules ( ) ; if ( aggregateRules == null ) { return ; } try { List fulltextTemp = new ArrayList ( ) ; ItemDataConsumer ism = getContext ( ) . getItemStateManager ( ) ; for ( int i = 0 ; i < aggregateRules . length ; i ++ ) { boolean ruleMatched = false ; // node includes
NodeData [ ] aggregates = aggregateRules [ i ] . getAggregatedNodeStates ( state ) ; if ( aggregates != null ) { ruleMatched = true ; for ( int j = 0 ; j < aggregates . length ; j ++ ) { Document aDoc = createDocument ( aggregates [ j ] , getNamespaceMappings ( ) , indexRegister . getDefaultIndex ( ) . getIndexFormatVersion ( ) , loadAllProperties ) ; if ( volatileIndex != null ) { volatileIndex . addAggregateIndexes ( aDoc ) ; } // transfer fields to doc if there are any
Fieldable [ ] fulltextFields = aDoc . getFieldables ( FieldNames . FULLTEXT ) ; if ( fulltextFields != null ) { for ( int k = 0 ; k < fulltextFields . length ; k ++ ) { for ( Fieldable fulltextField : fulltextFields ) { doc . add ( fulltextField ) ; } } doc . add ( new Field ( FieldNames . AGGREGATED_NODE_UUID , aggregates [ j ] . getIdentifier ( ) , Field . Store . NO , Field . Index . NOT_ANALYZED_NO_NORMS ) ) ; } } // make sure that fulltext fields are aligned properly
// first all stored fields , then remaining
Fieldable [ ] fulltextFields = doc . getFieldables ( FieldNames . FULLTEXT ) ; doc . removeFields ( FieldNames . FULLTEXT ) ; Arrays . sort ( fulltextFields , FIELDS_COMPARATOR_STORED ) ; for ( Fieldable f : fulltextFields ) { doc . add ( f ) ; } } // property includes
PropertyData [ ] propStates = aggregateRules [ i ] . getAggregatedPropertyStates ( state ) ; if ( propStates != null ) { ruleMatched = true ; for ( int j = 0 ; j < propStates . length ; j ++ ) { PropertyData propState = propStates [ j ] ; String namePrefix = FieldNames . createNamedValue ( getNamespaceMappings ( ) . translateName ( propState . getQPath ( ) . getName ( ) ) , "" ) ; NodeData parent = ( NodeData ) ism . getItemData ( propState . getParentIdentifier ( ) ) ; Document aDoc = createDocument ( parent , getNamespaceMappings ( ) , getIndex ( ) . getIndexFormatVersion ( ) , loadAllProperties ) ; try { // find the right fields to transfer
Fieldable [ ] fields = aDoc . getFieldables ( FieldNames . PROPERTIES ) ; for ( int k = 0 ; k < fields . length ; k ++ ) { Fieldable field = fields [ k ] ; // assume properties fields use
// SingleTokenStream
// t = field . tokenStreamValue ( ) . next ( t ) ;
field . tokenStreamValue ( ) . incrementToken ( ) ; CharTermAttribute term = field . tokenStreamValue ( ) . getAttribute ( CharTermAttribute . class ) ; PayloadAttribute payload = field . tokenStreamValue ( ) . getAttribute ( PayloadAttribute . class ) ; String value = new String ( term . buffer ( ) , 0 , term . length ( ) ) ; if ( value . startsWith ( namePrefix ) ) { // extract value
value = value . substring ( namePrefix . length ( ) ) ; // create new named value
QPath p = getRelativePath ( state , propState ) ; String path = getNamespaceMappings ( ) . translatePath ( p ) ; value = FieldNames . createNamedValue ( path , value ) ; term . setEmpty ( ) ; term . append ( value ) ; doc . add ( new Field ( field . name ( ) , new SingletonTokenStream ( term . toString ( ) , payload . getPayload ( ) ) ) ) ; doc . add ( new Field ( FieldNames . AGGREGATED_NODE_UUID , parent . getIdentifier ( ) , Field . Store . NO , Field . Index . NOT_ANALYZED_NO_NORMS ) ) ; } } } finally { Util . disposeDocument ( aDoc ) ; } } } // only use first aggregate definition that matches
if ( ruleMatched ) { break ; } } } catch ( Exception e ) { // do not fail if aggregate cannot be created
log . warn ( "Exception while building indexing aggregate for" + " node with UUID: " + state . getIdentifier ( ) , e ) ; } }
|
public class QueryBuilder {
    /**
     * Creates a new {@code TRUNCATE} query.
     *
     * @param keyspace the name of the keyspace to use; may be {@code null} (per {@code @Nullable}).
     * @param table the name of the table to truncate; must not be {@code null}.
     * @return the truncation query.
     */
    public static Truncate truncate(@Nullable CqlIdentifier keyspace, @NonNull CqlIdentifier table) {
        return new DefaultTruncate(keyspace, table);
    }
}
public class FunctionList { /** * Returns { @ link Function } s that has the parameters
* that start with given types ( but can have additional parameters . ) */
public FunctionList signatureStartsWith ( final Class ... args ) { } }
|
return filter ( new Filter ( ) { public boolean keep ( Function m ) { Class [ ] params = m . getParameterTypes ( ) ; if ( params . length < args . length ) return false ; for ( int i = 0 ; i < args . length ; i ++ ) { if ( params [ i ] != args [ i ] ) return false ; } return true ; } } ) ;
|
public class HScreenField {
    /**
     * Get the current string value in HTML.
     * <p/>
     * May want to check GetRootScreen().GetScreenType() & INPUT/DISPLAY_MODE
     * @exception DBException File exception.
     * @param out The html out stream.
     * @param iHtmlAttributes The attributes.
     * @return true if any fields were found.
     */
    public boolean printData(PrintWriter out, int iHtmlAttributes) {
        String strFieldDesc = " ";
        if (this.getScreenField().getConverter() != null)
            strFieldDesc = this.getScreenField().getConverter().getFieldDesc();
        this.printHtmlControlDesc(out, strFieldDesc, iHtmlAttributes);
        // a disabled field has the INPUT bit stripped so it renders as display-only
        if (this.getScreenField().isEnabled() == false)
            iHtmlAttributes = iHtmlAttributes & (~HtmlConstants.HTML_INPUT);
        // Display control
        if ((iHtmlAttributes & HtmlConstants.HTML_INPUT) != 0)
        {   // Input field
            String strFieldName = this.getHtmlFieldParam();
            Convert converter = this.getScreenField().getConverter();
            int iMaxSize = 10;  // fallback max length when no converter is available
            if (converter != null)
                iMaxSize = converter.getMaxLength();
            String strMaxSize = Integer.toString(iMaxSize);
            String strSize = "40";  // display width capped at 40 characters
            if (iMaxSize < 40)
                strSize = strMaxSize;
            String strValue = this.getScreenField().getSFieldValue(false, false);
            // Overriding methods will replace with: text int checkbox radio hidden float date url textbox
            String strControlType = this.getInputType(null);
            this.printInputControl(out, strFieldDesc, strFieldName, strSize, strMaxSize, strValue, strControlType, iHtmlAttributes);
        }
        else
            this.printDisplayControl(out);
        return true;
    }
}
public class BlocksGroup { /** * First block from this group with specified resource id . */
@ CheckForNull public Block first ( String resourceId ) { } }
|
for ( Block block : blocks ) { if ( resourceId . equals ( block . getResourceId ( ) ) ) { return block ; } } return null ;
|
public class EsaSubsystemFeatureDefinitionImpl { /** * Create a new instance of this class for the supplied ESA file .
* @ param esa The ESA to load
* @ return The { @ link EsaSubsystemFeatureDefinitionImpl } for working with the properties of the ESA
* @ throws ZipException
* @ throws IOException */
public static EsaSubsystemFeatureDefinitionImpl constructInstance ( File esa ) throws ZipException , IOException { } }
|
// Find the manifest - case isn ' t guaranteed so do a search
ZipFile zip = new ZipFile ( esa ) ; Enumeration < ? extends ZipEntry > zipEntries = zip . entries ( ) ; ZipEntry subsystemEntry = null ; while ( zipEntries . hasMoreElements ( ) ) { ZipEntry nextEntry = zipEntries . nextElement ( ) ; if ( "OSGI-INF/SUBSYSTEM.MF" . equalsIgnoreCase ( nextEntry . getName ( ) ) ) { subsystemEntry = nextEntry ; } } return new EsaSubsystemFeatureDefinitionImpl ( zip . getInputStream ( subsystemEntry ) , zip ) ;
|
public class SAMLCredentialPersonAttributeDao { /** * Extracts the string value of the XMLObject depending upon its type .
* @ param xmlo XMLObject
* @ return String value of object . Null if unable to convert object to string . */
private String extractStringValue ( XMLObject xmlo ) { } }
|
if ( xmlo instanceof XSString ) { return ( ( XSString ) xmlo ) . getValue ( ) ; } else if ( xmlo instanceof XSAnyImpl ) { return ( ( XSAnyImpl ) xmlo ) . getTextContent ( ) ; } logger . warn ( "Unable to map attribute class {} to String. Unknown type. Enable TRACE logging to see attribute name" , xmlo . getClass ( ) ) ; return null ;
|
public class Nfs3 {
    /**
     * Issues the wrapped NFSv3 READ call and returns its response.
     *
     * The response handler writes the read data into the caller-supplied
     * <code>bytes</code> buffer starting at <code>position</code>.
     *
     * @see com.emc.ecs.nfsclient.nfs.Nfs#wrapped_getRead(com.emc.ecs.nfsclient.nfs.NfsReadRequest, byte[], int)
     */
    public Nfs3ReadResponse wrapped_getRead(NfsReadRequest request, final byte[] bytes, final int position) throws IOException {
        NfsResponseHandler<Nfs3ReadResponse> responseHandler = new NfsResponseHandler<Nfs3ReadResponse>() {
            /* (non-Javadoc)
             * @see com.emc.ecs.nfsclient.rpc.RpcResponseHandler#makeNewResponse()
             */
            protected Nfs3ReadResponse makeNewResponse() {
                // each new response targets the shared buffer at the given offset
                return new Nfs3ReadResponse(bytes, position);
            }
        };
        _rpcWrapper.callRpcWrapped(request, responseHandler);
        return responseHandler.getResponse();
    }
}
public class AbstractIoSessionEx { /** * Memorizes the Subject representing the current logged on user and fires any
* currently registered SubjectChangeListeners */
protected void setSubject ( Subject subject ) { } }
|
Subject currentSubject = this . subject ; if ( ! ( currentSubject == null && subject == null ) ) { this . subject = subject ; if ( currentThread ( ) == ioThread ) { notifySubjectChanged ( subject ) ; } else { final Subject changedSubject = subject ; ioExecutor . execute ( new Runnable ( ) { @ Override public void run ( ) { notifySubjectChanged ( changedSubject ) ; } } ) ; } }
|
public class ErrorTextProvider {
    /**
     * Add an error item to be disabled.
     *
     * @param eField
     *        The field to be used. May not be <code>null</code>.
     * @param sText
     *        The text that should contain the placeholder ({@value #PLACEHOLDER})
     *        that will be replaced
     * @return this for chaining
     */
    @Nonnull
    public ErrorTextProvider addItem(@Nonnull final EField eField, @Nonnull @Nonempty final String sText) {
        // convenience overload: wrap the pair in a FormattableItem and delegate
        return addItem(new FormattableItem(eField, sText));
    }
}
public class JFapChannelOutbound {
    /**
     * Creates a new outbound connection link for the supplied virtual connection.
     * end F189000
     *
     * @param vc the virtual connection to create a link for
     * @param config the channel configuration data
     * @return a newly created {@link JFapOutboundConnLink}
     */
    public ConnectionLink getConnectionLink(VirtualConnection vc, ChannelData config) {
        if (tc.isEntryEnabled()) SibTr.entry(this, tc, "getConnectionLink", vc);
        ConnectionLink retValue = new JFapOutboundConnLink(vc, channelFactoryData, config); // D196678.10.1
        if (tc.isEntryEnabled()) SibTr.exit(this, tc, "getConnectionLink", retValue);
        return retValue;
    }
}
public class EVCacheClientSample { /** * Main Program which does some simple sets and gets . */
public static void main ( String [ ] args ) { } }
|
// set verboseMode based on the environment variable
verboseMode = ( "true" . equals ( System . getenv ( "EVCACHE_SAMPLE_VERBOSE" ) ) ) ; if ( verboseMode ) { System . out . println ( "To run this sample app without using Gradle:" ) ; System . out . println ( "java -cp " + System . getProperty ( "java.class.path" ) + " com.netflix.evcache.sample.EVCacheClientSample" ) ; } try { EVCacheClientSample evCacheClientSample = new EVCacheClientSample ( ) ; // Set ten keys to different values
for ( int i = 0 ; i < 10 ; i ++ ) { String key = "key_" + i ; String value = "data_" + i ; // Set the TTL to 24 hours
int ttl = 86400 ; evCacheClientSample . setKey ( key , value , ttl ) ; } // Do a " get " for each of those same keys
for ( int i = 0 ; i < 10 ; i ++ ) { String key = "key_" + i ; String value = evCacheClientSample . getKey ( key ) ; System . out . println ( "Get of " + key + " returned " + value ) ; } } catch ( Exception e ) { e . printStackTrace ( ) ; } // We have to call System . exit ( ) now , because some background
// threads were started without the " daemon " flag . This is
// probably a mistake somewhere , but hey , this is only a sample app .
System . exit ( 0 ) ;
|
public class AbstractRocksDBRestoreOperation {
    /**
     * Necessary clean up iff restore operation failed.
     * Closes resources quietly and in dependency order: the column family handle
     * and metrics monitor first, then the database itself.
     */
    @Override
    public void close() {
        IOUtils.closeQuietly(defaultColumnFamilyHandle);
        IOUtils.closeQuietly(nativeMetricMonitor);
        IOUtils.closeQuietly(db);
        // Making sure the already created column family options will be closed
        columnFamilyDescriptors.forEach((cfd) -> IOUtils.closeQuietly(cfd.getOptions()));
    }
}
public class CPSpecificationOptionLocalServiceBaseImpl {
    /**
     * Deletes the cp specification option with the primary key from the database.
     * Also notifies the appropriate model listeners.
     *
     * @param CPSpecificationOptionId the primary key of the cp specification option
     * @return the cp specification option that was removed
     * @throws PortalException if a cp specification option with the primary key could not be found
     */
    @Indexable(type = IndexableType.DELETE)
    @Override
    public CPSpecificationOption deleteCPSpecificationOption(long CPSpecificationOptionId) throws PortalException {
        // delegate straight to the persistence layer; @Indexable removes the search index entry
        return cpSpecificationOptionPersistence.remove(CPSpecificationOptionId);
    }
}
public class CascadeDeleteCapable { /** * deleteCascades .
* @ param entity
* a { @ link java . lang . Object } object . */
public void deleteCascades ( Object entity ) { } }
|
if ( entity != null ) { List < Field > fields = ClassUtil . getAnnotatedFields ( entity . getClass ( ) , Reference . class ) ; for ( Field field : fields ) { Cascade [ ] cascades = ClassUtil . getFieldAnnotationValue ( "cascade" , field , Reference . class , Cascade [ ] . class ) ; if ( Arrays . asList ( cascades ) . contains ( Cascade . DELETE ) ) { Object referencedEntity = ClassUtil . getValueOfField ( field , entity ) ; if ( referencedEntity != null ) { try { apitraryDaoSupport . resolveApitraryEntityId ( referencedEntity ) ; apitraryDaoSupport . delete ( referencedEntity ) ; } catch ( ApitraryOrmIdException aoie ) { /* * TODO find better way for exception without breaking th cascade flow */
log . warn ( aoie ) ; } } } } }
|
public class MerlinReader { /** * Extract a duration amount from the assignment , converting a percentage
* into an actual duration .
* @ param task parent task
* @ param work duration from assignment
* @ return Duration instance */
private Duration assignmentDuration ( Task task , Duration work ) { } }
|
Duration result = work ; if ( result != null ) { if ( result . getUnits ( ) == TimeUnit . PERCENT ) { Duration taskWork = task . getWork ( ) ; if ( taskWork != null ) { result = Duration . getInstance ( taskWork . getDuration ( ) * result . getDuration ( ) , taskWork . getUnits ( ) ) ; } } } return result ;
|
public class ItemLink {
    /**
     * Checks whether this item may be deleted, additionally refusing the delete
     * while references to the item remain.
     *
     * @see com.ibm.ws.sib.msgstore.cache.xalist.Link#assertCanDelete(long)
     * @return {@code null} if the delete is allowed, otherwise the exception describing why not
     */
    public SevereMessageStoreException assertCanDelete(final PersistentTransaction transaction) {
        SevereMessageStoreException ex = super.assertCanDelete(transaction);
        if (null == ex) {
            // superclass allows the delete; still refuse while references remain
            if (0 < _referenceCount) {
                ex = new SevereMessageStoreException("Cannot delete Item with references");
                FFDCFilter.processException(ex, "ItemLink.delete", "1:111:1.104.1.1");
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "Exception : Cannot delete Item with references");
            }
        }
        return ex;
    }
}
public class AddOn { /** * Tells whether or not this add - on has a ( direct ) dependency on any of the given { @ code addOns } ( including version ) .
* @ param addOns the add - ons that will be checked
* @ return { @ code true } if it depends on any of the given add - ons , { @ code false } otherwise .
* @ since 2.4.0 */
public boolean dependsOn ( Collection < AddOn > addOns ) { } }
|
if ( dependencies == null || dependencies . getAddOns ( ) . isEmpty ( ) ) { return false ; } for ( AddOn addOn : addOns ) { if ( dependsOn ( addOn ) ) { return true ; } } return false ;
|
public class DatastoreMutationPool {
    /**
     * Adds a mutation deleting the given key to the pool, flushing the pending
     * deletes first if adding it would exceed the configured byte limit, and
     * afterwards if the count limit is reached.
     */
    public void delete(Key key) {
        // This is probably a serious overestimation, but I can't see a good
        // way to find the size in the public API.
        int bytesHere = KeyFactory.keyToString(key).length();
        // Do this before the add so that we guarantee that size is never > sizeLimit
        if (deletesBytes + bytesHere >= params.getBytesLimit()) {
            flushDeletes();
        }
        deletesBytes += bytesHere;
        deletes.add(key);
        if (deletes.size() >= params.getCountLimit()) {
            flushDeletes();
        }
    }
}
public class SSTableDeletingTask {
    /**
     * for tests
     * Blocks until all deletion tasks queued so far have run: schedules a no-op
     * on the same executor and waits for it, so everything queued before it must
     * have completed first.
     */
    public static void waitForDeletions() {
        Runnable runnable = new Runnable() {
            public void run() {
            }
        };
        FBUtilities.waitOnFuture(ScheduledExecutors.nonPeriodicTasks.schedule(runnable, 0, TimeUnit.MILLISECONDS));
    }
}
public class DiagnosticsInner {
    /**
     * List Site Detector Responses.
     * List Site Detector Responses.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param siteName Site Name
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;DetectorResponseInner&gt; object if successful.
     */
    public PagedList<DetectorResponseInner> listSiteDetectorResponses(final String resourceGroupName, final String siteName) {
        // fetch the first page synchronously
        ServiceResponse<Page<DetectorResponseInner>> response =
            listSiteDetectorResponsesSinglePageAsync(resourceGroupName, siteName).toBlocking().single();
        // wrap it in a PagedList that lazily (and synchronously) fetches subsequent pages
        return new PagedList<DetectorResponseInner>(response.body()) {
            @Override
            public Page<DetectorResponseInner> nextPage(String nextPageLink) {
                return listSiteDetectorResponsesNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
}
public class Gauge { /** * Adds the given Section to the list of sections .
* Sections in the Medusa library
* usually are less eye - catching than Areas .
* @ param SECTION */
public void addSection ( final Section SECTION ) { } }
|
if ( null == SECTION ) return ; sections . add ( SECTION ) ; Collections . sort ( sections , new SectionComparator ( ) ) ; fireUpdateEvent ( SECTION_EVENT ) ;
|
public class DownloadService {
    /**
     * Builds up an HTML fragment that starts all the download jobs.
     * Returns the empty string when post-back downloads are disabled, updates are
     * suppressed, or the browser does not support postMessage.
     */
    public String generateFragment() {
        if (!DownloadSettings.usePostBack()) {
            return "";
        }
        if (neverUpdate) return "";
        if (doesNotSupportPostMessage()) return "";
        StringBuilder buf = new StringBuilder();
        if (Jenkins.getInstance().hasPermission(Jenkins.READ)) {
            long now = System.currentTimeMillis();
            for (Downloadable d : Downloadable.all()) {
                // only schedule a download when it is due and the last attempt was >= 10s ago
                if (d.getDue() < now && d.lastAttempt + TimeUnit.SECONDS.toMillis(10) < now) {
                    buf.append("<script>")
                       .append("Behaviour.addLoadEvent(function() {")
                       .append(" downloadService.download(")
                       .append(QuotedStringTokenizer.quote(d.getId()))
                       .append(',')
                       .append(QuotedStringTokenizer.quote(mapHttps(d.getUrl())))
                       .append(',')
                       .append("{version:" + QuotedStringTokenizer.quote(Jenkins.VERSION) + '}')
                       .append(',')
                       .append(QuotedStringTokenizer.quote(Stapler.getCurrentRequest().getContextPath() + '/' + getUrl() + "/byId/" + d.getId() + "/postBack"))
                       .append(',')
                       .append("null);")
                       .append("});")
                       .append("</script>");
                    d.lastAttempt = now;
                }
            }
        }
        return buf.toString();
    }
}
public class DescribeEventTopicsResult {
    /**
     * A list of SNS topic names that receive status messages from the specified Directory ID.
     * Lazily initializes the backing list so a {@code null} list is never returned.
     *
     * @return A list of SNS topic names that receive status messages from the specified Directory ID.
     */
    public java.util.List<EventTopic> getEventTopics() {
        if (eventTopics == null) {
            eventTopics = new com.amazonaws.internal.SdkInternalList<EventTopic>();
        }
        return eventTopics;
    }
}
public class Commands { /** * Closes all connections .
* @ param line Command line
* @ param callback Callback for command status */
public void closeall ( String line , DispatchCallback callback ) { } }
|
close ( null , callback ) ; if ( callback . isSuccess ( ) ) { while ( callback . isSuccess ( ) ) { close ( null , callback ) ; } // the last " close " will set it to fail so reset it to success .
callback . setToSuccess ( ) ; } // probably a holdover of the old boolean returns .
callback . setToFailure ( ) ;
|
public class JKExceptionHandlerFactory { /** * Gets the handler .
* @ param clas the clas
* @ return the handler */
public JKExceptionHandlerInfo getHandler ( final Class < ? extends Throwable > clas ) { } }
|
final JKExceptionHandler handler = this . handlers . get ( clas ) ; if ( handler != null ) { final JKExceptionHandlerInfo info = new JKExceptionHandlerInfo ( ) ; info . setExceptionClass ( clas ) . setHandler ( handler ) ; return info ; } return null ;
|
public class JSON { /** * Mutant factory for constructing an instance with specified features
* enabled . */
public JSON with ( Feature ... features ) { } }
|
int flags = _features ; for ( Feature feature : features ) { flags |= feature . mask ( ) ; } return _with ( flags ) ;
|
public class AtlasKnoxSSOAuthenticationFilter { /** * Create the URL to be used for authentication of the user in the absence
* of a JWT token within the incoming request .
* @ param request for getting the original request URL
* @ return url to use as login url for redirect */
protected String constructLoginURL ( HttpServletRequest request , boolean isXMLRequest ) { } }
|
String delimiter = "?" ; if ( authenticationProviderUrl . contains ( "?" ) ) { delimiter = "&" ; } StringBuilder loginURL = new StringBuilder ( ) ; if ( isXMLRequest ) { String atlasApplicationURL = "" ; String referalURL = request . getHeader ( "referer" ) ; if ( referalURL == null ) { atlasApplicationURL = request . getScheme ( ) + "://" + request . getServerName ( ) + ":" + request . getServerPort ( ) + request . getContextPath ( ) ; } else { atlasApplicationURL = referalURL ; } loginURL . append ( authenticationProviderUrl ) . append ( delimiter ) . append ( originalUrlQueryParam ) . append ( "=" ) . append ( atlasApplicationURL ) ; } else { loginURL . append ( authenticationProviderUrl ) . append ( delimiter ) . append ( originalUrlQueryParam ) . append ( "=" ) . append ( request . getRequestURL ( ) . append ( getOriginalQueryString ( request ) ) ) ; } return loginURL . toString ( ) ;
|
public class ApplicationDetail { /** * Describes reference data sources configured for the application . For more information , see < a
* href = " http : / / docs . aws . amazon . com / kinesisanalytics / latest / dev / how - it - works - input . html " > Configuring Application
* Input < / a > .
* @ param referenceDataSourceDescriptions
* Describes reference data sources configured for the application . For more information , see < a
* href = " http : / / docs . aws . amazon . com / kinesisanalytics / latest / dev / how - it - works - input . html " > Configuring
* Application Input < / a > . */
public void setReferenceDataSourceDescriptions ( java . util . Collection < ReferenceDataSourceDescription > referenceDataSourceDescriptions ) { } }
|
if ( referenceDataSourceDescriptions == null ) { this . referenceDataSourceDescriptions = null ; return ; } this . referenceDataSourceDescriptions = new java . util . ArrayList < ReferenceDataSourceDescription > ( referenceDataSourceDescriptions ) ;
|
public class MetaModel {
    /**
     * Finds all attribute names except for id.
     * The result is computed once and cached in {@code attributeNamesNoId}.
     *
     * @return all attribute names except for id.
     */
    public Set<String> getAttributeNamesSkipId() {
        if (attributeNamesNoId == null) {
            // no one cares about unfortunate multi-threading timing with 2 instances created
            // if someone does, use DCL with volatile
            Set<String> attributesNames = new CaseInsensitiveSet(getAttributeNames());
            attributesNames.remove(getIdName());
            attributeNamesNoId = attributesNames;
        }
        return attributeNamesNoId;
    }
}
public class PEMUtils { /** * Return a hexadecimal representation of a byte array
* @ param b a byte array
* @ return String containing the hexadecimal representation */
public final static String toHex ( byte [ ] b ) { } }
|
char [ ] buf = new char [ b . length * 2 ] ; int i , j , k ; i = j = 0 ; for ( ; i < b . length ; i ++ ) { k = b [ i ] ; buf [ j ++ ] = hex [ ( k >>> 4 ) & 0x0F ] ; buf [ j ++ ] = hex [ k & 0x0F ] ; } return new String ( buf ) ;
|
public class AttributeWidgetFactory { /** * Create a widget for an attribute of a { @ link Feature } .
* @ param feature the feature of the attribute .
* @ param descriptor the descriptor of the attribute of the feature .
* @ return the ( possible custom ) widget for a feature attribute . */
public Widget createAttributeWidget ( Feature feature , AttributeDescriptor descriptor ) { } }
|
Attribute < ? > attribute = feature . getAttributes ( ) . get ( descriptor . getName ( ) ) ; // Get a builder for the attribute
// attribute . getValue is e . g . StringAttribute , ImageURLAttribute , . . .
AttributeWidgetBuilder builder = builders . get ( attribute . getValue ( ) . getClass ( ) ) ; if ( builder == null ) { builder = new DefaultAttributeWidgetBuilder ( ) ; } // Build the widget and return it :
return builder . buildAttributeWidget ( attribute . getValue ( ) ) ;
|
public class BeanValidationService {
    /**
     * Called by DS to deactivate this service.
     * Clears the singleton instance and delegates deactivation to the wrapped
     * bean validation implementation.
     *
     * @param compcontext the context of this component
     */
    protected void deactivate(ComponentContext compcontext) {
        if (tc.isDebugEnabled()) {
            Tr.debug(tc, "Deactivating " + this.getClass().getName());
        }
        setInstance(null);
        this.beanValidation.deactivate(compcontext);
    }
}
public class ParametersAction { /** * Creates an { @ link VariableResolver } that aggregates all the parameters .
* If you are a { @ link BuildStep } , most likely you should call { @ link AbstractBuild # getBuildVariableResolver ( ) } . */
public VariableResolver < String > createVariableResolver ( AbstractBuild < ? , ? > build ) { } }
|
VariableResolver [ ] resolvers = new VariableResolver [ getParameters ( ) . size ( ) + 1 ] ; int i = 0 ; for ( ParameterValue p : getParameters ( ) ) { if ( p == null ) continue ; resolvers [ i ++ ] = p . createVariableResolver ( build ) ; } resolvers [ i ] = build . getBuildVariableResolver ( ) ; return new VariableResolver . Union < String > ( resolvers ) ;
|
public class AccountsInner {
    /**
     * Updates the specified Data Lake Analytics account to add an Azure Storage account.
     *
     * @param resourceGroupName The name of the Azure resource group that contains the Data Lake Analytics account.
     * @param accountName The name of the Data Lake Analytics account to which to add the Azure Storage account.
     * @param storageAccountName The name of the Azure Storage account to add
     * @param parameters The parameters containing the access key and optional suffix for the Azure Storage Account.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void addStorageAccount(String resourceGroupName, String accountName, String storageAccountName, AddStorageAccountParameters parameters) {
        // blocking wrapper over the async service call; result body is discarded
        addStorageAccountWithServiceResponseAsync(resourceGroupName, accountName, storageAccountName, parameters).toBlocking().single().body();
    }
}
public class ViolationRest { /** * Returns the information of an specific violation given an uuid If the
* violation it is not in the database , it returns 404 with empty payload
* < pre >
* GET / violations / { violation _ uuid }
* Request :
* GET / violation HTTP / 1.1
* Accept : application / xml
* Response :
* HTTP / 1.1 200 OK
* Content - type : application / xml
* { @ code
* < ? xml version = " 1.0 " encoding = " UTF - 8 " ? >
* < violation > . . . < / violation >
* < / pre >
* Example :
* < li > curl
* http : / / localhost : 8080 / sla - service / violations / ? agrementId = agreement04 < / li >
* @ return violations according to parameters in the query string . */
@ GET @ Path ( "{uuid}" ) @ Produces ( MediaType . APPLICATION_XML ) public Response getViolationByUuid ( @ PathParam ( "uuid" ) UUID violationUuid ) { } }
|
logger . debug ( "StartOf getViolationByUuid - REQUEST for /violations/" + violationUuid ) ; String serializedViolations = null ; ViolationHelper violationRestHelper = getViolationHelper ( ) ; try { serializedViolations = violationRestHelper . getViolationByUUID ( violationUuid ) ; } catch ( HelperException e ) { logger . info ( "getViolationByUuid exception:" + e . getMessage ( ) ) ; return buildResponse ( e ) ; } logger . debug ( "EndOf getViolationByUuid" ) ; return buildResponse ( 200 , serializedViolations ) ;
|
public class LockSupport {
    /**
     * Disables the current thread for thread scheduling purposes unless the
     * permit is available.
     *
     * <p>If the permit is available then it is consumed and the call returns
     * immediately; otherwise
     * the current thread becomes disabled for thread scheduling
     * purposes and lies dormant until one of three things happens:
     *
     * <ul>
     * <li>Some other thread invokes {@link #unpark unpark} with the
     * current thread as the target; or
     *
     * <li>Some other thread {@linkplain Thread#interrupt interrupts}
     * the current thread; or
     *
     * <li>The call spuriously (that is, for no reason) returns.
     * </ul>
     *
     * <p>This method does <em>not</em> report which of these caused the
     * method to return. Callers should re-check the conditions which caused
     * the thread to park in the first place. Callers may also determine,
     * for example, the interrupt status of the thread upon return.
     *
     * @param blocker the synchronization object responsible for this
     *        thread parking
     * @since 1.6
     */
    public static void park(Object blocker) {
        Thread t = Thread.currentThread();
        // publish the blocker before parking so monitoring/diagnostic tools can see it
        setBlocker(t, blocker);
        U.park(false, 0L);
        // clear the blocker after waking so a stale reference is not reported
        setBlocker(t, null);
    }
}
public class CmsDialog { /** * Builds a button row with a " set " , an " ok " , and a " cancel " button . < p >
* @ param setAttributes additional attributes for the " set " button
* @ param okAttributes additional attributes for the " ok " button
* @ param cancelAttributes additional attributes for the " cancel " button
* @ return the button row */
public String dialogButtonsSetOkCancel ( String setAttributes , String okAttributes , String cancelAttributes ) { } }
|
return dialogButtons ( new int [ ] { BUTTON_SET , BUTTON_OK , BUTTON_CANCEL } , new String [ ] { setAttributes , okAttributes , cancelAttributes } ) ;
|
public class DateUtils { /** * Separates raw string input of the format MMYY into a " month " group and a " year " group .
* Either or both of these may be incomplete . This method does not check to see if the input
* is valid .
* @ param expiryInput up to four characters of user input
* @ return a length - 2 array containing the first two characters in the 0 index , and the last
* two characters in the 1 index . " 123 " gets split into { " 12 " , " 3 " } , and " 1 " becomes { " 1 " , " " } . */
@ Size ( 2 ) @ NonNull static String [ ] separateDateStringParts ( @ NonNull @ Size ( max = 4 ) String expiryInput ) { } }
|
String [ ] parts = new String [ 2 ] ; if ( expiryInput . length ( ) >= 2 ) { parts [ 0 ] = expiryInput . substring ( 0 , 2 ) ; parts [ 1 ] = expiryInput . substring ( 2 ) ; } else { parts [ 0 ] = expiryInput ; parts [ 1 ] = "" ; } return parts ;
|
public class LottieCompositionFactory {
    /**
     * Fetch an animation from an http url. Once it is downloaded once, Lottie will cache the file to disk for
     * future use. Because of this, you may call `fromUrl` ahead of time to warm the cache if you think you
     * might need an animation in the future.
     */
    public static LottieTask<LottieComposition> fromUrl(final Context context, final String url) {
        // cache key is the url itself, prefixed so it cannot collide with other cache kinds
        String urlCacheKey = "url_" + url;
        return cache(urlCacheKey, new Callable<LottieResult<LottieComposition>>() {
            @Override
            public LottieResult<LottieComposition> call() {
                // performed lazily by the task machinery; blocks on the network fetch
                return NetworkFetcher.fetchSync(context, url);
            }
        });
    }
}
public class Application { /** * For each version in { @ link # versions } is tested , if it is already
* installed . If not already installed , the version is installed . Only if
* < code > _ withDependency < / code > is defined , also the { @ link # dependencies }
* are installed .
* @ param _ userName name of the installation user
* @ param _ password password of the installation user
* @ param _ profiles set of profile to be applied
* @ param _ compile compile during the install / if null the setting from the version . xml applies
* @ param _ withDependency must the dependency also installed ?
* @ throws InstallationException if installation failed */
protected void install ( final String _userName , final String _password , final Set < Profile > _profiles , final Boolean _compile , final boolean _withDependency ) throws InstallationException { } }
|
// install dependency if required
// Each dependency is installed with _withDependency=false, so only one level of
// transitive dependencies is resolved from here.
if ( _withDependency ) { for ( final Dependency dependency : this . dependencies ) { dependency . resolve ( ) ; final Application appl = Application . getApplicationFromJarFile ( dependency . getJarFile ( ) , this . classpathElements ) ; appl . install ( _userName , _password , dependency . getProfiles ( ) , _compile , false ) ; } } // reload cache ( if possible )
reloadCache ( ) ; // load latest installed versions
// Read the latest installed version numbers inside a throw-away context: the context is
// opened with begin() and always rolled back — this is a read-only lookup.
// Versions <= the latest installed number are skipped; newer ones are installed in order.
final Map < String , Integer > latestVersions ; try { Context . begin ( ) ; EFapsClassLoader . getOfflineInstance ( getClass ( ) . getClassLoader ( ) ) ; latestVersions = this . install . getLatestVersions ( ) ; Context . rollback ( ) ; } catch ( final EFapsException e ) { throw new InstallationException ( "Could not get information about installed versions" , e ) ; } final Integer latestVersion = latestVersions . get ( this . application ) ; Application . LOG . info ( "Install application '" + this . application + "'" ) ; for ( final ApplicationVersion version : this . versions ) { Application . LOG . info ( "Check version '{}'" , version . getNumber ( ) ) ; if ( _compile != null ) { version . setCompile ( _compile ) ; } if ( latestVersion != null && version . getNumber ( ) < latestVersion ) { if ( Application . LOG . isInfoEnabled ( ) ) { Application . LOG . info ( "Version " + version . getNumber ( ) + " already installed" ) ; } } else { if ( Application . LOG . isInfoEnabled ( ) ) { Application . LOG . info ( "Starting installation of version " + version . getNumber ( ) ) ; final String desc = version . getDescription ( ) ; if ( ! "" . equals ( desc ) ) { Application . LOG . info ( desc ) ; } } storeVersion ( _userName , version . getNumber ( ) ) ; try { version . install ( this . install , getLastVersion ( ) . getNumber ( ) , _profiles , _userName , _password ) ; // CHECKSTYLE : OFF
} catch ( final Exception e ) { // CHECKSTYLE : ON
// Any failure of a single version aborts the whole installation, preserving the cause.
throw new InstallationException ( "Installation failed" , e ) ; } if ( Application . LOG . isInfoEnabled ( ) ) { Application . LOG . info ( "Finished installation of version " + version . getNumber ( ) ) ; } } } // reload cache ( if possible )
reloadCache ( ) ;
|
public class RESTClient { /** * Writes and reads the XOP attachment using a CXF JAX - RS WebClient .
* Note that WebClient is created with the help of JAXRSClientFactoryBean .
* JAXRSClientFactoryBean can be used when neither of the WebClient factory
* methods is appropriate . For example , in this case , an " mtom - enabled "
* property is set on the factory bean first .
* @ throws Exception */
public void useXopAttachmentServiceWithWebClient ( ) throws Exception { } }
|
final String serviceURI = "http://localhost:" + port + "/services/attachments/xop" ; JAXRSClientFactoryBean factoryBean = new JAXRSClientFactoryBean ( ) ; factoryBean . setAddress ( serviceURI ) ; factoryBean . setProperties ( Collections . singletonMap ( org . apache . cxf . message . Message . MTOM_ENABLED , ( Object ) "true" ) ) ; WebClient client = factoryBean . createWebClient ( ) ; WebClient . getConfig ( client ) . getRequestContext ( ) . put ( "support.type.as.multipart" , "true" ) ; client . type ( "multipart/related" ) . accept ( "multipart/related" ) ; XopBean xop = createXopBean ( ) ; System . out . println ( ) ; System . out . println ( "Posting a XOP attachment with a WebClient" ) ; XopBean xopResponse = client . post ( xop , XopBean . class ) ; verifyXopResponse ( xop , xopResponse ) ;
|
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcRelConnectsWithEccentricity ( ) { } }
|
if ( ifcRelConnectsWithEccentricityEClass == null ) { ifcRelConnectsWithEccentricityEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 464 ) ; } return ifcRelConnectsWithEccentricityEClass ;
|
public class CompoundComparator { /** * Add a Comparator to the end of the chain .
* < p > The Comparator will default to ascending sort order ,
* unless it is a InvertibleComparator .
* @ param comparator the Comparator to add to the end of the chain
* @ see InvertibleComparator */
public void addComparator ( Comparator < T > comparator ) { } }
|
if ( comparator instanceof InvertibleComparator ) { this . comparators . add ( ( InvertibleComparator < T > ) comparator ) ; } else { this . comparators . add ( new InvertibleComparator < T > ( comparator ) ) ; }
|
public class XmlContentBuilder { /** * Build XML for any JCR content .
* @ param content Content with properties and nested nodes
* @ return JCR XML */
public Document buildContent ( Map < String , Object > content ) { } }
|
Document doc = documentBuilder . newDocument ( ) ; String primaryType = StringUtils . defaultString ( ( String ) content . get ( PN_PRIMARY_TYPE ) , NT_UNSTRUCTURED ) ; Element jcrRoot = createJcrRoot ( doc , primaryType ) ; exportPayload ( doc , jcrRoot , content ) ; return doc ;
|
public class EntityBeanTypeImpl { /** * Returns all < code > jms - connection - factory < / code > elements
* @ return list of < code > jms - connection - factory < / code > */
public List < JmsConnectionFactoryType < EntityBeanType < T > > > getAllJmsConnectionFactory ( ) { } }
|
List < JmsConnectionFactoryType < EntityBeanType < T > > > list = new ArrayList < JmsConnectionFactoryType < EntityBeanType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "jms-connection-factory" ) ; for ( Node node : nodeList ) { JmsConnectionFactoryType < EntityBeanType < T > > type = new JmsConnectionFactoryTypeImpl < EntityBeanType < T > > ( this , "jms-connection-factory" , childNode , node ) ; list . add ( type ) ; } return list ;
|
public class XmlParser { /** * Parse a content particle .
* < pre >
* [ 48 ] cp : : = ( Name | choice | seq ) ( ' ? ' | ' * ' | ' + ' ) ?
* < / pre > */
private void parseCp ( ) throws Exception { } }
|
if ( tryRead ( '(' ) ) { dataBufferAppend ( '(' ) ; parseElements ( readBuffer ) ; } else { dataBufferAppend ( readNmtoken ( true ) ) ; char c = readCh ( ) ; switch ( c ) { case '?' : case '*' : case '+' : dataBufferAppend ( c ) ; break ; default : unread ( c ) ; break ; } }
|
public class ServerEnvironment { /** * Instantiates an assembly and starts startable components .
* If an extended class path is provided , eligible classes will be loaded by ExtendedClassPathClassLoader .
* Note : classes in package org . ijsberg . iglu . configuration will always be loaded by the default class loader .
* Argument " - rou " , reset - on - update enables automatic reset of the application if updated classes are detected
* by ExtendedClassPathClassLoader . This function may be used in a development environment .
* @ param args < assembly class > [ - xcl < extended class path > ] [ - rou ]
* @ throws Exception
* @ see ExtendedClassPathClassLoader */
public static void main ( String [ ] args ) throws Exception { } }
|
if ( args . length == 0 ) { printUsage ( ) ; } else { System . out . println ( "Creating server environment for assembly " + args [ 0 ] ) ; server = new ServerEnvironment ( args ) ; System . out . println ( "Starting server ..." ) ; server . start ( ) ; System . out . println ( "... Server started" ) ; }
|
public class Parser { /** * block code */
private Node parseBlockCode ( ) { } }
|
Token tok = this . expect ( BlockCode . class ) ; ExpressionNode node ; Token body = this . peek ( ) ; String text ; if ( body instanceof PipelessText ) { this . advance ( ) ; text = StringUtils . join ( body . getValues ( ) , "\n" ) ; } else { text = "" ; } node = new ExpressionNode ( ) ; node . setValue ( text ) ; node . setLineNumber ( tok . getLineNumber ( ) ) ; return node ;
|
public class Cache { /** * 返回有序集 key 中 , 所有 score 值介于 min 和 max 之间 ( 包括等于 min 或 max ) 的成员 。
* 有序集成员按 score 值递增 ( 从小到大 ) 次序排列 。 */
@ SuppressWarnings ( "rawtypes" ) public Set zrangeByScore ( Object key , double min , double max ) { } }
|
Jedis jedis = getJedis ( ) ; try { Set < byte [ ] > data = jedis . zrangeByScore ( keyToBytes ( key ) , min , max ) ; Set < Object > result = new LinkedHashSet < Object > ( ) ; // 有序集合必须 LinkedHashSet
valueSetFromBytesSet ( data , result ) ; return result ; } finally { close ( jedis ) ; }
|
public class QueryCustomizerDefaultImpl { /** * / * ( non - Javadoc )
* @ see org . apache . ojb . broker . metadata . AttributeContainer # getAttribute ( java . lang . String , java . lang . String ) */
public String getAttribute ( String attributeName , String defaultValue ) { } }
|
String result = defaultValue ; if ( m_attributeList != null ) { result = ( String ) m_attributeList . get ( attributeName ) ; if ( result == null ) { result = defaultValue ; } } return result ;
|
public class ResourceConverter { /** * Parses out included resources excluding relationships .
* @ param parent root node
* @ return map of identifier / resource pairs
* @ throws IOException
* @ throws IllegalAccessException
* @ throws InstantiationException */
private Map < String , Object > getIncludedResources ( JsonNode parent ) throws IOException , IllegalAccessException , InstantiationException { } }
|
Map < String , Object > result = new HashMap < > ( ) ; if ( parent . has ( INCLUDED ) ) { for ( JsonNode jsonNode : parent . get ( INCLUDED ) ) { String type = jsonNode . get ( TYPE ) . asText ( ) ; Class < ? > clazz = configuration . getTypeClass ( type ) ; if ( clazz != null ) { Object object = readObject ( jsonNode , clazz , false ) ; if ( object != null ) { result . put ( createIdentifier ( jsonNode ) , object ) ; } } else if ( ! deserializationFeatures . contains ( DeserializationFeature . ALLOW_UNKNOWN_INCLUSIONS ) ) { throw new IllegalArgumentException ( "Included section contains unknown resource type: " + type ) ; } } } return result ;
|
public class OutputFileWriter { /** * Returns the CSV output file data .
* @ param lines The lines to add to the output file
* @ return The byte array representing the CSV output file data */
private byte [ ] getCSVOutput ( List < String [ ] > lines ) { } }
|
StringWriter writer = new StringWriter ( ) ; csv = new CSVWriter ( writer , delimiter . separator ( ) . charAt ( 0 ) ) ; for ( int i = 0 ; i < lines . size ( ) ; i ++ ) { csv . writeNext ( ( String [ ] ) lines . get ( i ) , quotes ) ; } // The contents returned is the CSV string
return writer . toString ( ) . getBytes ( ) ;
|
public class ShuffleIndexInformation { /** * Get index offset for a particular reducer . */
public ShuffleIndexRecord getIndex ( int reduceId ) { } }
|
long offset = offsets . get ( reduceId ) ; long nextOffset = offsets . get ( reduceId + 1 ) ; return new ShuffleIndexRecord ( offset , nextOffset - offset ) ;
|
public class EventClient { /** * Sends a user - action - on - item request .
* @ param action name of the action performed
* @ param uid ID of the user
* @ param iid ID of the item
* @ param properties a map of properties associated with this action
* @ param eventTime timestamp of the event */
public FutureAPIResponse userActionItemAsFuture ( String action , String uid , String iid , Map < String , Object > properties , DateTime eventTime ) throws IOException { } }
|
return createEventAsFuture ( new Event ( ) . event ( action ) . entityType ( "user" ) . entityId ( uid ) . targetEntityType ( "item" ) . targetEntityId ( iid ) . properties ( properties ) . eventTime ( eventTime ) ) ;
|
public class PathOverrideService { /** * Return collection of path Ids in priority order
* @ param profileId ID of profile
* @ return collection of path Ids in priority order */
public List < Integer > getPathOrder ( int profileId ) { } }
|
ArrayList < Integer > pathOrder = new ArrayList < Integer > ( ) ; PreparedStatement queryStatement = null ; ResultSet results = null ; try ( Connection sqlConnection = sqlService . getConnection ( ) ) { queryStatement = sqlConnection . prepareStatement ( "SELECT * FROM " + Constants . DB_TABLE_PATH + " WHERE " + Constants . GENERIC_PROFILE_ID + " = ? " + " ORDER BY " + Constants . PATH_PROFILE_PATH_ORDER + " ASC" ) ; queryStatement . setInt ( 1 , profileId ) ; results = queryStatement . executeQuery ( ) ; while ( results . next ( ) ) { pathOrder . add ( results . getInt ( Constants . GENERIC_ID ) ) ; } } catch ( SQLException e ) { e . printStackTrace ( ) ; } finally { try { if ( results != null ) { results . close ( ) ; } } catch ( Exception e ) { } try { if ( queryStatement != null ) { queryStatement . close ( ) ; } } catch ( Exception e ) { } } logger . info ( "pathOrder = {}" , pathOrder ) ; return pathOrder ;
|
public class UpdateTarget { /** * A list of operations supported by the maintenance track .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setSupportedOperations ( java . util . Collection ) } or { @ link # withSupportedOperations ( java . util . Collection ) }
* if you want to override the existing values .
* @ param supportedOperations
* A list of operations supported by the maintenance track .
* @ return Returns a reference to this object so that method calls can be chained together . */
public UpdateTarget withSupportedOperations ( SupportedOperation ... supportedOperations ) { } }
|
if ( this . supportedOperations == null ) { setSupportedOperations ( new com . amazonaws . internal . SdkInternalList < SupportedOperation > ( supportedOperations . length ) ) ; } for ( SupportedOperation ele : supportedOperations ) { this . supportedOperations . add ( ele ) ; } return this ;
|
public class JsonPath { /** * Applies this JsonPath to the provided json input stream
* @ param jsonInputStream input stream to read from
* @ param configuration configuration to use
* @ param < T > expected return type
* @ return list of objects matched by the given path
* @ throws IOException */
@ SuppressWarnings ( { } }
|
"unchecked" } ) public < T > T read ( InputStream jsonInputStream , Configuration configuration ) throws IOException { notNull ( jsonInputStream , "json input stream can not be null" ) ; notNull ( configuration , "configuration can not be null" ) ; return read ( jsonInputStream , "UTF-8" , configuration ) ;
|
public class WMultiFileWidget { /** * Returns a list of file types accepted by the file input .
* @ see # setFileTypes ( Collection ) for a description of what constitutes an allowable file types
* If no types have been added an empty list is returned . An empty list indicates that all file types are accepted .
* @ return The file types accepted by this file input e . g . " image / * " , " . vis " , " text / plain " , " text / html " ,
* " application / pdf " . */
public List < String > getFileTypes ( ) { } }
|
Set < String > fileTypes = getComponentModel ( ) . fileTypes ; List < String > result ; if ( fileTypes == null || fileTypes . isEmpty ( ) ) { return Collections . emptyList ( ) ; } result = new ArrayList < > ( fileTypes ) ; return result ;
|
public class Metadata { /** * Adds a new reference to the specific group element class . If no group element class is found , then a new group
* element class . will be created .
* @ param groupName
* the group class name of
* @ param groupReference
* the new reference to be added . */
public void addGroupReference ( final String groupName , final MetadataElement groupReference ) { } }
|
groupReference . setRef ( getNamespaceValue ( groupReference . getRef ( ) ) ) ; for ( MetadataItem item : groupList ) { if ( item . getName ( ) . equals ( groupName ) && item . getNamespace ( ) . equals ( getCurrentNamespace ( ) ) ) { item . getReferences ( ) . add ( groupReference ) ; return ; } } final MetadataItem newItem = new MetadataItem ( groupName ) ; newItem . getReferences ( ) . add ( groupReference ) ; newItem . setNamespace ( getCurrentNamespace ( ) ) ; newItem . setSchemaName ( getCurrentSchmema ( ) ) ; newItem . setPackageApi ( getCurrentPackageApi ( ) ) ; newItem . setPackageImpl ( getCurrentPackageImpl ( ) ) ; groupList . add ( newItem ) ;
|
public class BrowserSteps { /** * Open Url if different with conditions .
* @ param pageKey
* is the key of page ( example : GOOGLE _ HOME )
* @ param conditions
* list of ' expected ' values condition and ' actual ' values ( { @ link com . github . noraui . gherkin . GherkinStepCondition } ) .
* @ throws TechnicalException
* is thrown if you have a technical error ( format , configuration , datas , . . . ) in NoraUi .
* Exception with { @ value com . github . noraui . utils . Messages # FAIL _ MESSAGE _ UNABLE _ TO _ OPEN _ APPLICATION } message ( with screenshot , with exception )
* @ throws FailureException
* if the scenario encounters a functional error */
@ Times ( { } }
|
@ Time ( name = "AM" ) , @ Time ( name = "{pageKey}" ) } ) @ Conditioned @ Lorsque ( "'(.*)' est ouvert[\\.|\\?]" ) @ Given ( "'(.*)' is opened[\\.|\\?]" ) public void openUrlIfDifferent ( @ TimeName ( "pageKey" ) String pageKey , List < GherkinStepCondition > conditions ) throws TechnicalException , FailureException { goToUrl ( pageKey , false ) ; // NOTE(review): delegates to goToUrl; the 'false' flag presumably suppresses forced re-navigation when the page is already open — confirm against goToUrl's contract.
|
public class WebSiteManagementClientImpl { /** * Gets the source controls available for Azure websites .
* Gets the source controls available for Azure websites .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; SourceControlInner & gt ; object */
public Observable < Page < SourceControlInner > > listSourceControlsAsync ( ) { } }
|
return listSourceControlsWithServiceResponseAsync ( ) . map ( new Func1 < ServiceResponse < Page < SourceControlInner > > , Page < SourceControlInner > > ( ) { @ Override public Page < SourceControlInner > call ( ServiceResponse < Page < SourceControlInner > > response ) { return response . body ( ) ; } } ) ;
|
public class GenerateClientCertificateRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GenerateClientCertificateRequest generateClientCertificateRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( generateClientCertificateRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( generateClientCertificateRequest . getDescription ( ) , DESCRIPTION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class PaytrailService { /** * This function can be used to validate parameters returned by failure request .
* Parameters must be validated in order to avoid hacking of payment failure . */
public boolean confirmFailure ( String orderNumber , String timestamp , String authCode ) { } }
|
String base = new StringBuilder ( ) . append ( orderNumber ) . append ( '|' ) . append ( timestamp ) . append ( '|' ) . append ( merchantSecret ) . toString ( ) ; return StringUtils . equals ( StringUtils . upperCase ( DigestUtils . md5Hex ( base ) ) , authCode ) ;
|
public class ClassFileWriter { /** * Generate the code to leave on stack the given string even if the string encoding exceeds the
* class file limit for a single string constant
* @ param k the constant */
public void addPush ( String k ) { } }
|
// Fast path: the whole constant fits within one CONSTANT_Utf8 entry.
int length = k . length ( ) ; int limit = itsConstantPool . getUtfEncodingLimit ( k , 0 , length ) ; if ( limit == length ) { addLoadConstant ( k ) ; return ; } // Split string into pieces fitting the UTF limit and generate code for
// StringBuilder sb = new StringBuilder ( length ) ;
// sb . append ( loadConstant ( piece _ 1 ) ) ;
// sb . append ( loadConstant ( piece _ N ) ) ;
// sb . toString ( ) ;
// Emission pattern per piece: DUP the builder, load the piece constant, call append,
// POP the builder reference append returns; finally toString() leaves the result on the stack.
final String SB = "java/lang/StringBuilder" ; add ( ByteCode . NEW , SB ) ; add ( ByteCode . DUP ) ; addPush ( length ) ; addInvoke ( ByteCode . INVOKESPECIAL , SB , "<init>" , "(I)V" ) ; int cursor = 0 ; for ( ; ; ) { add ( ByteCode . DUP ) ; String s = k . substring ( cursor , limit ) ; addLoadConstant ( s ) ; addInvoke ( ByteCode . INVOKEVIRTUAL , SB , "append" , "(Ljava/lang/String;)Ljava/lang/StringBuilder;" ) ; add ( ByteCode . POP ) ; if ( limit == length ) { break ; } cursor = limit ; limit = itsConstantPool . getUtfEncodingLimit ( k , limit , length ) ; } addInvoke ( ByteCode . INVOKEVIRTUAL , SB , "toString" , "()Ljava/lang/String;" ) ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.