signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class GroovyScript2RestLoader { /** * This method is useful for clients that send scripts as file in * ' multipart / * ' request body . * NOTE even we use iterator item should be only one , rule one address - one * script . This method is created just for comfort loading script from HTML * form . NOT use this script for uploading few files in body of * ' multipart / form - data ' or other type of multipart . * @ param items iterator org . apache . commons . fileupload . FileItem * @ param uriInfo javax . ws . rs . core . UriInfo * @ param repository repository name * @ param workspace workspace name * @ param path path to resource to be created * @ return Response with status ' created ' * @ request * { code } * " items " : the sending data with HTML form ( ' multipart / form - data ' ) * { code } * @ LevelAPI Provisional */ @ POST @ Consumes ( { } }
"multipart/*" } ) @ Path ( "add/{repository}/{workspace}/{path:.*}" ) public Response addScript ( Iterator < FileItem > items , @ Context UriInfo uriInfo , @ PathParam ( "repository" ) String repository , @ PathParam ( "workspace" ) String workspace , @ PathParam ( "path" ) String path ) { Session ses = null ; try { ses = sessionProviderService . getSessionProvider ( null ) . getSession ( workspace , repositoryService . getRepository ( repository ) ) ; Node node = ( Node ) ses . getItem ( getPath ( path ) ) ; InputStream stream = null ; boolean autoload = false ; while ( items . hasNext ( ) ) { FileItem fitem = items . next ( ) ; if ( fitem . isFormField ( ) && fitem . getFieldName ( ) != null && fitem . getFieldName ( ) . equalsIgnoreCase ( "autoload" ) ) { autoload = Boolean . valueOf ( fitem . getString ( ) ) ; } else if ( ! fitem . isFormField ( ) ) { stream = fitem . getInputStream ( ) ; } } createScript ( node , getName ( path ) , autoload , stream ) ; ses . save ( ) ; URI location = uriInfo . getBaseUriBuilder ( ) . path ( getClass ( ) , "getScript" ) . build ( repository , workspace , path ) ; return Response . created ( location ) . build ( ) ; } catch ( PathNotFoundException e ) { String msg = "Path " + path + " does not exists" ; LOG . error ( msg ) ; return Response . status ( Response . Status . NOT_FOUND ) . entity ( msg ) . entity ( MediaType . TEXT_PLAIN ) . build ( ) ; } catch ( Exception e ) { LOG . error ( e . getMessage ( ) , e ) ; return Response . status ( Response . Status . INTERNAL_SERVER_ERROR ) . entity ( e . getMessage ( ) ) . type ( MediaType . TEXT_PLAIN ) . build ( ) ; } finally { if ( ses != null ) { ses . logout ( ) ; } }
public class QueryExecuter { /** * Gets neighborhood of the source set . * @ param sourceSet seed to the query * @ param model BioPAX model * @ param limit neigborhood distance to get * @ param direction UPSTREAM , DOWNSTREAM or BOTHSTREAM * @ param filters for filtering graph elements * @ return BioPAX elements in the result set */ public static Set < BioPAXElement > runNeighborhood ( Set < BioPAXElement > sourceSet , Model model , int limit , Direction direction , Filter ... filters ) { } }
Graph graph ; if ( model . getLevel ( ) == BioPAXLevel . L3 ) { if ( direction == Direction . UNDIRECTED ) { graph = new GraphL3Undirected ( model , filters ) ; direction = Direction . BOTHSTREAM ; } else { graph = new GraphL3 ( model , filters ) ; } } else return Collections . emptySet ( ) ; Set < Node > source = prepareSingleNodeSet ( sourceSet , graph ) ; if ( sourceSet . isEmpty ( ) ) return Collections . emptySet ( ) ; NeighborhoodQuery query = new NeighborhoodQuery ( source , direction , limit ) ; Set < GraphObject > resultWrappers = query . run ( ) ; return convertQueryResult ( resultWrappers , graph , true ) ;
public class LssClient { /** * List all your live presets . * @ return The list of all your live presets */ public ListPresetsResponse listPresets ( ) { } }
ListPresetsRequest request = new ListPresetsRequest ( ) ; InternalRequest internalRequest = createRequest ( HttpMethodName . GET , request , LIVE_PRESET ) ; return invokeHttpClient ( internalRequest , ListPresetsResponse . class ) ;
public class BkBasic { /** * Adds custom headers with information about socket . * @ param req Request * @ param socket Socket * @ return Request with custom headers */ @ SuppressWarnings ( "PMD.AvoidDuplicateLiterals" ) private static Request addSocketHeaders ( final Request req , final Socket socket ) { } }
return new RqWithHeaders ( req , String . format ( "%s: %s" , BkBasic . LOCALADDR , socket . getLocalAddress ( ) . getHostAddress ( ) ) , String . format ( "%s: %d" , BkBasic . LOCALPORT , socket . getLocalPort ( ) ) , String . format ( "%s: %s" , BkBasic . REMOTEADDR , socket . getInetAddress ( ) . getHostAddress ( ) ) , String . format ( "%s: %d" , BkBasic . REMOTEADDR , socket . getPort ( ) ) ) ;
public class BeanGenerator { /** * TODO To complete . * @ param javaClass * @ param body */ private void writePropertyGetter ( final JavaClassSource javaClass , final Property propDef ) { } }
if ( propDef . needGetter ( ) && propDef . needProperty ( ) ) { if ( ! javaClass . hasMethodSignature ( propDef . getPropertyName ( ) ) ) { final StringBuilder javadoc = new StringBuilder ( ) ; javadoc . append ( "@return the pSourcePath\n" ) ; String body = "" ; if ( propDef . isList ( ) ) { javaClass . addImport ( "javafx.collections.FXCollections" ) ; body = Templates . use ( TemplateName . PropertyGetter_List , propDef ) ; } else if ( propDef . isMap ( ) ) { javaClass . addImport ( "javafx.collections.FXCollections" ) ; body = Templates . use ( TemplateName . PropertyGetter_Map , propDef ) ; } else { body = Templates . use ( TemplateName . PropertyGetter , propDef ) ; } final MethodSource < ? > method = javaClass . addMethod ( ) . setName ( propDef . getPropertyName ( ) ) . setPublic ( ) . setBody ( body . toString ( ) ) . setReturnType ( propDef . getPropertyType ( ) ) ; method . getJavaDoc ( ) . setFullText ( javadoc . toString ( ) ) ; } else { // javaClass . getMethod ( propDef . getName ( ) ) . setBody ( javaClass . getMethod ( propDef . getName ( ) ) . getBody ( ) // + body . toString ( ) ) ; } }
public class PubSubInputHandler { /** * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . impl . interfaces . DownstreamControl # sendAckExpectedMessage ( long , com . ibm . ws . sib . trm . topology . Cellule , int , com . ibm . ws . sib . common . Reliability ) */ @ Override public void sendAckExpectedMessage ( long ackExpStamp , int priority , Reliability reliability , SIBUuid12 stream ) throws SIResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "sendAckExpectedMessage" , new Object [ ] { new Long ( ackExpStamp ) , new Integer ( priority ) , reliability , stream } ) ; HashMap allPubSubOutputHandlers = _destination . getAllPubSubOutputHandlers ( ) ; try { Iterator itr = allPubSubOutputHandlers . values ( ) . iterator ( ) ; while ( itr . hasNext ( ) ) { PubSubOutputHandler handler = ( PubSubOutputHandler ) itr . next ( ) ; // Send AckExpected to all OutputHandlers handler . processAckExpected ( ackExpStamp , priority , reliability , stream ) ; } } finally { // By calling the getAllPubSubOutputHandlers it will lock the // handlers _destination . unlockPubsubOutputHandlers ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "sendAckExpectedMessage" ) ;
public class QuickDiagnosingMatcherBase { /** * Returns a { @ link MatchResult } that caches the mismatch descripton . * @ param < I > * @ param item * @ return match result */ @ Override public < I > MatchResult < I > matchResult ( I item ) { } }
StringDescription mismatch = new StringDescription ( ) ; if ( matches ( item , mismatch ) ) { return new MatchResultSuccess < > ( item , this ) ; } else { return new MatchResultMismatch < > ( item , this , mismatch . toString ( ) ) ; }
public class StringMap { public Object get ( String key ) { } }
if ( key == null ) return _nullValue ; Map . Entry entry = getEntry ( key , 0 , key . length ( ) ) ; if ( entry == null ) return null ; return entry . getValue ( ) ;
public class Types { /** * Recursively skip type - variables until a class / array type is found ; capture conversion is then * ( optionally ) applied to the resulting type . This is useful for i . e . computing a site that is * suitable for a method lookup . */ public Type skipTypeVars ( Type site , boolean capture ) { } }
while ( site . hasTag ( TYPEVAR ) ) { site = site . getUpperBound ( ) ; } return capture ? capture ( site ) : site ;
public class RequestMessage { /** * Sets a list of vbuckets to stream keys from . * @ param vbs - A list of vbuckets . */ public void setVbucketlist ( short [ ] vbs ) { } }
int oldSize = ( vblist . length + 1 ) * 2 ; int newSize = ( vbs . length + 1 ) * 2 ; totalbody += newSize - oldSize ; vblist = vbs ;
public class MtasSolrComponentFacet {

  /**
   * Strips the mtas facet parameters from shard sub-requests that are not the
   * top-ids phase, so only the initial distributed phase carries them.
   *
   * (non-Javadoc)
   * @see mtas.solr.handler.component.util.MtasSolrComponent#modifyRequest(org.apache
   *      .solr.handler.component.ResponseBuilder,
   *      org.apache.solr.handler.component.SearchComponent,
   *      org.apache.solr.handler.component.ShardRequest)
   */
  public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest sreq) {
    if (sreq.params.getBool(MtasSolrSearchComponent.PARAM_MTAS, false)
        && sreq.params.getBool(PARAM_MTAS_FACET, false)) {
      if ((sreq.purpose & ShardRequest.PURPOSE_GET_TOP_IDS) != 0) {
        // do nothing — the top-ids phase keeps the facet parameters
      } else {
        // remove prefix for other requests
        Set<String> keys = MtasSolrResultUtil.getIdsFromParameters(rb.req.getParams(),
            PARAM_MTAS_FACET);
        sreq.params.remove(PARAM_MTAS_FACET);
        for (String key : keys) {
          // top-level facet parameters for this key
          sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_FIELD);
          sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_KEY);
          // nested query parameters
          Set<String> subKeys = MtasSolrResultUtil.getIdsFromParameters(rb.req.getParams(),
              PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_QUERY);
          for (String subKey : subKeys) {
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_QUERY
                + "." + subKey + "." + SUBNAME_MTAS_FACET_QUERY_TYPE);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_QUERY
                + "." + subKey + "." + SUBNAME_MTAS_FACET_QUERY_VALUE);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_QUERY
                + "." + subKey + "." + SUBNAME_MTAS_FACET_QUERY_PREFIX);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_QUERY
                + "." + subKey + "." + SUBNAME_MTAS_FACET_QUERY_IGNORE);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_QUERY
                + "." + subKey + "." + SUBNAME_MTAS_FACET_QUERY_MAXIMUM_IGNORE_LENGTH);
            // query variables nested one level deeper
            Set<String> subSubKeys = MtasSolrResultUtil.getIdsFromParameters(
                rb.req.getParams(), PARAM_MTAS_FACET + "." + key + "."
                    + NAME_MTAS_FACET_QUERY + "." + subKey + "."
                    + SUBNAME_MTAS_FACET_QUERY_VARIABLE);
            for (String subSubKey : subSubKeys) {
              sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_QUERY
                  + "." + subKey + "." + SUBNAME_MTAS_FACET_QUERY_VARIABLE + "." + subSubKey
                  + "." + SUBNAME_MTAS_FACET_QUERY_VARIABLE_NAME);
              sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_QUERY
                  + "." + subKey + "." + SUBNAME_MTAS_FACET_QUERY_VARIABLE + "." + subSubKey
                  + "." + SUBNAME_MTAS_FACET_QUERY_VARIABLE_VALUE);
            }
          }
          // nested base parameters
          subKeys = MtasSolrResultUtil.getIdsFromParameters(rb.req.getParams(),
              PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE);
          for (String subKey : subKeys) {
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_FIELD);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_TYPE);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_RANGE + "."
                + SUBNAME_MTAS_FACET_BASE_RANGE_SIZE);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_RANGE + "."
                + SUBNAME_MTAS_FACET_BASE_RANGE_BASE);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_MAXIMUM);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_MINIMUM);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_NUMBER);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_SORT_DIRECTION);
            sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_SORT_TYPE);
            // base functions nested one level deeper
            Set<String> subSubKeys = MtasSolrResultUtil.getIdsFromParameters(
                rb.req.getParams(), PARAM_MTAS_FACET + "." + key + "."
                    + NAME_MTAS_FACET_BASE + "." + subKey + "."
                    + SUBNAME_MTAS_FACET_BASE_FUNCTION);
            for (String subSubKey : subSubKeys) {
              sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                  + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_FUNCTION + "." + subSubKey
                  + "." + SUBNAME_MTAS_FACET_BASE_FUNCTION_EXPRESSION);
              sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                  + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_FUNCTION + "." + subSubKey
                  + "." + SUBNAME_MTAS_FACET_BASE_FUNCTION_KEY);
              sreq.params.remove(PARAM_MTAS_FACET + "." + key + "." + NAME_MTAS_FACET_BASE
                  + "." + subKey + "." + SUBNAME_MTAS_FACET_BASE_FUNCTION + "." + subSubKey
                  + "." + SUBNAME_MTAS_FACET_BASE_FUNCTION_TYPE);
            }
          }
        }
      }
    }
  }
}
public class TagVFilter { /** * Asynchronously resolves the tagk name to it ' s UID . On a successful lookup * the { @ link tagk _ bytes } will be set . * @ param tsdb The TSDB to use for the lookup * @ return A deferred to let the caller know that the lookup was completed . * The value will be the tag UID ( unless it ' s an exception of course ) */ public Deferred < byte [ ] > resolveTagkName ( final TSDB tsdb ) { } }
class ResolvedCB implements Callback < byte [ ] , byte [ ] > { @ Override public byte [ ] call ( final byte [ ] uid ) throws Exception { tagk_bytes = uid ; return uid ; } } return tsdb . getUIDAsync ( UniqueIdType . TAGK , tagk ) . addCallback ( new ResolvedCB ( ) ) ;
public class SearchPortletController {

  /**
   * Performs a search of the explicitly configured {@link IPortalSearchService}s. This is done
   * as an event handler so that it can run concurrently with the other portlets handling the
   * search request.
   */
  @SuppressWarnings("unchecked")
  @EventMapping(SearchConstants.SEARCH_REQUEST_QNAME_STRING)
  public void handleSearchRequest(EventRequest request, EventResponse response) {
    // UP-3887 Design flaw. Both the searchLauncher portlet instance and the search portlet
    // instance receive searchRequest and searchResult events because they are in the same
    // portlet code base (to share autosuggest_handler.jsp and because we have to calculate
    // the search portlet url for the ajax call) and share the portlet.xml which defines the
    // event handling behavior.
    // If this instance is the searchLauncher, ignore the searchResult. The search was
    // submitted to the search portlet instance.
    final String searchLaunchFname =
        request.getPreferences().getValue(SEARCH_LAUNCH_FNAME, null);
    if (searchLaunchFname != null) {
      // Noisy in debug mode so commented out log statement
      // logger.debug("SearchLauncher does not participate in SearchRequest events so
      // discarding message");
      return;
    }
    final Event event = request.getEvent();
    final SearchRequest searchQuery = (SearchRequest) event.getValue();
    // Map used to track searches that have been handled, used so that one search doesn't get
    // duplicate results
    ConcurrentMap<String, Boolean> searchHandledCache;
    final PortletSession session = request.getPortletSession();
    // Serialize cache creation on the session mutex so concurrent events share one cache.
    synchronized (org.springframework.web.portlet.util.PortletUtils.getSessionMutex(session)) {
      searchHandledCache = (ConcurrentMap<String, Boolean>) session.getAttribute(
          SEARCH_HANDLED_CACHE_NAME, PortletSession.APPLICATION_SCOPE);
      if (searchHandledCache == null) {
        // Bounded, expiring Guava cache viewed as a ConcurrentMap.
        searchHandledCache = CacheBuilder.newBuilder()
            .maximumSize(20)
            .expireAfterAccess(5, TimeUnit.MINUTES)
            .<String, Boolean>build()
            .asMap();
        session.setAttribute(SEARCH_HANDLED_CACHE_NAME, searchHandledCache,
            PortletSession.APPLICATION_SCOPE);
      }
    }
    final String queryId = searchQuery.getQueryId();
    // putIfAbsent is the atomic de-duplication point: a non-null return means
    // another event delivery already handled this queryId.
    if (searchHandledCache.putIfAbsent(queryId, Boolean.TRUE) != null) {
      // Already handled this search request
      return;
    }
    // Create the results
    final SearchResults results = new SearchResults();
    results.setQueryId(queryId);
    results.setWindowId(request.getWindowID());
    final List<SearchResult> searchResultList = results.getSearchResult();
    // Run the search for each service appending the results
    for (IPortalSearchService searchService : searchServices) {
      try {
        logger.debug("For queryId {}, query '{}', searching search service {}",
            queryId, searchQuery.getSearchTerms(), searchService.getClass().toString());
        final SearchResults serviceResults =
            searchService.getSearchResults(request, searchQuery);
        logger.debug("For queryId {}, obtained {} results from search service {}",
            queryId, serviceResults.getSearchResult().size(),
            searchService.getClass().toString());
        searchResultList.addAll(serviceResults.getSearchResult());
      } catch (Exception e) {
        // One failing service must not abort the remaining services.
        logger.warn(searchService.getClass()
            + " threw an exception when searching, it will be ignored. " + searchQuery, e);
      }
    }
    // Respond with a results event if results were found
    if (!searchResultList.isEmpty()) {
      response.setEvent(SearchConstants.SEARCH_RESULTS_QNAME, results);
    }
  }
}
public class Scope { /** * Set scope persistence class * @ param persistenceClass * Scope ' s persistence class * @ throws Exception * Exception */ public void setPersistenceClass ( String persistenceClass ) throws Exception { } }
this . persistenceClass = persistenceClass ; if ( persistenceClass != null ) { store = PersistenceUtils . getPersistenceStore ( this , persistenceClass ) ; }
public class AuthRundeckStorageTree { /** * Generate the environment for a path , based on the convention that / project / name / * maps to a project called * " name " , and anything else is within the application environment . * @ param path path * @ return authorization environment : a project environment if the path matches / project / name / * , otherwise the * application environment */ Set < Attribute > environmentForPath ( Path path ) { } }
String [ ] paths = path . getPath ( ) . split ( "/" ) ; if ( paths != null && paths . length > 2 && paths [ 0 ] . equals ( PROJECT_PATH_COMPONENT ) ) { return FrameworkProject . authorizationEnvironment ( paths [ 1 ] ) ; } else { return Framework . RUNDECK_APP_ENV ; }
public class Calendar {

  /**
   * Reconstitutes this object from a stream (i.e., deserializes it), rebuilding
   * transient state and normalizing the time zone.
   */
  private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
    final ObjectInputStream input = stream;
    input.defaultReadObject();
    // stamp[] is transient; recreate it on every deserialization.
    stamp = new int[FIELD_COUNT];
    // Starting with version 2 (not implemented yet), we expect that
    // fields[], isSet[], isTimeSet, and areFieldsSet may not be
    // streamed out anymore. We expect 'time' to be correct.
    if (serialVersionOnStream >= 2) {
      isTimeSet = true;
      if (fields == null) {
        fields = new int[FIELD_COUNT];
      }
      if (isSet == null) {
        isSet = new boolean[FIELD_COUNT];
      }
    } else if (serialVersionOnStream >= 0) {
      // Legacy stream: derive the per-field stamps from the serialized isSet flags.
      for (int i = 0; i < FIELD_COUNT; ++i) {
        stamp[i] = isSet[i] ? COMPUTED : UNSET;
      }
    }
    // Mark the in-memory object as current regardless of the stream version read.
    serialVersionOnStream = currentSerialVersion;
    // Android-changed: removed ZoneInfo support.
    // If the deserialized object has a SimpleTimeZone, try to
    // replace it with a ZoneInfo equivalent (as of 1.4) in order
    // to be compatible with the SimpleTimeZone-based
    // implementation as much as possible.
    if (zone instanceof SimpleTimeZone) {
      String id = zone.getID();
      TimeZone tz = TimeZone.getTimeZone(id);
      // Only swap when the looked-up zone has identical rules and the same id.
      if (tz != null && tz.hasSameRules(zone) && tz.getID().equals(id)) {
        zone = tz;
      }
    }
  }
}
public class CacheObjectUtil { /** * remove the cached object . * @ param cacheConfigBean the data source configuration * @ param cacheKey the key of the cache * @ return the single observable for the result , true on success , false if nothing changed , error on exception . */ public static Single < Boolean > remove ( CacheConfigBean cacheConfigBean , String cacheKey ) { } }
return SingleRxXian . call ( CacheService . CACHE_SERVICE , "cacheObjectRemove" , new JSONObject ( ) { { put ( "cacheConfig" , cacheConfigBean ) ; put ( "key" , cacheKey ) ; } } ) . map ( unitResponseObject -> { unitResponseObject . throwExceptionIfNotSuccess ( ) ; return unitResponseObject . succeeded ( ) ; } ) ;
public class ApiOvhHostingprivateDatabase { /** * Create a new IP whitelist * REST : POST / hosting / privateDatabase / { serviceName } / whitelist * @ param name [ required ] Custom name for your Whitelisted IP * @ param sftp [ required ] Authorize this IP to access sftp port * @ param ip [ required ] The IP to whitelist in your instance * @ param service [ required ] Authorize this IP to access service port * @ param serviceName [ required ] The internal name of your private database */ public OvhTask serviceName_whitelist_POST ( String serviceName , String ip , String name , Boolean service , Boolean sftp ) throws IOException { } }
String qPath = "/hosting/privateDatabase/{serviceName}/whitelist" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "ip" , ip ) ; addBody ( o , "name" , name ) ; addBody ( o , "service" , service ) ; addBody ( o , "sftp" , sftp ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhTask . class ) ;
public class TaskManagerTabPanel { /** * < / editor - fold > */ private void jButtonSaveConfigActionPerformed ( java . awt . event . ActionEvent evt ) { } }
String path = "" ; BaseFileChooser propDir = new BaseFileChooser ( ) ; int selection = propDir . showSaveDialog ( this ) ; if ( selection == JFileChooser . APPROVE_OPTION ) { path = propDir . getSelectedFile ( ) . getAbsolutePath ( ) ; SaveConfig ( path ) ; }
public class CopticDate { /** * Obtains a { @ code CopticDate } from a temporal object . * This obtains a date in the Coptic calendar system based on the specified temporal . * A { @ code TemporalAccessor } represents an arbitrary set of date and time information , * which this factory converts to an instance of { @ code CopticDate } . * The conversion typically uses the { @ link ChronoField # EPOCH _ DAY EPOCH _ DAY } * field , which is standardized across calendar systems . * This method matches the signature of the functional interface { @ link TemporalQuery } * allowing it to be used as a query via method reference , { @ code CopticDate : : from } . * @ param temporal the temporal object to convert , not null * @ return the date in Coptic calendar system , not null * @ throws DateTimeException if unable to convert to a { @ code CopticDate } */ public static CopticDate from ( TemporalAccessor temporal ) { } }
if ( temporal instanceof CopticDate ) { return ( CopticDate ) temporal ; } return CopticDate . ofEpochDay ( temporal . getLong ( EPOCH_DAY ) ) ;
public class PDFView {

  /**
   * Load all the parts around the center of the screen,
   * taking into account X and Y offsets, zoom level, and
   * the current page displayed.
   */
  public void loadPages() {
    // Layout not ready yet: nothing to load until the optimal page size is known.
    if (optimalPageWidth == 0 || optimalPageHeight == 0) {
      return;
    }
    // Cancel all current tasks
    renderingAsyncTask.removeAllTasks();
    cacheManager.makeANewSet();
    // Find current index in filtered user pages
    int index = currentPage;
    if (filteredUserPageIndexes != null) {
      index = filteredUserPageIndexes[currentPage];
    }
    // Loop through the pages like [...] [4] [2] [0] [1] [3] [...]
    // loading as many parts as it can, alternating after/before the current page.
    int parts = 0;
    for (int i = 0; i <= Constants.LOADED_SIZE / 2 && parts < CACHE_SIZE; i++) {
      parts += loadPage(index + i, CACHE_SIZE - parts);
      if (i != 0 && parts < CACHE_SIZE) {
        parts += loadPage(index - i, CACHE_SIZE - parts);
      }
    }
    // Trigger a redraw with the newly scheduled parts.
    invalidate();
  }
}
public class CmsSitemapTreeNodeData { /** * Checks if this entry is marked as ' do not translate ' for the given locale . < p > * @ param locale the locale * @ return true if the ' do not translate ' mark for the given locale is set */ public boolean isMarkedNoTranslation ( Locale locale ) { } }
if ( m_noTranslation != null ) { return CmsLocaleManager . getLocales ( m_noTranslation ) . contains ( locale ) ; } return false ;
public class CmsSessionsTable {

  /**
   * Handles the table item clicks, including clicks on images inside of a table item.<p>
   *
   * @param event the click event
   * @param itemId of the clicked row
   * @param propertyId column id
   */
  void onItemClick(MouseEvents.ClickEvent event, Object itemId, Object propertyId) {
    if (!event.isCtrlKey() && !event.isShiftKey()) {
      changeValueIfNotMultiSelect(itemId);
      // don't interfere with multi-selection using control key
      // Right click (or click on the icon column) opens the context menu for
      // the current selection.
      if (event.getButton().equals(MouseButton.RIGHT)
          || (TableProperty.Icon.equals(propertyId))) {
        m_menu.setEntries(getMenuEntries(), (Set<String>) getValue());
        m_menu.openForTable(event, itemId, propertyId, this);
      } else if (event.getButton().equals(MouseButton.LEFT)
          && TableProperty.UserName.equals(propertyId)) {
        // Left click on the user-name column shows details for the first
        // selected session id. NOTE(review): assumes the selection is
        // non-empty here — changeValueIfNotMultiSelect presumably guarantees
        // that; confirm, otherwise iterator().next() can throw.
        showUserInfoWindow(((Set<String>) getValue()).iterator().next());
      }
    }
  }
}
public class DcpControl { /** * Shorthand getter to check if buffer acknowledgements are enabled . */ public boolean bufferAckEnabled ( ) { } }
String bufSize = get ( Names . CONNECTION_BUFFER_SIZE ) ; return bufSize != null && Integer . parseInt ( bufSize ) > 0 ;
public class Messages { /** * Loads Pippo internal messages & application messages and returns the merger . * @ return all messages */ private Map < String , Properties > loadRegisteredMessageResources ( ) { } }
Map < String , Properties > internalMessages = loadRegisteredMessageResources ( "pippo/pippo-messages%s.properties" ) ; Map < String , Properties > applicationMessages = loadRegisteredMessageResources ( "conf/messages%s.properties" ) ; Map < String , Properties > allMessages = new TreeMap < > ( ) ; Set < String > merged = new HashSet < > ( ) ; // create aggregate messages for ( Map . Entry < String , Properties > entry : internalMessages . entrySet ( ) ) { String language = entry . getKey ( ) ; Properties messages = entry . getValue ( ) ; allMessages . put ( language , messages ) ; if ( applicationMessages . containsKey ( language ) ) { // override internal messages with application messages messages . putAll ( applicationMessages . get ( language ) ) ; } merged . add ( language ) ; } // bring in the application languages which do not have an internal counterpart Set < String > unmerged = new HashSet < > ( applicationMessages . keySet ( ) ) ; unmerged . removeAll ( merged ) ; for ( String language : unmerged ) { allMessages . put ( language , applicationMessages . get ( language ) ) ; } return allMessages ;
public class GraphInferenceGrpcClient { /** * This method sends inference request to the GraphServer instance , and returns result as array of INDArrays * @ param graphId id of the graph * @ param inputs graph inputs with their string ides * @ return */ public INDArray [ ] output ( long graphId , Pair < String , INDArray > ... inputs ) { } }
val operands = new Operands ( ) ; for ( val in : inputs ) operands . addArgument ( in . getFirst ( ) , in . getSecond ( ) ) ; return output ( graphId , operands ) . asArray ( ) ;
public class JsApiHdrsImpl { /** * Get the value of the JMSDeliveryMode field from the message header . * Javadoc description supplied by JsJmsMessage interface . */ public final PersistenceType getJmsDeliveryMode ( ) { } }
if ( getHdr2 ( ) . getChoiceField ( JsHdr2Access . JMSDELIVERYMODE ) == JsHdr2Access . IS_JMSDELIVERYMODE_EMPTY ) { return getDerivedJmsDeliveryMode ( ) ; } else { Byte pType = ( Byte ) getHdr2 ( ) . getField ( JsHdr2Access . JMSDELIVERYMODE_DATA ) ; return PersistenceType . getPersistenceType ( pType ) ; }
public class PathUtils {

  /**
   * This method normalizes paths by removing '//' and redundant '..'.
   *
   * @param file is the path to be normalized.
   * @return A normalized {@link File} is returned.
   */
  public static File normalizePath(File file) {
    if (file == null) {
      return new File("");
    }
    String normalizedFile = file.getPath();
    // remove all '//' ...
    String quotedSeparator = Pattern.quote(File.separator);
    // NOTE(review): on Windows the separator is '\', which must be doubled to
    // act as a literal in a regex *replacement* string — presumably that is
    // what this branch is for; confirm behavior on Windows paths.
    String replacementSeparator =
        File.separator.equals("/") ? File.separator : File.separator + File.separator;
    boolean isAbsolute = file.isAbsolute() || normalizedFile.startsWith(File.separator);
    while (normalizedFile.contains(replacementSeparator + replacementSeparator)) {
      normalizedFile = normalizedFile.replaceAll(quotedSeparator + quotedSeparator,
          replacementSeparator);
    }
    // remove all redundant '..' — each match is a "segment/.." pair whose
    // segment is not itself '.' or '..'
    Pattern pattern = Pattern.compile(
        "([^\\." + replacementSeparator + "]+" + quotedSeparator + "\\.\\.)");
    Matcher matcher = pattern.matcher(normalizedFile);
    while (matcher.find()) {
      normalizedFile = normalizedFile.replace(matcher.group(1), "")
          .replaceAll(quotedSeparator + quotedSeparator, replacementSeparator);
      // Re-establish the leading separator invariant after the removal:
      // relative paths must not start with a separator, absolute ones must.
      if ((!isAbsolute) && (normalizedFile.startsWith(File.separator))) {
        normalizedFile = normalizedFile.replaceFirst(quotedSeparator, "");
      } else if ((isAbsolute) && (!normalizedFile.startsWith(File.separator))) {
        normalizedFile = File.separator + normalizedFile;
      }
      // Restart matching from scratch since the string changed.
      matcher = pattern.matcher(normalizedFile);
    }
    /* remove all '/./' and /. at the end */
    pattern = Pattern.compile(quotedSeparator + "\\." + quotedSeparator);
    matcher = pattern.matcher(normalizedFile);
    while (matcher.find()) {
      normalizedFile = normalizedFile.replace(matcher.group(0), replacementSeparator);
      matcher = pattern.matcher(normalizedFile);
    }
    if (normalizedFile.endsWith(File.separator + ".")) {
      normalizedFile = normalizedFile.substring(0, normalizedFile.length() - 2);
    }
    return new File(normalizedFile);
  }
}
public class GenericRepository { /** * Count the number of E instances . * @ param entity a sample entity whose non - null properties may be used as search hint * @ param sp carries additional search information * @ param attributes the list of attributes to the property * @ return the number of entities matching the search . */ @ Transactional public int findPropertyCount ( E entity , SearchParameters sp , Attribute < ? , ? > ... attributes ) { } }
return findPropertyCount ( entity , sp , newArrayList ( attributes ) ) ;
public class InternalXtextParser {
    /**
     * Entry point for the ReferencedMetamodel grammar rule.
     *
     * InternalXtext.g:459:1: entryRuleReferencedMetamodel returns [EObject current=null] : iv_ruleReferencedMetamodel=ruleReferencedMetamodel EOF ;
     *
     * @return the parsed ReferencedMetamodel model element, or null when a recognition error was recovered from
     * @throws RecognitionException propagated by the generated error handling
     */
    public final EObject entryRuleReferencedMetamodel() throws RecognitionException {
        EObject current = null;
        EObject iv_ruleReferencedMetamodel = null;
        try {
            // InternalXtext.g:459:60: (iv_ruleReferencedMetamodel=ruleReferencedMetamodel EOF)
            // InternalXtext.g:460:2: iv_ruleReferencedMetamodel=ruleReferencedMetamodel EOF
            {
                // Register a composite node for this rule before descending into it.
                newCompositeNode(grammarAccess.getReferencedMetamodelRule());
                pushFollow(FollowSets000.FOLLOW_1);
                iv_ruleReferencedMetamodel = ruleReferencedMetamodel();
                state._fsp--;
                current = iv_ruleReferencedMetamodel;
                // The entry rule must consume the whole input.
                match(input, EOF, FollowSets000.FOLLOW_2);
            }
        } catch (RecognitionException re) {
            // Standard ANTLR recovery: resync the stream and keep the skipped tokens.
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class StringUtils { /** * Replace the dots in the property with underscore and * transform to uppercase based on given flag . * @ param dottedProperty The property with dots , example - a . b . c * @ param uppercase To transform to uppercase string * @ return The converted value */ public static String convertDotToUnderscore ( String dottedProperty , boolean uppercase ) { } }
if ( dottedProperty == null ) { return dottedProperty ; } Optional < String > converted = Optional . of ( dottedProperty ) . map ( value -> value . replace ( '.' , '_' ) ) . map ( value -> uppercase ? value . toUpperCase ( ) : value ) ; return converted . get ( ) ;
public class ListPolicyVersionsResult {
    /**
     * A list of policy versions.
     * <p>
     * For more information about managed policy versions, see <a
     * href="https://docs.aws.amazon.com/IAM/latest/UserGuide/policies-managed-versions.html">Versioning for Managed
     * Policies</a> in the <i>IAM User Guide</i>.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setVersions(java.util.Collection)} or {@link #withVersions(java.util.Collection)} if you want to override
     * the existing values.
     *
     * @param versions a list of policy versions; must not be null (a null array raises a NullPointerException)
     * @return a reference to this object so that method calls can be chained together
     */
    public ListPolicyVersionsResult withVersions(PolicyVersion... versions) {
        // Lazily create the backing list, pre-sized for the incoming elements.
        if (this.versions == null) {
            setVersions(new com.amazonaws.internal.SdkInternalList<PolicyVersion>(versions.length));
        }
        for (PolicyVersion ele : versions) {
            this.versions.add(ele);
        }
        return this;
    }
}
public class HttpClient { /** * Put file as ' application / octet - stream ' . * @ param url URL of service * @ param response response pre - populated with request to send . Response content and * statusCode will be filled . * @ param headers http headers to add * @ param file file containing binary data to put . */ public void put ( String url , HttpResponse response , Map < String , Object > headers , File file ) { } }
HttpPut methodPut = new HttpPut ( url ) ; HttpEntity multipart = buildBodyWithFile ( file ) ; methodPut . setEntity ( multipart ) ; getResponse ( url , response , methodPut , headers ) ;
public class AmazonIdentityManagementClient { /** * Adds a new client ID ( also known as audience ) to the list of client IDs already registered for the specified IAM * OpenID Connect ( OIDC ) provider resource . * This operation is idempotent ; it does not fail or return an error if you add an existing client ID to the * provider . * @ param addClientIDToOpenIDConnectProviderRequest * @ return Result of the AddClientIDToOpenIDConnectProvider operation returned by the service . * @ throws InvalidInputException * The request was rejected because an invalid or out - of - range value was supplied for an input parameter . * @ throws NoSuchEntityException * The request was rejected because it referenced a resource entity that does not exist . The error message * describes the resource . * @ throws LimitExceededException * The request was rejected because it attempted to create resources beyond the current AWS account limits . * The error message describes the limit exceeded . * @ throws ServiceFailureException * The request processing has failed because of an unknown error , exception or failure . * @ sample AmazonIdentityManagement . AddClientIDToOpenIDConnectProvider * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / iam - 2010-05-08 / AddClientIDToOpenIDConnectProvider " * target = " _ top " > AWS API Documentation < / a > */ @ Override public AddClientIDToOpenIDConnectProviderResult addClientIDToOpenIDConnectProvider ( AddClientIDToOpenIDConnectProviderRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAddClientIDToOpenIDConnectProvider ( request ) ;
public class DomainCommandBuilder { /** * Adds a JVM arguments to the host controller ignoring { @ code null } values . * @ param args the arguments to add * @ return the builder */ public DomainCommandBuilder addHostControllerJavaOptions ( final String ... args ) { } }
if ( args != null ) { for ( String arg : args ) { addHostControllerJavaOption ( arg ) ; } } return this ;
public class ContentIdUtil { /** * Determines the content ID of a file : the path of the file relative to * the watched directory . If the watched directory is null , the content ID * is simply the name of the file . * If a prefix is being used , the prefix is added as the initial characters * in the contentId . * @ param file * @ param watchDir * @ param contentIdPrefix * @ return */ public static String getContentId ( File file , File watchDir , String contentIdPrefix ) { } }
String contentId = file . getName ( ) ; if ( null != watchDir ) { URI relativeFileURI = watchDir . toURI ( ) . relativize ( file . toURI ( ) ) ; contentId = relativeFileURI . getPath ( ) ; } if ( null != contentIdPrefix ) { contentId = contentIdPrefix + contentId ; } return contentId ;
public class ProvFactory { /** * A factory method to create an instance of an invalidation { @ link WasInvalidatedBy } * @ param id an optional identifier for a usage * @ param entity an identifier for the created < a href = " http : / / www . w3 . org / TR / prov - dm / # invalidation . entity " > entity < / a > * @ param activity an optional identifier for the < a href = " http : / / www . w3 . org / TR / prov - dm / # invalidation . activity " > activity < / a > that creates the entity * @ return an instance of { @ link WasInvalidatedBy } */ public WasInvalidatedBy newWasInvalidatedBy ( QualifiedName id , QualifiedName entity , QualifiedName activity ) { } }
WasInvalidatedBy res = of . createWasInvalidatedBy ( ) ; res . setId ( id ) ; res . setEntity ( entity ) ; res . setActivity ( activity ) ; return res ;
public class JavaNames { /** * Appends { @ code tokens } to { @ code target } in CamelCase format . * @ param tokens parts of the word * @ param target string builder the word is appended to * @ param firstToLower if true , the first character will be in lowercase */ public static StringBuilder camelCase ( final StringBuilder target , boolean firstToLower , final String ... tokens ) { } }
for ( String t : tokens ) { if ( firstToLower ) { firstToLower = false ; target . append ( t . toLowerCase ( ) ) ; } else { appendFirstToUpper ( target , t ) ; } } return target ;
public class WebLocatorAbstractBuilder { /** * < p > < b > Used for finding element process ( to generate xpath address ) < / b > < / p > * < p > Result Example : < / p > * < pre > * / / * [ @ placeholder = ' Search ' ] * < / pre > * @ param attribute eg . placeholder * @ param value eg . Search * @ param searchTypes see { @ link SearchType } * @ param < T > the element which calls this method * @ return this element */ @ SuppressWarnings ( "unchecked" ) public < T extends WebLocatorAbstractBuilder > T setAttribute ( final String attribute , String value , final SearchType ... searchTypes ) { } }
pathBuilder . setAttribute ( attribute , value , searchTypes ) ; return ( T ) this ;
public class ConcurrentRegistry { /** * Remove the given entry from the registry under the specified key , only if the value matches the one given . * @ param removeKey key of the entry to be removed . * @ param removeValue value of the entry to be removed . * @ return true = > if the value was removed ; false = > otherwise . */ public boolean remove ( K removeKey , V removeValue ) { } }
boolean removedInd = this . store . remove ( removeKey , removeValue ) ; if ( removedInd ) { this . notificationExecutor . fireRemoveNotification ( this . listeners . iterator ( ) , removeKey , removeValue ) ; } return removedInd ;
public class PodLocalImpl { /** * Create cluster pods using the configuration as a hint . Both the cluster * and cluster _ hub pods use this . */ private ServerPod [ ] buildClusterServers ( ClusterHeartbeat cluster , int serverCount ) { } }
ArrayList < ServerPod > serversPod = new ArrayList < > ( ) ; for ( ServerHeartbeat server : cluster . getServers ( ) ) { ServerPod serverPod = new ServerPod ( serversPod . size ( ) ) ; serversPod . add ( serverPod ) ; // XXX : need to manage seed servers // serverPod . setHintServerId ( server . getId ( ) ) ; } /* if ( cluster = = _ serverSelf . getCluster ( ) ) { if ( serversPod . size ( ) < serverCount & & ! isServerPresent ( serversPod ) ) { ServerPod serverPod = new ServerPod ( serversPod . size ( ) ) ; serversPod . add ( serverPod ) ; serverPod . setServer ( _ serverSelf ) ; */ while ( serversPod . size ( ) < serverCount ) { serversPod . add ( new ServerPod ( serversPod . size ( ) ) ) ; } ServerPod [ ] serverArray = new ServerPod [ serverCount ] ; for ( int i = 0 ; i < serverCount ; i ++ ) { serverArray [ i ] = serversPod . get ( i ) ; } return serverArray ;
public class Types {
    /**
     * The element type of an array.
     *
     * @param t the type to take the element type of
     * @return the element type, or null if {@code t} has no element type
     */
    public Type elemtype(Type t) {
        switch (t.getTag()) {
            case WILDCARD:
                // Recurse through the wildcard's upper bound.
                return elemtype(wildUpperBound(t));
            case ARRAY:
                // Strip type annotations before reading the component type.
                t = t.unannotatedType();
                return ((ArrayType) t).elemtype;
            case FORALL:
                // Recurse through the quantified type of a polymorphic type.
                return elemtype(((ForAll) t).qtype);
            case ERROR:
                // Error types propagate themselves.
                return t;
            default:
                return null;
        }
    }
}
public class ProxyConnection {
    /**
     * {@inheritDoc}
     *
     * Delegates to the wrapped connection first; the cached isolation level and
     * the dirty bit are only updated after the delegate call succeeds, so a
     * failed call leaves the cached state untouched.
     */
    @Override
    public void setTransactionIsolation(int level) throws SQLException {
        delegate.setTransactionIsolation(level);
        // Remember the level and mark it dirty so it can be reset on recycle.
        transactionIsolation = level;
        dirtyBits |= DIRTY_BIT_ISOLATION;
    }
}
public class Utils { /** * Return the timeout value to use , first checking the argument provided to the CLI invocation , * then an environment variable , then the default value . */ private static int parseTimeout ( final Namespace options , final String dest , final String envVarName , final int defaultValue ) { } }
if ( options . getInt ( dest ) != null ) { return options . getInt ( dest ) ; } if ( System . getenv ( envVarName ) != null ) { // if this is not an integer then let it blow up return Integer . parseInt ( System . getenv ( envVarName ) ) ; } return defaultValue ;
public class CmsPermissionViewTable {
    /**
     * Makes item for table.<p>
     *
     * Builds a vertical layout containing the principal info (with a remove
     * button when editable), an optional "inherited from" label, and the
     * permission view itself.
     *
     * @param cms CmsObject
     * @param entry ACE
     * @param view permission table
     * @param resPath parentResource (or null)
     * @return VerticalLayout
     */
    private VerticalLayout getLayoutFromEntry(CmsObject cms, CmsAccessControlEntry entry, final CmsPermissionView view, String resPath) {
        VerticalLayout res = new VerticalLayout();
        res.setSpacing(false);
        I_CmsPrincipal principal = null;
        try {
            principal = CmsPrincipal.readPrincipalIncludingHistory(cms, entry.getPrincipal());
        } catch (CmsException e) {
            // Fall back to a placeholder group when the principal cannot be read.
            principal = new CmsGroup(entry.getPrincipal(), null, "", "", 0);
        }
        if (principal != null) {
            CmsResourceInfo info = CmsAccountsApp.getPrincipalInfo(principal);
            if (view.isEditable()) {
                // Editable entries get a trash button that deletes the permission set.
                CssLayout cssl = new CssLayout();
                Button removeButton = new Button(FontOpenCms.TRASH_SMALL);
                removeButton.addStyleName("borderless o-toolbar-button o-resourceinfo-toolbar o-toolbar-icon-visible");
                removeButton.addClickListener(new ClickListener() {
                    private static final long serialVersionUID = -6112693137800596485L;
                    public void buttonClick(ClickEvent event) {
                        view.deletePermissionSet();
                    }
                });
                cssl.addComponent(removeButton);
                info.setButtonWidget(cssl);
            }
            res.addComponent(info);
            if (resPath != null) {
                // Show which parent resource an inherited permission comes from.
                Label resLabel = new Label(CmsVaadinUtils.getMessageText(Messages.GUI_PERMISSION_INHERITED_FROM_1, resPath));
                resLabel.addStyleName("o-report");
                res.addComponent(resLabel);
            }
        }
        res.addComponent(view);
        return res;
    }
}
public class Row {
    /**
     * End key column index, the index after the final key.
     *
     * Walks the columns accumulating their lengths until the accumulated
     * offset reaches the end of the key region (_keyStart + _keyLength).
     *
     * @return the index of the first column after the key columns
     * @throws IllegalStateException if the key region does not end exactly on a column boundary
     */
    private int keyColumnEnd() {
        int offset = 0;
        for (int i = 0; i < _columns.length; i++) {
            if (offset == _keyStart + _keyLength) {
                return i;
            }
            offset += _columns[i].length();
        }
        // The key may extend through the very last column.
        if (offset == _keyStart + _keyLength) {
            return _columns.length;
        }
        throw new IllegalStateException();
    }
}
public class TreeScanner { /** * Scan a single node . */ public R scan ( Tree node , P p ) { } }
return ( node == null ) ? null : node . accept ( this , p ) ;
public class ListManagementTermListsImpl {
    /**
     * Refreshes the index of the list with list Id equal to list ID passed.
     *
     * @param listId List Id of the image list.
     * @param language Language of the terms.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the RefreshIndex object
     */
    public Observable<ServiceResponse<RefreshIndex>> refreshIndexMethodWithServiceResponseAsync(String listId, String language) {
        // Validate required client configuration and parameters up front.
        if (this.client.baseUrl() == null) {
            throw new IllegalArgumentException("Parameter this.client.baseUrl() is required and cannot be null.");
        }
        if (listId == null) {
            throw new IllegalArgumentException("Parameter listId is required and cannot be null.");
        }
        if (language == null) {
            throw new IllegalArgumentException("Parameter language is required and cannot be null.");
        }
        String parameterizedHost = Joiner.on(", ").join("{baseUrl}", this.client.baseUrl());
        // Map the raw HTTP response into a typed ServiceResponse, turning any
        // deserialization failure into an error observable.
        return service.refreshIndexMethod(listId, language, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent()).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<RefreshIndex>>>() {
            @Override
            public Observable<ServiceResponse<RefreshIndex>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<RefreshIndex> clientResponse = refreshIndexMethodDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
    }
}
public class LogManager {
    /**
     * Returns a Logger with the specified name.
     *
     * @param fqcn The fully qualified class name of the class that this method is a member of;
     *             used by the factory to locate the right logger context for the caller.
     * @param name The logger name.
     * @return The Logger.
     */
    protected static Logger getLogger(final String fqcn, final String name) {
        // currentContext=false: resolve the context from the caller's class
        // loader rather than the current thread's.
        return factory.getContext(fqcn, null, null, false).getLogger(name);
    }
}
public class CmsUserEditDialog { /** * Sets the password status for the user . < p > * @ param user CmsUser * @ param reset true or false */ private void setUserPasswordStatus ( CmsUser user , boolean reset ) { } }
if ( reset ) { user . setAdditionalInfo ( CmsUserSettings . ADDITIONAL_INFO_PASSWORD_RESET , "true" ) ; } else { user . deleteAdditionalInfo ( CmsUserSettings . ADDITIONAL_INFO_PASSWORD_RESET ) ; } CmsUserTable . USER_PASSWORD_STATUS . put ( user . getId ( ) , new Boolean ( reset ) ) ;
public class GCLINERGImpl {
    /**
     * <!-- begin-user-doc -->
     * Resets the given feature to its default value.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.GCLINERG__XPOS:
                setXPOS(XPOS_EDEFAULT);
                return;
            case AfplibPackage.GCLINERG__YPOS:
                setYPOS(YPOS_EDEFAULT);
                return;
        }
        // Features not declared on this class are handled by the superclass.
        super.eUnset(featureID);
    }
}
public class MapDBContext { /** * This DB returned by this method gets deleted on JVM shutdown . * @ param name name of the DB file * @ return an offline instance of { @ link MapDBContext } */ public static DBContext offlineInstance ( String name ) { } }
DB db = DBMaker . fileDB ( name ) . fileMmapEnableIfSupported ( ) . closeOnJvmShutdown ( ) . cleanerHackEnable ( ) . transactionEnable ( ) . fileDeleteAfterClose ( ) . make ( ) ; return new MapDBContext ( db ) ;
public class DynamoDBExecutor { /** * May misused with toItem */ @ SafeVarargs public static Map < String , AttributeValue > asItem ( Object ... a ) { } }
if ( 0 != ( a . length % 2 ) ) { throw new IllegalArgumentException ( "The parameters must be the pairs of property name and value, or Map, or an entity class with getter/setter methods." ) ; } final Map < String , AttributeValue > item = new LinkedHashMap < > ( N . initHashCapacity ( a . length / 2 ) ) ; for ( int i = 0 ; i < a . length ; i ++ ) { item . put ( ( String ) a [ i ] , attrValueOf ( a [ ++ i ] ) ) ; } return item ;
public class FaunusPipeline { /** * Emit the property value of an element . * @ param key the key identifying the property * @ param type the class of the property value ( so Hadoop can intelligently handle the result ) * @ return the extended FaunusPipeline */ public FaunusPipeline property ( final String key , final Class type ) { } }
this . state . assertNotLocked ( ) ; this . state . assertNoProperty ( ) ; this . state . setProperty ( key , type ) ; return this ;
public class OperatingSystemVersion { /** * Gets the operatorType value for this OperatingSystemVersion . * @ return operatorType * The operator type . * < span class = " constraint ReadOnly " > This field is * read only and will be ignored when sent to the API . < / span > */ public com . google . api . ads . adwords . axis . v201809 . cm . OperatingSystemVersionOperatorType getOperatorType ( ) { } }
return operatorType ;
public class ThroughputInfo { /** * 对应size的数据统计 */ public Long getSize ( ) { } }
Long size = 0L ; if ( items . size ( ) != 0 ) { for ( ThroughputStat item : items ) { size += item . getSize ( ) ; } } return size ;
public class MapTileCache {
    /**
     * Just a helper method in order to parse all indices without concurrency side effects.
     *
     * Copies the current cache keys into pList while holding the cache lock,
     * so the iteration cannot race with concurrent cache mutations.
     *
     * @param pList the list to (re)populate with the cached tile indices
     * @since 6.0.0
     */
    private void populateSyncCachedTiles(final MapTileList pList) {
        synchronized (mCachedTiles) {
            // Pre-size, then reset the target list before copying the keys.
            pList.ensureCapacity(mCachedTiles.size());
            pList.clear();
            for (final long index : mCachedTiles.keySet()) {
                pList.put(index);
            }
        }
    }
}
public class Graph { /** * Performs union on the vertices and edges sets of the input graphs * removing duplicate vertices but maintaining duplicate edges . * @ param graph the graph to perform union with * @ return a new graph */ public Graph < K , VV , EV > union ( Graph < K , VV , EV > graph ) { } }
DataSet < Vertex < K , VV > > unionedVertices = graph . getVertices ( ) . union ( this . getVertices ( ) ) . name ( "Vertices" ) . distinct ( ) . name ( "Vertices" ) ; DataSet < Edge < K , EV > > unionedEdges = graph . getEdges ( ) . union ( this . getEdges ( ) ) . name ( "Edges" ) ; return new Graph < > ( unionedVertices , unionedEdges , this . context ) ;
public class CPDefinitionLinkPersistenceImpl { /** * Returns the last cp definition link in the ordered set where uuid = & # 63 ; . * @ param uuid the uuid * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp definition link * @ throws NoSuchCPDefinitionLinkException if a matching cp definition link could not be found */ @ Override public CPDefinitionLink findByUuid_Last ( String uuid , OrderByComparator < CPDefinitionLink > orderByComparator ) throws NoSuchCPDefinitionLinkException { } }
CPDefinitionLink cpDefinitionLink = fetchByUuid_Last ( uuid , orderByComparator ) ; if ( cpDefinitionLink != null ) { return cpDefinitionLink ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( "}" ) ; throw new NoSuchCPDefinitionLinkException ( msg . toString ( ) ) ;
public class TypeLord {
    /**
     * Finds the actual declaring type within the owner's hierarchy, e.g. the
     * parameterized type for Base&lt;T&gt;.
     *
     * @param ownersType the type whose hierarchy is searched; may be null
     * @param declaringType the declaring type to resolve
     * @return the resolved declaring type, or null when ownersType is null
     */
    private static IType findActualDeclaringType(IType ownersType, IType declaringType) {
        if (ownersType == null) {
            return null;
        }
        // An already parameterized (non-generic) declaring type needs no resolution.
        if (declaringType.isParameterizedType() && !declaringType.isGenericType()) {
            return declaringType;
        }
        if (ownersType == declaringType) {
            return declaringType;
        }
        // The owner itself may be the parameterization of the declaring type.
        if (ownersType.getGenericType() == declaringType) {
            return ownersType;
        }
        // Search the superclass chain for a more specific parameterization...
        IType actualDeclaringType = findActualDeclaringType(ownersType.getSupertype(), declaringType);
        if (actualDeclaringType != null && actualDeclaringType != declaringType) {
            return actualDeclaringType;
        }
        // ...then the implemented interfaces.
        for (IType iface : ownersType.getInterfaces()) {
            actualDeclaringType = findActualDeclaringType(iface, declaringType);
            if (actualDeclaringType != null && actualDeclaringType != declaringType) {
                return actualDeclaringType;
            }
        }
        return declaringType;
    }
}
public class AfplibPackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the GSCA class metaobject, resolving it lazily from the
     * registered package on first access.
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getGSCA() {
        if (gscaEClass == null) {
            // Classifier index 466 in the generated AFP library package.
            gscaEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(466);
        }
        return gscaEClass;
    }
}
public class ServerOperations { /** * Creates an operation to list the deployments . * @ return the operation */ public static ModelNode createListDeploymentsOperation ( ) { } }
final ModelNode op = createOperation ( READ_CHILDREN_NAMES ) ; op . get ( CHILD_TYPE ) . set ( DEPLOYMENT ) ; return op ;
public class SMailConventionReceptionist {
    /**
     * Accepts the postcard: resolves the mail body (direct text or template
     * file), evaluates dynamic properties, and prepares the plain and HTML
     * texts for delivery.
     *
     * @param postcard the postcard to prepare; must provide either a direct body or a body file
     */
    @Override
    public void accept(Postcard postcard) {
        readyPostcardFirst(postcard);
        checkPostcardFirst(postcard);
        if (postcard.isForcedlyDirect()) { // should ignore body file
            assertPlainBodyExistsForDirectBody(postcard);
            postcard.getBodyFile().ifPresent(bodyFile -> { // but wants logging
                officeManagedLogging(postcard, bodyFile, prepareReceiverLocale(postcard));
            });
            return;
        }
        postcard.getBodyFile().ifPresent(bodyFile -> {
            // A template file and a direct HTML body are mutually exclusive.
            if (postcard.getHtmlBody().isPresent()) {
                String msg = "Cannot use direct HTML body when body file is specified: " + postcard;
                throw new SMailIllegalStateException(msg);
            }
            final boolean filesystem = postcard.isFromFilesystem();
            final OptionalThing<Locale> receiverLocale = prepareReceiverLocale(postcard);
            final OptionalThing<Object> dynamicData = prepareDynamicData(postcard, bodyFile, filesystem, receiverLocale);
            dynamicData.ifPresent(data -> acceptDynamicProperty(postcard, bodyFile, filesystem, receiverLocale, data));
            officeManagedLogging(postcard, bodyFile, receiverLocale);
            // Read the plain-text template and register it as the direct body.
            final String plainText = readText(postcard, bodyFile, false, filesystem, receiverLocale, dynamicData);
            analyzeBodyMeta(postcard, bodyFile, plainText);
            final DirectBodyOption option = postcard.useDirectBody(plainText);
            if (postcard.isAlsoHtmlFile()) {
                // Read and verify the sibling HTML template as well.
                final String htmlFilePath = deriveHtmlFilePath(bodyFile);
                final String readHtml = readText(postcard, htmlFilePath, true, filesystem, receiverLocale, dynamicData);
                verifyMailHtmlTemplateTextFormat(htmlFilePath, readHtml);
                option.alsoDirectHtml(readHtml);
            }
            // no check about unneeded HTML template file because of runtime performance
            // DBFlute generator checks it instead
        }).orElse(() -> { /* direct body, check only here */
            assertPlainBodyExistsForDirectBody(postcard);
        });
        if (!postcard.getFrom().isPresent()) { // is moved from office check of postcard for dynamic property
            throwMailFromAddressNotFoundException(postcard);
        }
    }
}
public class FilteredIndexIterator {
    /**
     * @see com.ibm.ws.sib.processor.runtime.SIMPIterator#finished()
     *
     * Releases the underlying cursor and clears all iteration state so the
     * iterator can be garbage collected; trace entry/exit bracket the work.
     */
    public void finished() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "finished");
        // Release the cursor before clearing the remaining references.
        cursor.finished();
        cursor = null;
        // Clear the shared iteration state under the iterator lock.
        synchronized (this) {
            filter = null;
            index = null;
            next = null;
            removable = null;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "finished");
    }
}
public class EmbeddedElastic { /** * Index documents * @ param indexName target index * @ param indexType target index type * @ param idJsonMap map where keys are documents ids and values are documents represented as JSON */ public void index ( String indexName , String indexType , Map < CharSequence , CharSequence > idJsonMap ) { } }
index ( idJsonMap . entrySet ( ) . stream ( ) . map ( entry -> new IndexRequest . IndexRequestBuilder ( indexName , indexType , entry . getValue ( ) . toString ( ) ) . withId ( entry . getKey ( ) . toString ( ) ) . build ( ) ) . collect ( toList ( ) ) ) ;
public class AbstractPrintQuery { /** * Add an attribute to the PrintQuery . It is used to get editable values * from the eFaps DataBase . * @ param _ attributes Attribute to add * @ return this PrintQuery * @ throws EFapsException on error */ public AbstractPrintQuery addAttribute ( final CIAttribute ... _attributes ) throws EFapsException { } }
if ( isMarked4execute ( ) ) { for ( final CIAttribute attr : _attributes ) { addAttribute ( attr . name ) ; } } return this ;
public class HCExtHelper { /** * Convert the passed text to a list of & lt ; div & gt ; elements . Each \ n is used * to split the text into separate lines . \ r characters are removed from the * string ! Empty lines are preserved except for the last line . E . g . * < code > Hello \ nworld < / code > results in 2 & lt ; div & gt ; s : * & lt ; div & gt ; Hello & lt ; / div & gt ; and & lt ; div & gt ; world & lt ; / div & gt ; * @ param sText * The text to be split . May be < code > null < / code > . * @ param aTarget * The consumer to be invoked with every { @ link HCDiv } . May not be * < code > null < / code > . */ public static void nl2divList ( @ Nullable final String sText , @ Nonnull final Consumer < ? super HCDiv > aTarget ) { } }
forEachLine ( sText , ( sLine , bLast ) -> aTarget . accept ( new HCDiv ( ) . addChild ( sLine ) ) ) ;
public class IOUtil { /** * close inputstream without a Exception * @ param is */ public static void closeEL ( InputStream is ) { } }
try { if ( is != null ) is . close ( ) ; } // catch ( AlwaysThrow at ) { throw at ; } catch ( Throwable t ) { ExceptionUtil . rethrowIfNecessary ( t ) ; }
public class Functions { /** * Generate a message integrity check for a given received message . * @ param context GSSContext for which a connection has been established to the remote peer * @ param prop the MessageProp used for exchanging messages * @ param message the bytes of the received message * @ return the bytes of the message integrity check ( like a checksum ) that is * sent to a peer for verifying that the message was received correctly */ @ Function public static byte [ ] generateMIC ( GSSContext context , MessageProp prop , byte [ ] message ) { } }
try { // Ensure the default Quality - of - Protection is applied . prop . setQOP ( 0 ) ; byte [ ] initialToken = context . getMIC ( message , 0 , message . length , prop ) ; return getTokenWithLengthPrefix ( initialToken ) ; } catch ( GSSException ex ) { throw new RuntimeException ( "Exception generating MIC for message" , ex ) ; }
public class ModifyVpcEndpointRequest {
    /**
     * (Interface endpoint) One or more subnets IDs in which to remove the endpoint.
     *
     * @return (Interface endpoint) One or more subnets IDs in which to remove the endpoint;
     *         never null — an empty backing list is created lazily on first access
     */
    public java.util.List<String> getRemoveSubnetIds() {
        if (removeSubnetIds == null) {
            removeSubnetIds = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return removeSubnetIds;
    }
}
public class CFFFontSubset {
    /**
     * Function Copies the header from the original file to the output list.
     *
     * Reads the CFF header fields in file order (major, minor, hdrSize, offSize);
     * the reads must stay sequential because each getCard8() advances the cursor.
     */
    protected void CopyHeader() {
        seek(0);
        @SuppressWarnings("unused")
        int major = getCard8();
        @SuppressWarnings("unused")
        int minor = getCard8();
        int hdrSize = getCard8();
        @SuppressWarnings("unused")
        int offSize = getCard8();
        // The next index starts right after the header.
        nextIndexOffset = hdrSize;
        OutputList.addLast(new RangeItem(buf, 0, hdrSize));
    }
}
public class ZoneMeta {
    /**
     * Returns an immutable set of system time zone IDs.
     * Etc/Unknown is excluded.
     *
     * The set is cached through a SoftReference so it can be reclaimed under
     * memory pressure and rebuilt on demand; the method is synchronized so the
     * cache is initialized by at most one thread at a time.
     *
     * @return An immutable set of system time zone IDs.
     */
    private static synchronized Set<String> getSystemZIDs() {
        Set<String> systemZones = null;
        if (REF_SYSTEM_ZONES != null) {
            systemZones = REF_SYSTEM_ZONES.get();
        }
        if (systemZones == null) {
            // Rebuild the cached set (TreeSet keeps the IDs sorted).
            Set<String> systemIDs = new TreeSet<String>();
            String[] allIDs = getZoneIDs();
            for (String id : allIDs) {
                // exclude Etc/Unknown
                if (id.equals(TimeZone.UNKNOWN_ZONE_ID)) {
                    continue;
                }
                systemIDs.add(id);
            }
            systemZones = Collections.unmodifiableSet(systemIDs);
            REF_SYSTEM_ZONES = new SoftReference<Set<String>>(systemZones);
        }
        return systemZones;
    }
}
public class GetApplicationRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param getApplicationRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the bound values
     */
    public void marshall(GetApplicationRequest getApplicationRequest, ProtocolMarshaller protocolMarshaller) {
        if (getApplicationRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getApplicationRequest.getApplicationId(), APPLICATIONID_BINDING);
            protocolMarshaller.marshall(getApplicationRequest.getSemanticVersion(), SEMANTICVERSION_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CsvClassifierMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param csvClassifier the classifier to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the bound values
     */
    public void marshall(CsvClassifier csvClassifier, ProtocolMarshaller protocolMarshaller) {
        if (csvClassifier == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind every field of the classifier to its protocol location.
            protocolMarshaller.marshall(csvClassifier.getName(), NAME_BINDING);
            protocolMarshaller.marshall(csvClassifier.getCreationTime(), CREATIONTIME_BINDING);
            protocolMarshaller.marshall(csvClassifier.getLastUpdated(), LASTUPDATED_BINDING);
            protocolMarshaller.marshall(csvClassifier.getVersion(), VERSION_BINDING);
            protocolMarshaller.marshall(csvClassifier.getDelimiter(), DELIMITER_BINDING);
            protocolMarshaller.marshall(csvClassifier.getQuoteSymbol(), QUOTESYMBOL_BINDING);
            protocolMarshaller.marshall(csvClassifier.getContainsHeader(), CONTAINSHEADER_BINDING);
            protocolMarshaller.marshall(csvClassifier.getHeader(), HEADER_BINDING);
            protocolMarshaller.marshall(csvClassifier.getDisableValueTrimming(), DISABLEVALUETRIMMING_BINDING);
            protocolMarshaller.marshall(csvClassifier.getAllowSingleColumn(), ALLOWSINGLECOLUMN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CacheStatistics { /** * Returns the ratio of cache loading attempts which threw exceptions . This is defined as { @ code * loadExceptionCount / ( loadSuccessCount + loadExceptionCount ) } , or { @ code 0.0 } when { @ code * loadSuccessCount + loadExceptionCount = = 0 } . */ @ Override public double getLoadExceptionRate ( ) { } }
long totalLoadCount = loadSuccessCount . get ( ) + loadExceptionCount . get ( ) ; return ( totalLoadCount == 0 ) ? 0.0 : ( double ) loadExceptionCount . get ( ) / totalLoadCount ;
public class DataFrames {

    /**
     * Variance for a column.
     *
     * NOTE(review): the original Javadoc said "standard deviation", but the
     * implementation aggregates with {@code functions.variance} and the method
     * is named {@code var}; documented here as variance to match the code.
     *
     * @param dataFrame the dataframe to get the column from
     * @param columnName the name of the column to compute the variance for
     * @return the column that represents the variance
     */
    public static Column var(DataRowsFacade dataFrame, String columnName) {
        // NOTE(review): after groupBy().agg(), Spark usually names the aggregate
        // column "var_samp(columnName)"; selecting col(columnName) here returns
        // the grouping column, not the variance — confirm this is intended.
        return dataFrame.get().groupBy(columnName).agg(functions.variance(columnName)).col(columnName);
    }
}
public class ScanlineFiller { /** * Fills clipped area starting at xx , yy . Area must be surrounded with * replacement ( or clip ) * @ param xx * @ param yy * @ param clip * @ param target * @ param replacement */ public void floodFill ( int xx , int yy , Rectangle clip , IntPredicate target , int replacement ) { } }
floodFill ( xx , yy , clip . x , clip . y , clip . x + clip . width , clip . y + clip . height , target , replacement ) ;
public class HOSECodeGenerator {

    /**
     * Generates the string code for a given sphere.
     *
     * @param sphereNodes A vector of TreeNodes for which a string code is to be generated
     * @return The SphereCode value
     * @exception org.openscience.cdk.exception.CDKException Thrown if something goes wrong
     */
    private String getSphereCode(List<TreeNode> sphereNodes) throws CDKException {
        // An empty sphere contributes only its delimiter. "sphere" is an
        // instance field holding the current (1-based) sphere number.
        if (sphereNodes == null || sphereNodes.size() < 1) {
            return sphereDelimiters[sphere - 1];
        }
        TreeNode treeNode = null;
        StringBuffer code = new StringBuffer();
        /*
         * append the tree node code to the HOSECode in their now determined
         * order, using commas to separate nodes from different branches
         */
        IAtom branch = sphereNodes.get(0).source.atom;
        StringBuffer tempCode = null;
        for (int i = 0; i < sphereNodes.size(); i++) {
            treeNode = sphereNodes.get(i);
            tempCode = new StringBuffer();
            // A change of source atom means a new branch starts here: emit a
            // comma separator (stopper nodes never open a new branch).
            if (!treeNode.source.stopper && !treeNode.source.atom.equals(branch)) {
                branch = treeNode.source.atom;
                code.append(',');
            }
            if (!treeNode.source.stopper && treeNode.source.atom.equals(branch)) {
                // Bond symbol first; bondType indexes into bondSymbols and must
                // not exceed 4.
                if (treeNode.bondType <= 4) {
                    tempCode.append(bondSymbols[(int) treeNode.bondType]);
                } else {
                    throw new CDKException("Unknown bond type");
                }
                if (treeNode.atom != null && !treeNode.atom.getFlag(CDKConstants.VISITED)) {
                    // First visit: emit the element symbol.
                    tempCode.append(getElementSymbol(treeNode.symbol));
                } else if (treeNode.atom != null && treeNode.atom.getFlag(CDKConstants.VISITED)) {
                    // Already visited atom = ring closure, encoded as '&'; this
                    // node becomes a stopper so traversal does not continue past it.
                    tempCode.append('&');
                    treeNode.stopper = true;
                }
                code.append(tempCode + createChargeCode(treeNode.atom));
                treeNode.hSymbol = tempCode.toString();
            }
            // Mark the atom visited so later spheres render it as '&'.
            if (treeNode.atom != null) treeNode.atom.setFlag(CDKConstants.VISITED, true);
            // Stopper status propagates from the source node to its children.
            if (treeNode.source.stopper) treeNode.stopper = true;
        }
        code.append(sphereDelimiters[sphere - 1]);
        return code.toString();
    }
}
public class InputEvent { @ Nonnull public static InputEvent key ( @ Nonnull InputKeyEvent key ) { } }
InputEvent self = new InputEvent ( ) ; self . type = Discriminator . key ; self . key = key ; return self ;
public class CreateUserPoolRequest { /** * Specifies whether email addresses or phone numbers can be specified as usernames when a user signs up . * @ param usernameAttributes * Specifies whether email addresses or phone numbers can be specified as usernames when a user signs up . * @ return Returns a reference to this object so that method calls can be chained together . * @ see UsernameAttributeType */ public CreateUserPoolRequest withUsernameAttributes ( UsernameAttributeType ... usernameAttributes ) { } }
java . util . ArrayList < String > usernameAttributesCopy = new java . util . ArrayList < String > ( usernameAttributes . length ) ; for ( UsernameAttributeType value : usernameAttributes ) { usernameAttributesCopy . add ( value . toString ( ) ) ; } if ( getUsernameAttributes ( ) == null ) { setUsernameAttributes ( usernameAttributesCopy ) ; } else { getUsernameAttributes ( ) . addAll ( usernameAttributesCopy ) ; } return this ;
public class AbstractCasWebflowConfigurer { /** * Create expression expression . * @ param expression the expression * @ param expectedType the expected type * @ return the expression */ public Expression createExpression ( final String expression , final Class expectedType ) { } }
val parserContext = new FluentParserContext ( ) . expectResult ( expectedType ) ; return getSpringExpressionParser ( ) . parseExpression ( expression , parserContext ) ;
public class TLVElement { /** * Creates TLV element with { @ link Long } content . * TLV element nonCritical and forwarded flags are set to false . * @ param type TLV element type . * @ param value value to be the content of the TLV element . * @ return { @ link TLVElement } * @ throws TLVParserException */ public static TLVElement create ( int type , long value ) throws TLVParserException { } }
TLVElement element = create ( type ) ; element . setLongContent ( value ) ; return element ;
public class CommerceTierPriceEntryPersistenceImpl { /** * Returns a range of all the commerce tier price entries where groupId = & # 63 ; . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceTierPriceEntryModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param groupId the group ID * @ param start the lower bound of the range of commerce tier price entries * @ param end the upper bound of the range of commerce tier price entries ( not inclusive ) * @ return the range of matching commerce tier price entries */ @ Override public List < CommerceTierPriceEntry > findByGroupId ( long groupId , int start , int end ) { } }
return findByGroupId ( groupId , start , end , null ) ;
public class Windowing { /** * Adapts an iterable to an iterator showing a sliding window of the * contained elements . e . g : iterable : [ 1,2,3 ] windowSize : 2 - > [ [ 1,2 ] , * [ 2,3 ] ] * @ param < T > the iterator element type * @ param windowSize the window size * @ param iterable the iterable to be adapted * @ return the window iterator */ public static < T > Iterator < List < T > > window ( int windowSize , Iterable < T > iterable ) { } }
dbc . precondition ( iterable != null , "cannot create a window iterator from a null iterable" ) ; final Supplier < List < T > > factory = Compositions . compose ( new Vary < ArrayList < T > , List < T > > ( ) , new ArrayListFactory < T > ( ) ) ; return new PreciseWindowIterator < List < T > , T > ( iterable . iterator ( ) , windowSize , factory ) ;
public class MavenResolverSystemBaseImpl { /** * { @ inheritDoc } * @ see org . jboss . shrinkwrap . resolver . api . ResolveStage # resolve ( java . lang . String [ ] ) */ @ Override public STRATEGYSTAGETYPE resolve ( String ... coordinates ) throws IllegalArgumentException , ResolutionException { } }
return delegate . resolve ( coordinates ) ;
public class XGMMLExporter { /** * Export KAM to XGMML format using the KAM API . * @ param kam { @ link Kam } the kam to export to XGMML * @ param kAMStore { @ link KAMStore } the kam store to read kam details from * @ param outputPath { @ link String } the output path to write XGMML file to , * which can be null , in which case the kam ' s name will be used and it will * be written to the current directory ( user . dir ) . * @ throws KAMStoreException Thrown if an error occurred retrieving the KAM * @ throws FileNotFoundException Thrown if the export file cannot be * written to * @ throws InvalidArgument Thrown if either the kam , kamInfo , kamStore , or * outputPath arguments were null */ public static void exportKam ( final Kam kam , final KAMStore kAMStore , String outputPath ) throws KAMStoreException , FileNotFoundException { } }
if ( nulls ( kam , kAMStore , outputPath ) ) { throw new InvalidArgument ( "argument(s) were null" ) ; } // Set up a writer to write the XGMML PrintWriter writer = new PrintWriter ( outputPath ) ; // Start to process the Kam // Write xgmml < graph > element header XGMMLUtility . writeStart ( kam . getKamInfo ( ) . getName ( ) , writer ) ; // We iterate over all the nodes in the Kam first for ( KamNode kamNode : kam . getNodes ( ) ) { Node xNode = new Node ( ) ; xNode . id = kamNode . getId ( ) ; xNode . label = kamNode . getLabel ( ) ; xNode . function = kamNode . getFunctionType ( ) ; List < BelTerm > supportingTerms = kAMStore . getSupportingTerms ( kamNode ) ; XGMMLUtility . writeNode ( xNode , supportingTerms , writer ) ; } // Iterate over all the edges for ( KamEdge kamEdge : kam . getEdges ( ) ) { Edge xEdge = new Edge ( ) ; xEdge . id = kamEdge . getId ( ) ; xEdge . rel = kamEdge . getRelationshipType ( ) ; KamNode knsrc = kamEdge . getSourceNode ( ) ; KamNode kntgt = kamEdge . getTargetNode ( ) ; xEdge . source = knsrc . getId ( ) ; xEdge . target = kntgt . getId ( ) ; Node src = new Node ( ) ; src . function = knsrc . getFunctionType ( ) ; src . label = knsrc . getLabel ( ) ; Node tgt = new Node ( ) ; tgt . function = kntgt . getFunctionType ( ) ; tgt . label = kntgt . getLabel ( ) ; XGMMLUtility . writeEdge ( src , tgt , xEdge , writer ) ; } // Close out the writer XGMMLUtility . writeEnd ( writer ) ; writer . close ( ) ;
public class NDArrayMessage { /** * Prepare a whole array update * which includes the default dimensions * for indicating updating * the whole array ( a 1 length int array with - 1 as its only element ) * - 1 representing the dimension * @ param arr * @ return */ public static NDArrayMessage wholeArrayUpdate ( INDArray arr ) { } }
return NDArrayMessage . builder ( ) . arr ( arr ) . dimensions ( WHOLE_ARRAY_UPDATE ) . index ( WHOLE_ARRAY_INDEX ) . sent ( getCurrentTimeUtc ( ) ) . build ( ) ;
public class Table { /** * Returns a table consisting of randomly selected records from this table . The sample size is based on the * given proportion * @ param proportion The proportion to go in the sample */ public Table sampleX ( double proportion ) { } }
Preconditions . checkArgument ( proportion <= 1 && proportion >= 0 , "The sample proportion must be between 0 and 1" ) ; int tableSize = ( int ) Math . round ( rowCount ( ) * proportion ) ; return where ( selectNRowsAtRandom ( tableSize , rowCount ( ) ) ) ;
public class InjectionPointFactory { /** * Notifies CDI extension of a given { @ link InjectionPoint } . */ protected < T , X > FieldInjectionPointAttributes < T , X > processInjectionPoint ( FieldInjectionPointAttributes < T , X > injectionPointAttributes , Class < ? > declaringComponentClass , BeanManagerImpl manager ) { } }
return manager . getContainerLifecycleEvents ( ) . fireProcessInjectionPoint ( injectionPointAttributes , declaringComponentClass , manager ) ;
public class JdbcFlowTriggerInstanceLoaderImpl { /** * Retrieve recently finished trigger instances , but flow trigger properties are not populated * into the returned trigger instances for efficiency . Flow trigger properties will be * retrieved only on request time . */ @ Override public Collection < TriggerInstance > getRecentlyFinished ( final int limit ) { } }
final String query = String . format ( SELECT_RECENTLY_FINISHED , limit ) ; try { return this . dbOperator . query ( query , new TriggerInstanceHandler ( SORT_MODE . SORT_ON_START_TIME_ASC ) ) ; } catch ( final SQLException ex ) { handleSQLException ( ex ) ; } return Collections . emptyList ( ) ;
public class DataStore { /** * Add a data row to the batch at a particular timestamp . * This method will throw a ` MismatchedLengthException ` if the length of ` columns ` and ` values ` * are not the same . * See { @ link # add ( long , Map ) } for more information . * @ param timestamp Timestamp for all data points * @ param columns The list of columns for a field to value mapping * @ param values The list of values for a field to value mapping */ public void add ( long timestamp , String [ ] columns , Object [ ] values ) { } }
add ( timestamp , Arrays . asList ( columns ) , Arrays . asList ( values ) ) ;
public class JNRPE {

    /**
     * Starts a new thread that listen for requests. The method is <b>not
     * blocking</b>.
     *
     * @param address
     *            The address to bind to
     * @param port
     *            The listening port
     * @param useSSL
     *            <code>true</code> if an SSL socket must be created.
     * @throws UnknownHostException
     */
    public void listen(final String address, final int port, final boolean useSSL) throws UnknownHostException {
        // Bind and start to accept incoming connections.
        ChannelFuture cf = getServerBootstrap(useSSL).bind(address, port);
        // The bind is asynchronous: success/failure is reported later via the
        // event bus from this listener, not from this method.
        cf.addListener(new ChannelFutureListener() {
            public void operationComplete(final ChannelFuture future) throws Exception {
                if (future.isSuccess()) {
                    context.getEventBus().post(new JNRPEStatusEvent(STATUS.STARTED, this, "JNRPE Server started"));
                    LOG.info(context, "Listening on " + (useSSL ? "SSL/" : "") + address + ":" + port);
                } else {
                    // NOTE(review): the failure branch posts via
                    // getExecutionContext().getEventBus() while the success
                    // branch uses context.getEventBus() — confirm both refer
                    // to the same bus.
                    getExecutionContext().getEventBus().post(new JNRPEStatusEvent(STATUS.FAILED, this, "JNRPE Server start failed"));
                    LOG.error(context, "Unable to listen on " + (useSSL ? "SSL/" : "") + address + ":" + port, future.cause());
                }
            }
        });
    }
}
public class NatsBench {

    /**
     * Runs the benchmark.
     *
     * @throws Exception if an exception occurs
     */
    public void start() throws Exception {
        // Register a hook so partial results are reported if the user aborts.
        installShutdownHook();
        System.out.println();
        System.out.printf("Starting benchmark(s) [msgs=%d, msgsize=%d, pubs=%d, subs=%d]\n", numMsgs, size, numPubs, numSubs);
        System.out.printf("Current memory usage is %s / %s / %s free/total/max\n",
                Utils.humanBytes(Runtime.getRuntime().freeMemory()),
                Utils.humanBytes(Runtime.getRuntime().totalMemory()),
                Utils.humanBytes(Runtime.getRuntime().maxMemory()));
        System.out.println("Use ctrl-C to cancel.");
        System.out.println();
        // With publishers configured, run a publish-only pass followed by a
        // combined pub/sub pass; otherwise run subscribers alone.
        if (this.numPubs > 0) {
            runTest("Pub Only", this.numPubs, 0);
            runTest("Pub/Sub", this.numPubs, this.numSubs);
        } else {
            runTest("Sub", this.numPubs, this.numSubs);
        }
        System.out.println();
        System.out.printf("Final memory usage is %s / %s / %s free/total/max\n",
                Utils.humanBytes(Runtime.getRuntime().freeMemory()),
                Utils.humanBytes(Runtime.getRuntime().totalMemory()),
                Utils.humanBytes(Runtime.getRuntime().maxMemory()));
        // Normal completion: the abort hook is no longer needed.
        Runtime.getRuntime().removeShutdownHook(shutdownHook);
    }
}
public class ImagePipeline {

    /**
     * Returns a DataSource supplier that will on get submit the request for execution and return a
     * DataSource representing the pending results of the task.
     *
     * @param imageRequest the request to submit (what to execute).
     * @param callerContext the caller context of the caller of data source supplier
     * @param requestLevel which level to look down until for the image
     * @return a DataSource representing pending results and completion of the request
     */
    public Supplier<DataSource<CloseableReference<CloseableImage>>> getDataSourceSupplier(
            final ImageRequest imageRequest,
            final Object callerContext,
            final ImageRequest.RequestLevel requestLevel) {
        // Lazy: no work happens until get() is called, and each get() submits
        // a fresh fetch of the same request.
        return new Supplier<DataSource<CloseableReference<CloseableImage>>>() {
            @Override
            public DataSource<CloseableReference<CloseableImage>> get() {
                return fetchDecodedImage(imageRequest, callerContext, requestLevel);
            }

            // Anonymous class (not a lambda) so toString() can expose the
            // request URI for debugging.
            @Override
            public String toString() {
                return Objects.toStringHelper(this).add("uri", imageRequest.getSourceUri()).toString();
            }
        };
    }
}
public class DataModelConverters {

    /**
     * Converts {@link IDataSet} to the new {@link IDataModel}.
     *
     * Rows and cells keep their original indexes; each cell's address is
     * derived from its (row, column) position in A1 notation.
     */
    static IDataModel toDataModel(final IDataSet dataSet) {
        DataModel dataModel = new DataModel(dataSet.getName());
        for (IDsRow dsRow : dataSet) {
            DmRow dmRow = new DmRow(dsRow.index());
            for (IDsCell dsCell : dsRow) {
                DmCell dmCell = new DmCell();
                // NOTE(review): Optional.of throws NPE for a null value —
                // confirm IDsCell.getValue() never returns null, otherwise
                // Optional.ofNullable would be required here.
                dmCell.setValue(Optional.of(dsCell.getValue()));
                dmCell.setAddress(new CellAddress(dataModel.getDataModelId(),
                        A1Address.fromRowColumn(dsRow.index(), dsCell.index())));
                // Both "value" and "content" carry the same source cell value.
                dmCell.setContent(dsCell.getValue());
                dmRow.setCell(dsCell.index(), dmCell);
            }
            dataModel.setRow(dsRow.index(), dmRow);
        }
        return dataModel;
    }
}
public class JDBCInputFormat {

    /**
     * Connects to the source database and executes the query.
     *
     * @param ignored the input split is not used by this format
     * @throws IOException declared by the interface; failures here are
     *         surfaced as IllegalArgumentException instead
     */
    @Override
    public void open(InputSplit ignored) throws IOException {
        try {
            establishConnection();
            // Scroll-insensitive, read-only statement so rows can be re-read.
            statement = dbConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
            resultSet = statement.executeQuery(query);
        } catch (SQLException se) {
            // Release any partially opened resources before failing.
            close();
            throw new IllegalArgumentException("open() failed." + se.getMessage(), se);
        } catch (ClassNotFoundException cnfe) {
            // NOTE(review): unlike the SQLException path, close() is not called
            // here — presumably the driver class failed to load before any
            // connection existed; confirm nothing can leak on this path.
            throw new IllegalArgumentException("JDBC-Class not found. - " + cnfe.getMessage(), cnfe);
        }
    }
}
public class CmsXmlContentPropertyHelper { /** * Converts a property value given as a string between server format and client format . < p > * @ param cms the current CMS context * @ param propValue the property value to convert * @ param type the type of the property * @ param toClient if true , convert to client format , else convert to server format * @ return the converted property value */ protected static String convertStringPropertyValue ( CmsObject cms , String propValue , String type , boolean toClient ) { } }
if ( propValue == null ) { return null ; } if ( toClient ) { return CmsXmlContentPropertyHelper . getPropValuePaths ( cms , type , propValue ) ; } else { return CmsXmlContentPropertyHelper . getPropValueIds ( cms , type , propValue ) ; }
public class DeviceDataDAODefaultImpl {

    /**
     * Inserts an unsigned-short array (carried as Java shorts) into the
     * device data's CORBA Any via the DevVarUShortArray helper.
     *
     * @param deviceData the device data whose Any receives the value
     * @param argin the unsigned short values to insert
     * @deprecated NOTE(review): no replacement is documented here — confirm
     *             the preferred alternative before removing callers.
     */
    @Deprecated
    public void insert_u(final DeviceData deviceData, final short[] argin) {
        DevVarUShortArrayHelper.insert(deviceData.getAny(), argin);
    }
}
public class SecureASTCustomizer { /** * An alternative way of setting { @ link # setReceiversWhiteList ( java . util . List ) receiver classes } . * @ param receiversWhitelist a list of classes . */ public void setReceiversClassesWhiteList ( final List < Class > receiversWhitelist ) { } }
List < String > values = new LinkedList < String > ( ) ; for ( Class aClass : receiversWhitelist ) { values . add ( aClass . getName ( ) ) ; } setReceiversWhiteList ( values ) ;
public class CommonOps_DDF4 { /** * Returns the value of the element in the matrix that has the minimum value . < br > * < br > * Min { a < sub > ij < / sub > } for all i and j < br > * @ param a A matrix . Not modified . * @ return The value of element in the matrix with the minimum value . */ public static double elementMin ( DMatrix4x4 a ) { } }
double min = a . a11 ; if ( a . a12 < min ) min = a . a12 ; if ( a . a13 < min ) min = a . a13 ; if ( a . a14 < min ) min = a . a14 ; if ( a . a21 < min ) min = a . a21 ; if ( a . a22 < min ) min = a . a22 ; if ( a . a23 < min ) min = a . a23 ; if ( a . a24 < min ) min = a . a24 ; if ( a . a31 < min ) min = a . a31 ; if ( a . a32 < min ) min = a . a32 ; if ( a . a33 < min ) min = a . a33 ; if ( a . a34 < min ) min = a . a34 ; if ( a . a41 < min ) min = a . a41 ; if ( a . a42 < min ) min = a . a42 ; if ( a . a43 < min ) min = a . a43 ; if ( a . a44 < min ) min = a . a44 ; return min ;