signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CRestBuilder { /** * < p > Binds a deserializer to a list of interface method ' s return types . < / p > * < p > By default , < b > CRest < / b > handle the following types : < / p > * < ul > * < li > all primitives and wrapper types < / li > * < li > java . io . InputStream < / li > * < li > java . io . Reader < / li > * < / ul > * < p > Meaning that any interface method return type can be by default one of these types . < / p > * @ param deserializer Deserializer class to use for the given interface method ' s return types * @ param classes Interface method ' s return types to bind deserializer to * @ param config State that will be passed to the deserializer along with the CRestConfig object if the deserializer has declared a single argument constructor with CRestConfig parameter type * @ return current builder * @ see org . codegist . crest . CRestConfig */ public CRestBuilder bindDeserializer ( Class < ? extends Deserializer > deserializer , Class < ? > [ ] classes , Map < String , Object > config ) { } }
this . classDeserializerBuilder . register ( deserializer , classes , config ) ; return this ;
public class Table { /** * Starts a BigQuery Job to copy the current table to the provided destination table . Returns the * started { @ link Job } object . * < p > Example copying the table to a destination table . * < pre > { @ code * String dataset = " my _ dataset " ; * String tableName = " my _ destination _ table " ; * TableId destinationId = TableId . of ( dataset , tableName ) ; * JobOption options = JobOption . fields ( JobField . STATUS , JobField . USER _ EMAIL ) ; * Job job = table . copy ( destinationId , options ) ; * / / Wait for the job to complete . * try { * Job completedJob = job . waitFor ( RetryOption . initialRetryDelay ( Duration . ofSeconds ( 1 ) ) , * RetryOption . totalTimeout ( Duration . ofMinutes ( 3 ) ) ) ; * if ( completedJob ! = null & & completedJob . getStatus ( ) . getError ( ) = = null ) { * / / Job completed successfully . * } else { * / / Handle error case . * } catch ( InterruptedException e ) { * / / Handle interrupted wait * } < / pre > * @ param destinationTable the destination table of the copy job * @ param options job options * @ throws BigQueryException upon failure */ public Job copy ( TableId destinationTable , JobOption ... options ) throws BigQueryException { } }
CopyJobConfiguration configuration = CopyJobConfiguration . of ( destinationTable , getTableId ( ) ) ; return bigquery . create ( JobInfo . of ( configuration ) , options ) ;
public class DatabasesInner { /** * Exports a database to a bacpac . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param databaseName The name of the database to be exported . * @ param parameters The required parameters for exporting a database . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < ImportExportResponseInner > beginExportAsync ( String resourceGroupName , String serverName , String databaseName , ExportRequest parameters , final ServiceCallback < ImportExportResponseInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginExportWithServiceResponseAsync ( resourceGroupName , serverName , databaseName , parameters ) , serviceCallback ) ;
public class AnnotationInfoImpl { /** * the enumeration class name and the enumeration literal value . */ public AnnotationValueImpl addAnnotationValue ( String name , String enumClassName , String enumName ) { } }
AnnotationValueImpl annotationValue = new AnnotationValueImpl ( enumClassName , enumName ) ; addAnnotationValue ( name , annotationValue ) ; return annotationValue ;
public class IoUtil { /** * 从流中读取内容 , 读到输出流中 * @ param in 输入流 * @ return 输出流 * @ throws IORuntimeException IO异常 */ public static FastByteArrayOutputStream read ( InputStream in ) throws IORuntimeException { } }
final FastByteArrayOutputStream out = new FastByteArrayOutputStream ( ) ; copy ( in , out ) ; return out ;
public class HeartbeatImpl { /** * Sends a server status message to the hub servers . The server message * contains the status for the current server . */ private void sendSpokeHeartbeats ( ) { } }
for ( ServerTarget target : _targets ) { ServerHeartbeat server = target . getServer ( ) ; if ( server . getRack ( ) != _rack ) { continue ; } if ( isHub ( server ) ) { target . sendServerHeartbeat ( getServerSelf ( ) . getUpdate ( ) ) ; } }
public class JvmTypeAnnotationValueImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
switch ( featureID ) { case TypesPackage . JVM_TYPE_ANNOTATION_VALUE__VALUES : getValues ( ) . clear ( ) ; return ; } super . eUnset ( featureID ) ;
public class CPOptionValuePersistenceImpl { /** * Returns the cp option value where CPOptionId = & # 63 ; and key = & # 63 ; or throws a { @ link NoSuchCPOptionValueException } if it could not be found . * @ param CPOptionId the cp option ID * @ param key the key * @ return the matching cp option value * @ throws NoSuchCPOptionValueException if a matching cp option value could not be found */ @ Override public CPOptionValue findByC_K ( long CPOptionId , String key ) throws NoSuchCPOptionValueException { } }
CPOptionValue cpOptionValue = fetchByC_K ( CPOptionId , key ) ; if ( cpOptionValue == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPOptionId=" ) ; msg . append ( CPOptionId ) ; msg . append ( ", key=" ) ; msg . append ( key ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchCPOptionValueException ( msg . toString ( ) ) ; } return cpOptionValue ;
public class RandomUtil { /** * 随机获得列表中的一定量的不重复元素 , 返回Set * @ param < T > 元素类型 * @ param collection 列表 * @ param count 随机取出的个数 * @ return 随机元素 * @ throws IllegalArgumentException 需要的长度大于给定集合非重复总数 */ public static < T > Set < T > randomEleSet ( Collection < T > collection , int count ) { } }
ArrayList < T > source = new ArrayList < > ( new HashSet < > ( collection ) ) ; if ( count > source . size ( ) ) { throw new IllegalArgumentException ( "Count is larger than collection distinct size !" ) ; } final HashSet < T > result = new HashSet < T > ( count ) ; int limit = collection . size ( ) ; while ( result . size ( ) < count ) { result . add ( randomEle ( source , limit ) ) ; } return result ;
public class Selector { /** * Unselect all elements . */ private void unSelectAll ( ) { } }
final int n = selected . size ( ) ; for ( int i = 0 ; i < n ; i ++ ) { selected . get ( i ) . onSelection ( false ) ; } selected . clear ( ) ;
public class ns_device_profile { /** * < pre > * Converts API response of bulk operation into object and returns the object array in case of get request . * < / pre > */ protected base_resource [ ] get_nitro_bulk_response ( nitro_service service , String response ) throws Exception { } }
ns_device_profile_responses result = ( ns_device_profile_responses ) service . get_payload_formatter ( ) . string_to_resource ( ns_device_profile_responses . class , response ) ; if ( result . errorcode != 0 ) { if ( result . errorcode == SESSION_NOT_EXISTS ) service . clear_session ( ) ; throw new nitro_exception ( result . message , result . errorcode , ( base_response [ ] ) result . ns_device_profile_response_array ) ; } ns_device_profile [ ] result_ns_device_profile = new ns_device_profile [ result . ns_device_profile_response_array . length ] ; for ( int i = 0 ; i < result . ns_device_profile_response_array . length ; i ++ ) { result_ns_device_profile [ i ] = result . ns_device_profile_response_array [ i ] . ns_device_profile [ 0 ] ; } return result_ns_device_profile ;
public class Mapper { /** * Creates a new mapper instance . * Shares common data ( esp . scanner and parser table with this instance . */ public Mapper newInstance ( ) { } }
Mapper mapper ; mapper = new Mapper ( name , parser . newInstance ( ) , oag . newInstance ( ) ) ; mapper . setLogging ( logParsing , logAttribution ) ; return mapper ;
public class CodecCollector {

    /**
     * Creates the kwic (keyword-in-context) results for each ComponentKwic in the list.
     *
     * For every kwic component, looks up its match data per document and fills the component's
     * per-document bookkeeping (unique key, subtotal, min/max position) plus either hit-style
     * results (position -> term strings around each match) or token-style results (raw token
     * objects around each match), depending on the component's output mode.
     *
     * @param kwicList       the kwic list (may be null; then nothing is done)
     * @param spansMatchData the spans match data, keyed by query then by document id
     * @param docList        the doc list
     * @param field          the field
     * @param docBase        the doc base (segment offset subtracted from global doc ids)
     * @param uniqueKeyField the unique key field
     * @param mtasCodecInfo  the mtas codec info
     * @param searcher       the searcher
     * @throws IOException Signals that an I/O exception has occurred.
     */
    private static void createKwic(List<ComponentKwic> kwicList,
            Map<MtasSpanQuery, Map<Integer, List<Match>>> spansMatchData,
            List<Integer> docList, String field, int docBase, String uniqueKeyField,
            CodecInfo mtasCodecInfo, IndexSearcher searcher) throws IOException {
        if (kwicList != null) {
            for (ComponentKwic kwic : kwicList) {
                Map<Integer, List<Match>> matchData = spansMatchData.get(kwic.query);
                List<Match> matchList;
                if (kwic.output.equals(ComponentKwic.KWIC_OUTPUT_HIT)) {
                    // HIT mode: collect, per match, a map from position to the term strings there.
                    for (int docId : docList) {
                        if (matchData != null && (matchList = matchData.get(docId)) != null) {
                            // get unique id
                            Document doc = searcher.doc(docId,
                                    new HashSet<String>(Arrays.asList(uniqueKeyField)));
                            IndexableField indxfld = doc.getField(uniqueKeyField);
                            // get other doc info
                            if (indxfld != null) {
                                kwic.uniqueKey.put(docId, indxfld.stringValue());
                            }
                            kwic.subTotal.put(docId, matchList.size());
                            IndexDoc mDoc = mtasCodecInfo.getDoc(field, (docId - docBase));
                            if (mDoc != null) {
                                kwic.minPosition.put(docId, mDoc.minPosition);
                                kwic.maxPosition.put(docId, mDoc.maxPosition);
                            }
                            // NOTE(review): below, mDoc.minPosition/maxPosition are read inside the
                            // match loop without a null guard — if getDoc can return null here this
                            // is an NPE; the TOKEN branch guards it. Confirm getDoc's contract.
                            // kwiclist
                            List<KwicHit> kwicItemList = new ArrayList<>();
                            int number = 0;
                            for (Match m : matchList) {
                                // Stop once the requested window [start, start+number) is produced.
                                if (kwic.number != null
                                        && number >= (kwic.start + kwic.number)) {
                                    break;
                                }
                                if (number >= kwic.start) {
                                    int startPosition = m.startPosition;
                                    // endPosition is exclusive in Match; make it inclusive here.
                                    int endPosition = m.endPosition - 1;
                                    List<MtasTreeHit<String>> terms = mtasCodecInfo
                                            .getPositionedTermsByPrefixesAndPositionRange(field,
                                                    (docId - docBase), kwic.prefixes,
                                                    Math.max(mDoc.minPosition,
                                                            startPosition - kwic.left),
                                                    Math.min(mDoc.maxPosition,
                                                            endPosition + kwic.right));
                                    // construct hit
                                    Map<Integer, List<String>> kwicListHits = new HashMap<>();
                                    // Pre-create an (empty) term list for every position in the
                                    // clamped context window around the match.
                                    for (int position = Math.max(mDoc.minPosition,
                                            startPosition - kwic.left); position <= Math.min(
                                                    mDoc.maxPosition,
                                                    endPosition + kwic.right); position++) {
                                        kwicListHits.put(position, new ArrayList<String>());
                                    }
                                    List<String> termList;
                                    for (MtasTreeHit<String> term : terms) {
                                        // A term hit may span several positions; add its data to
                                        // each position it overlaps within the context window.
                                        for (int position = Math.max(
                                                (startPosition - kwic.left),
                                                term.startPosition); position <= Math.min(
                                                        (endPosition + kwic.right),
                                                        term.endPosition); position++) {
                                            termList = kwicListHits.get(position);
                                            termList.add(term.data);
                                        }
                                    }
                                    kwicItemList.add(new KwicHit(m, kwicListHits));
                                }
                                number++;
                            }
                            kwic.hits.put(docId, kwicItemList);
                        }
                    }
                } else if (kwic.output.equals(ComponentKwic.KWIC_OUTPUT_TOKEN)) {
                    // TOKEN mode: collect, per match, the raw token objects in the context window.
                    for (int docId : docList) {
                        if (matchData != null && (matchList = matchData.get(docId)) != null) {
                            // get unique id
                            Document doc = searcher.doc(docId,
                                    new HashSet<String>(Arrays.asList(uniqueKeyField)));
                            // get other doc info
                            IndexableField indxfld = doc.getField(uniqueKeyField);
                            if (indxfld != null) {
                                kwic.uniqueKey.put(docId, indxfld.stringValue());
                            }
                            kwic.subTotal.put(docId, matchList.size());
                            IndexDoc mDoc = mtasCodecInfo.getDoc(field, (docId - docBase));
                            if (mDoc != null) {
                                kwic.minPosition.put(docId, mDoc.minPosition);
                                kwic.maxPosition.put(docId, mDoc.maxPosition);
                                List<KwicToken> kwicItemList = new ArrayList<>();
                                int number = 0;
                                for (Match m : matchList) {
                                    if (kwic.number != null
                                            && number >= (kwic.start + kwic.number)) {
                                        break;
                                    }
                                    if (number >= kwic.start) {
                                        int startPosition = m.startPosition;
                                        // endPosition is exclusive in Match; make it inclusive.
                                        int endPosition = m.endPosition - 1;
                                        List<MtasTokenString> tokens;
                                        tokens = mtasCodecInfo
                                                .getPrefixFilteredObjectsByPositions(field,
                                                        (docId - docBase), kwic.prefixes,
                                                        Math.max(mDoc.minPosition,
                                                                startPosition - kwic.left),
                                                        Math.min(mDoc.maxPosition,
                                                                endPosition + kwic.right));
                                        kwicItemList.add(new KwicToken(m, tokens));
                                    }
                                    number++;
                                }
                                kwic.tokens.put(docId, kwicItemList);
                            }
                        }
                    }
                }
            }
        }
    }
}
public class CmsGalleryController { /** * Remove the gallery from the search object . < p > * @ param galleryPath the gallery path as id */ public void removeGallery ( String galleryPath ) { } }
m_searchObject . removeGallery ( galleryPath ) ; m_searchObjectChanged = true ; m_galleriesChanged = true ; ValueChangeEvent . fire ( this , m_searchObject ) ;
public class PassThruTable { /** * Remove this table from this table list . * @ param table The table to remove . */ public boolean removeTable ( BaseTable table ) { } }
if ( m_mapTable != null ) { for ( Object objKey : m_mapTable . keySet ( ) ) { if ( table == m_mapTable . get ( objKey ) ) { return ( m_mapTable . remove ( objKey ) != null ) ; } } } else { if ( this . getNextTable ( ) != null ) return this . getNextTable ( ) . removeTable ( table ) ; } return super . removeTable ( table ) ;
public class AdminAccesstokenAction { private HtmlResponse asListHtml ( ) { } }
return asHtml ( path_AdminAccesstoken_AdminAccesstokenJsp ) . renderWith ( data -> { RenderDataUtil . register ( data , "accessTokenItems" , accessTokenService . getAccessTokenList ( accessTokenPager ) ) ; } ) . useForm ( SearchForm . class , setup -> { setup . setup ( form -> { copyBeanToBean ( accessTokenPager , form , op -> op . include ( "id" ) ) ; } ) ; } ) ;
public class PBS { /** * Executes a PBS command . * @ param cmdLine command * @ param environment env vars * @ param out output stream * @ param err err stream * @ return execute handler * @ throws ExecuteException if there is an error executing a command * @ throws IOException in case of an IO problem */ static DefaultExecuteResultHandler execute ( CommandLine cmdLine , Map < String , String > environment , OutputStream out , OutputStream err ) throws ExecuteException , IOException { } }
DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler ( ) ; ExecuteStreamHandler streamHandler = new PumpStreamHandler ( out , err ) ; DefaultExecutor executor = new DefaultExecutor ( ) ; executor . setExitValue ( 0 ) ; executor . setStreamHandler ( streamHandler ) ; if ( environment != null ) { executor . execute ( cmdLine , environment , resultHandler ) ; } else { executor . execute ( cmdLine , resultHandler ) ; } return resultHandler ;
public class BaseMessageFilter { /** * Add this message listener to the listener list . * Also adds the filter to my filter list . * @ param listener The message listener to set this filter to . */ public void addMessageListener ( JMessageListener listener ) { } }
if ( listener == null ) return ; // Never this . removeDuplicateFilters ( listener ) ; // Remove any duplicate record filters if ( m_vListenerList == null ) m_vListenerList = new Vector < JMessageListener > ( ) ; for ( int i = 0 ; i < m_vListenerList . size ( ) ; i ++ ) { if ( m_vListenerList . get ( i ) == listener ) { Util . getLogger ( ) . warning ( "--------Error-Added message listener twice--------" ) ; return ; // I ' m sure you didn ' t mean to do that . } } m_vListenerList . add ( listener ) ; listener . addListenerMessageFilter ( this ) ;
public class CachingURLStreamHandlerFactory { /** * returns the URL class ' s stream handler for a protocol . this uses inspection . * @ param protocol * the protocol * @ return the URLStreamHandler , null if it cannot be retrieved . */ private URLStreamHandler getURLStreamHandler ( final String protocol ) { } }
try { final Method method = URL . class . getDeclaredMethod ( "getURLStreamHandler" , String . class ) ; method . setAccessible ( true ) ; return ( URLStreamHandler ) method . invoke ( null , protocol ) ; } catch ( final Exception e ) { if ( logger . isWarnEnabled ( ) ) { logger . warn ( "could not access URL.getUrlStreamHandler" ) ; } return null ; }
public class SwingGroovyMethods {

    /**
     * Returns an {@link java.util.Iterator} which traverses the DefaultTableModel one row at a
     * time, yielding each row as an {@code Object[]} of its cell values.
     *
     * @param self a DefaultTableModel
     * @return an Iterator over the model's rows
     * @since 1.6.4
     */
    public static Iterator iterator(final DefaultTableModel self) {
        return new Iterator() {
            // Index of the next row to return.
            private int cursor = 0;

            public boolean hasNext() {
                return cursor > -1 && cursor < self.getRowCount();
            }

            public Object next() {
                final int columnCount = self.getColumnCount();
                final Object[] values = new Object[columnCount];
                for (int c = 0; c < columnCount; c++) {
                    values[c] = self.getValueAt(cursor, c);
                }
                cursor++;
                return values;
            }

            public void remove() {
                // Removes the next row (if any) and steps the cursor back to compensate.
                if (hasNext()) {
                    self.removeRow(cursor--);
                }
            }
        };
    }
}
public class SpeechToText { /** * List grammars . * Lists information about all grammars from a custom language model . The information includes the total number of * out - of - vocabulary ( OOV ) words , name , and status of each grammar . You must use credentials for the instance of the * service that owns a model to list its grammars . * * * See also : * * [ Listing grammars from a custom language model ] ( https : / / cloud . ibm . com / docs / services / speech - to - text / ) . * @ param listGrammarsOptions the { @ link ListGrammarsOptions } containing the options for the call * @ return a { @ link ServiceCall } with a response type of { @ link Grammars } */ public ServiceCall < Grammars > listGrammars ( ListGrammarsOptions listGrammarsOptions ) { } }
Validator . notNull ( listGrammarsOptions , "listGrammarsOptions cannot be null" ) ; String [ ] pathSegments = { "v1/customizations" , "grammars" } ; String [ ] pathParameters = { listGrammarsOptions . customizationId ( ) } ; RequestBuilder builder = RequestBuilder . get ( RequestBuilder . constructHttpUrl ( getEndPoint ( ) , pathSegments , pathParameters ) ) ; Map < String , String > sdkHeaders = SdkCommon . getSdkHeaders ( "speech_to_text" , "v1" , "listGrammars" ) ; for ( Entry < String , String > header : sdkHeaders . entrySet ( ) ) { builder . header ( header . getKey ( ) , header . getValue ( ) ) ; } builder . header ( "Accept" , "application/json" ) ; return createServiceCall ( builder . build ( ) , ResponseConverterUtils . getObject ( Grammars . class ) ) ;
public class SQLExceptionMapper { /** * Maps an exception to a response or returns null if it wasn ' t handled * @ param sqlEx * @ return */ public static Response map ( SQLException sqlEx ) { } }
if ( null != sqlEx . getSQLState ( ) ) { switch ( sqlEx . getSQLState ( ) ) { // query _ canceled case "57014" : return Response . status ( 504 ) . entity ( sqlEx . getMessage ( ) ) . build ( ) ; case "2201B" : // regular expression did not compile AqlParseError error = new AqlParseError ( sqlEx . getMessage ( ) ) ; return Response . status ( Response . Status . BAD_REQUEST ) . entity ( new GenericEntity < List < AqlParseError > > ( Arrays . asList ( error ) ) { } ) . type ( "application/xml" ) . build ( ) ; } } return null ;
public class DescribeRecoveryPointRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeRecoveryPointRequest describeRecoveryPointRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeRecoveryPointRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeRecoveryPointRequest . getBackupVaultName ( ) , BACKUPVAULTNAME_BINDING ) ; protocolMarshaller . marshall ( describeRecoveryPointRequest . getRecoveryPointArn ( ) , RECOVERYPOINTARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class SaltClient { /** * Returns a WebSocket @ ClientEndpoint annotated object connected * to the / ws ServerEndpoint . * The stream object supports the { @ link WebSocketEventStream } interface which allows the caller * to register / unregister for stream event notifications as well as close the event * stream . * Note : { @ link SaltClient # login ( String , String , AuthModule ) } or must be called prior * to calling this method . * { @ code GET / events } * @ param listeners event listeners to be added before the stream is initialized * @ param idleTimeout idle timeout to pass to the http client config * @ param maxMsgSize maximum event data size to accept * @ param sessionIdleTimeout session idle timeout to pass to the http client config * @ param token salt session token to use for authentication * @ return the event stream * @ throws SaltException in case of an error during websocket stream initialization */ public WebSocketEventStream events ( Token token , long sessionIdleTimeout , long idleTimeout , int maxMsgSize , EventListener ... listeners ) throws SaltException { } }
return new WebSocketEventStream ( uri , token , sessionIdleTimeout , idleTimeout , maxMsgSize , listeners ) ;
public class AbstractSimpleBeanConverter { /** * Convert element to another object given a parameterized type signature * @ param context * @ param destinationType * the destination type * @ param source * the source object * @ return the converted object * @ throws ConverterException * if conversion failed */ @ SuppressWarnings ( "unchecked" ) public < T > T convertElement ( ConversionContext context , Object source , TypeReference < T > destinationType ) throws ConverterException { } }
return ( T ) elementConverter . convert ( context , source , destinationType ) ;
public class FileSystem { /** * Test if the given filename is a local filename and extract * the path component . * @ param filename the filename . * @ return the path . */ @ Pure @ SuppressWarnings ( "checkstyle:magicnumber" ) private static String extractLocalPath ( String filename ) { } }
if ( filename == null ) { return null ; } final int max = Math . min ( FILE_PREFIX . length , filename . length ( ) ) ; final int inner = max - 2 ; if ( inner <= 0 ) { return filename ; } boolean foundInner = false ; boolean foundFull = false ; for ( int i = 0 ; i < max ; ++ i ) { final char c = Character . toLowerCase ( filename . charAt ( i ) ) ; if ( FILE_PREFIX [ i ] != c ) { foundFull = false ; foundInner = i >= inner ; break ; } foundFull = true ; } String fn ; if ( foundFull ) { fn = filename . substring ( FILE_PREFIX . length ) ; } else if ( foundInner ) { fn = filename . substring ( inner ) ; } else { fn = filename ; } if ( Pattern . matches ( "^(" + Pattern . quote ( URL_PATH_SEPARATOR ) + "|" // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ + Pattern . quote ( WINDOWS_SEPARATOR_STRING ) + ")[a-zA-Z][:|].*$" , fn ) ) { // $ NON - NLS - 1 $ fn = fn . substring ( 1 ) ; } return fn ;
public class JMTimeUtil { /** * Change timestamp to new format string . * @ param originDateFormat the origin date format * @ param originTimestamp the origin timestamp * @ param newDateFormat the new date format * @ return the string */ public static String changeTimestampToNewFormat ( String originDateFormat , String originTimestamp , String newDateFormat ) { } }
return getTime ( changeTimestampToLong ( getSimpleDateFormat ( originDateFormat ) , originTimestamp ) , newDateFormat ) ;
public class filterhtmlinjectionvariable { /** * Use this API to fetch filterhtmlinjectionvariable resource of given name . */ public static filterhtmlinjectionvariable get ( nitro_service service , String variable ) throws Exception { } }
filterhtmlinjectionvariable obj = new filterhtmlinjectionvariable ( ) ; obj . set_variable ( variable ) ; filterhtmlinjectionvariable response = ( filterhtmlinjectionvariable ) obj . get_resource ( service ) ; return response ;
public class AdministeredObjectDefinitionProcessor { /** * ( non - Javadoc ) * @ see com . ibm . wsspi . injectionengine . InjectionProcessor # processXML ( ) */ @ Override public void processXML ( ) throws InjectionException { } }
final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "processXML : " + this ) ; List < ? extends AdministeredObject > administeredObjectDefinitions = ivNameSpaceConfig . getJNDIEnvironmentRefs ( AdministeredObject . class ) ; if ( administeredObjectDefinitions != null ) { for ( AdministeredObject administeredObject : administeredObjectDefinitions ) { String jndiName = administeredObject . getName ( ) ; InjectionBinding < AdministeredObjectDefinition > injectionBinding = ivAllAnnotationsCollection . get ( jndiName ) ; AdministeredObjectDefinitionInjectionBinding binding ; if ( injectionBinding != null ) { binding = ( AdministeredObjectDefinitionInjectionBinding ) injectionBinding ; } else { binding = new AdministeredObjectDefinitionInjectionBinding ( jndiName , ivNameSpaceConfig ) ; addInjectionBinding ( binding ) ; } binding . mergeXML ( administeredObject ) ; } } if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "processXML : " + this ) ;
public class MurmurHash3v2 { /** * Returns a 128 - bit hash of the input . * @ param in a String * @ param seed A long valued seed . * @ param hashOut A long array of size 2 * @ return the hash */ public static long [ ] hash ( final String in , final long seed , final long [ ] hashOut ) { } }
if ( ( in == null ) || ( in . length ( ) == 0 ) ) { return emptyOrNull ( seed , hashOut ) ; } final byte [ ] byteArr = in . getBytes ( UTF_8 ) ; return hash ( Memory . wrap ( byteArr ) , 0L , byteArr . length , seed , hashOut ) ;
public class AmazonLightsailClient { /** * Deletes a database in Amazon Lightsail . * The < code > delete relational database < / code > operation supports tag - based access control via resource tags applied * to the resource identified by relationalDatabaseName . For more information , see the < a * href = " https : / / lightsail . aws . amazon . com / ls / docs / en / articles / amazon - lightsail - controlling - access - using - tags " * > Lightsail Dev Guide < / a > . * @ param deleteRelationalDatabaseRequest * @ return Result of the DeleteRelationalDatabase operation returned by the service . * @ throws ServiceException * A general service exception . * @ throws InvalidInputException * Lightsail throws this exception when user input does not conform to the validation rules of an input * field . < / p > < note > * Domain - related APIs are only available in the N . Virginia ( us - east - 1 ) Region . Please set your AWS Region * configuration to us - east - 1 to create , view , or edit these resources . * @ throws NotFoundException * Lightsail throws this exception when it cannot find a resource . * @ throws OperationFailureException * Lightsail throws this exception when an operation fails to execute . * @ throws AccessDeniedException * Lightsail throws this exception when the user cannot be authenticated or uses invalid credentials to * access a resource . * @ throws AccountSetupInProgressException * Lightsail throws this exception when an account is still in the setup in progress state . * @ throws UnauthenticatedException * Lightsail throws this exception when the user has not been authenticated . * @ sample AmazonLightsail . DeleteRelationalDatabase * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / lightsail - 2016-11-28 / DeleteRelationalDatabase " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeleteRelationalDatabaseResult deleteRelationalDatabase ( DeleteRelationalDatabaseRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteRelationalDatabase ( request ) ;
public class ContextPreprocessorIt { /** * Finds multiwords in context . * @ param context data structure of input label * @ return context with multiwords * @ throws ContextPreprocessorException ContextPreprocessorException */ private IContext findMultiwordsInContextStructure ( IContext context ) throws ContextPreprocessorException { } }
for ( Iterator < INode > i = context . getNodes ( ) ; i . hasNext ( ) ; ) { INode sourceNode = i . next ( ) ; // sense disambiguation within the context structure // for all ACoLs in the source node for ( Iterator < IAtomicConceptOfLabel > j = sourceNode . getNodeData ( ) . getACoLs ( ) ; j . hasNext ( ) ; ) { IAtomicConceptOfLabel synSource = j . next ( ) ; // in all descendants and ancestors findMultiwordsAmong ( sourceNode . getDescendants ( ) , synSource ) ; findMultiwordsAmong ( sourceNode . getAncestors ( ) , synSource ) ; } } return context ;
public class BindDataSourceSubProcessor { /** * Analyze round . * @ param annotations * the annotations * @ param roundEnv * the round env * @ return true , if successful */ public boolean analyzeRound ( Set < ? extends TypeElement > annotations , RoundEnvironment roundEnv ) { } }
parseBindType ( roundEnv ) ; // Put all @ BindSqlType elements in beanElements for ( Element item : roundEnv . getElementsAnnotatedWith ( BindSqlType . class ) ) { if ( item . getKind ( ) != ElementKind . CLASS ) { String msg = String . format ( "%s %s, only class can be annotated with @%s annotation" , item . getKind ( ) , item , BindSqlType . class . getSimpleName ( ) ) ; throw ( new InvalidKindForAnnotationException ( msg ) ) ; } globalBeanElements . put ( item . toString ( ) , ( TypeElement ) item ) ; } // we have generated bean too . // Put all @ BindDao elements in daoElements for ( Element item : roundEnv . getElementsAnnotatedWith ( BindDao . class ) ) { // dao generated will used to replace original dao , so it can not be // inserted like others . if ( item . getAnnotation ( BindGeneratedDao . class ) != null ) continue ; if ( item . getKind ( ) != ElementKind . INTERFACE ) { String msg = String . format ( "%s %s can not be annotated with @%s annotation, because it is not an interface" , item . getKind ( ) , item , BindDao . class . getSimpleName ( ) ) ; throw ( new InvalidKindForAnnotationException ( msg ) ) ; } globalDaoElements . put ( item . toString ( ) , ( TypeElement ) item ) ; } for ( Element item : roundEnv . getElementsAnnotatedWith ( BindDaoMany2Many . class ) ) { if ( item . getKind ( ) != ElementKind . INTERFACE ) { String msg = String . format ( "%s %s can not be annotated with @%s annotation, because it is not an interface" , item . getKind ( ) , item , BindDaoMany2Many . class . getSimpleName ( ) ) ; throw ( new InvalidKindForAnnotationException ( msg ) ) ; } globalDaoElements . put ( item . toString ( ) , ( TypeElement ) item ) ; } // Get all database schema definitions for ( Element item : roundEnv . getElementsAnnotatedWith ( BindDataSource . class ) ) { dataSets . add ( ( TypeElement ) item ) ; } // exit without error if ( dataSets . size ( ) == 0 ) return false ; // No bind type is present if ( globalDaoElements . 
size ( ) == 0 ) { throw ( new NoDaoElementFound ( ) ) ; } return false ;
public class ns_config_diff { /** * < pre > * Use this operation to get config diff between source and target configuration files in the tabular format . * < / pre > */ public static ns_config_diff diff_table ( nitro_service client , ns_config_diff resource ) throws Exception { } }
return ( ( ns_config_diff [ ] ) resource . perform_operation ( client , "diff_table" ) ) [ 0 ] ;
public class SipSessionImpl {
    /**
     * Called immediately when the conditions to invalidate this session are met.
     * The session is only invalidated once all derived sessions are themselves
     * invalid (or B2BUA-orphaned) and the invalidateWhenReady flag is set.
     */
    public void onReadyToInvalidate() {
        // An orphaned B2BUA session has no usable peer; mark it invalidatable now.
        if (isB2BUAOrphan()) {
            logger.debug("Session is B2BUA Orphaned, lets invalidate");
            setReadyToInvalidate(true);
        }
        if (!readyToInvalidate) {
            logger.debug("Session not ready to invalidate, wait next chance.");
            return;
        }
        // Check every derived session: the parent may only go away once each
        // derived session is invalid or orphaned.
        boolean allDerivedReady = true;
        Iterator<MobicentsSipSession> derivedSessionsIterator = this.getDerivedSipSessions();
        while (derivedSessionsIterator.hasNext()) {
            MobicentsSipSession mobicentsSipSession = (MobicentsSipSession) derivedSessionsIterator.next();
            boolean derivedIsOrphaned = mobicentsSipSession.isB2BUAOrphan();
            boolean derivedReady = !mobicentsSipSession.isValid() || derivedIsOrphaned;
            // Non-short-circuit '&': every derived session is evaluated.
            allDerivedReady = allDerivedReady & derivedReady;
        }
        if (logger.isDebugEnabled()) {
            String msg = String.format("Session [%s] onReadyToInvalidate, hasParent [%s], hasDerivedSessions [%s], will invalidate [%s]", key, parentSession != null, derivedSipSessions != null, allDerivedReady);
            logger.debug(msg);
        }
        if (!allDerivedReady) {
            logger.debug("Cant invalidate yet, lets wait until all derived to be ready.");
            return;
        } else {
            logger.debug("All Derived ready, lets proceed.");
        }
        if (logger.isDebugEnabled()) {
            logger.debug("invalidateWhenReady flag is set to " + invalidateWhenReady);
        }
        if (isValid() && this.invalidateWhenReady) {
            this.notifySipSessionListeners(SipSessionEventType.READYTOINVALIDATE);
            // If the application does not explicitly invalidate the session in the callback or has not defined a listener,
            // the container will invalidate the session.
            if (isValid()) {
                invalidate(true);
            }
        }
    }
}
public class Parser {
    /**
     * Generates the PostgreSQL SQL for a JDBC escaped function call.
     *
     * @param newsql destination StringBuilder
     * @param functionName the escaped function name
     * @param sql input SQL text (containing arguments of a function call with possible JDBC escapes)
     * @param i position in the input SQL (start of the argument list)
     * @param stdStrings whether standard_conforming_strings is on
     * @return the position in {@code sql} just past the parsed arguments
     * @throws SQLException if argument parsing or function translation fails
     */
    private static int escapeFunctionArguments(StringBuilder newsql, String functionName, char[] sql, int i, boolean stdStrings) throws SQLException {
        // Maximum arity of functions in EscapedFunctions is 3
        List<CharSequence> parsedArgs = new ArrayList<CharSequence>(3);
        // Split the argument list on top-level commas; parseSql handles nesting
        // and quoting so commas inside sub-expressions are not split points.
        while (true) {
            StringBuilder arg = new StringBuilder();
            int lastPos = i;
            i = parseSql(sql, i, arg, true, stdStrings);
            // Only record an argument if parseSql actually consumed characters
            // (an empty argument list produces no entries).
            if (i != lastPos) {
                parsedArgs.add(arg);
            }
            if (i >= sql.length // should not happen
                || sql[i] != ',') {
                break;
            }
            i++; // skip the comma and parse the next argument
        }
        // Look up the translator method for this escaped function name.
        Method method = EscapedFunctions2.getFunction(functionName);
        if (method == null) {
            // Unknown function: emit it verbatim as a normal call.
            newsql.append(functionName);
            EscapedFunctions2.appendCall(newsql, "(", ",", ")", parsedArgs);
            return i;
        }
        try {
            // Static translator: writes the PostgreSQL form into newsql.
            method.invoke(null, newsql, parsedArgs);
        } catch (InvocationTargetException e) {
            // Unwrap: translators signal user-level problems via SQLException.
            Throwable targetException = e.getTargetException();
            if (targetException instanceof SQLException) {
                throw (SQLException) targetException;
            } else {
                throw new PSQLException(targetException.getMessage(), PSQLState.SYSTEM_ERROR);
            }
        } catch (IllegalAccessException e) {
            throw new PSQLException(e.getMessage(), PSQLState.SYSTEM_ERROR);
        }
        return i;
    }
}
public class UriEscape { /** * Perform am URI fragment identifier < strong > escape < / strong > operation * on a < tt > String < / tt > input , writing results to a < tt > Writer < / tt > . * The following are the only allowed chars in an URI fragment identifier ( will not be escaped ) : * < ul > * < li > < tt > A - Z a - z 0-9 < / tt > < / li > * < li > < tt > - . _ ~ < / tt > < / li > * < li > < tt > ! $ & amp ; ' ( ) * + , ; = < / tt > < / li > * < li > < tt > : @ < / tt > < / li > * < li > < tt > / ? < / tt > < / li > * < / ul > * All other chars will be escaped by converting them to the sequence of bytes that * represents them in the specified < em > encoding < / em > and then representing each byte * in < tt > % HH < / tt > syntax , being < tt > HH < / tt > the hexadecimal representation of the byte . * This method is < strong > thread - safe < / strong > . * @ param text the < tt > String < / tt > to be escaped . * @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will * be written at all to this writer if input is < tt > null < / tt > . * @ param encoding the encoding to be used for escaping . * @ throws IOException if an input / output exception occurs * @ since 1.1.2 */ public static void escapeUriFragmentId ( final String text , final Writer writer , final String encoding ) throws IOException { } }
if ( writer == null ) { throw new IllegalArgumentException ( "Argument 'writer' cannot be null" ) ; } if ( encoding == null ) { throw new IllegalArgumentException ( "Argument 'encoding' cannot be null" ) ; } UriEscapeUtil . escape ( new InternalStringReader ( text ) , writer , UriEscapeUtil . UriEscapeType . FRAGMENT_ID , encoding ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getIfcActuatorTypeEnum ( ) { } }
if ( ifcActuatorTypeEnumEEnum == null ) { ifcActuatorTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 772 ) ; } return ifcActuatorTypeEnumEEnum ;
public class PCMMetadata { /** * Return the sorted features concordingly with metadata * @ return an ordered list of features */ public List < Feature > getSortedFeatures ( ) { } }
ArrayList < Feature > result = new ArrayList < > ( pcm . getConcreteFeatures ( ) ) ; Collections . sort ( result , new Comparator < Feature > ( ) { @ Override public int compare ( Feature f1 , Feature f2 ) { Integer fp1 = getFeaturePosition ( f1 ) ; Integer fp2 = getFeaturePosition ( f2 ) ; return fp1 . compareTo ( fp2 ) ; } } ) ; return result ;
public class CommonAhoCorasickSegmentUtil {
    /**
     * Reverse longest-match segmentation, merging runs of unknown characters
     * of the same character type into single terms.
     *
     * @param charArray the text to segment
     * @param trie the Aho-Corasick double-array trie (dictionary automaton)
     * @param <V> the dictionary value type
     * @return the resulting term list, in original text order
     */
    public static <V> LinkedList<ResultTerm<V>> segmentReverseOrder(final char[] charArray, AhoCorasickDoubleArrayTrie<V> trie) {
        LinkedList<ResultTerm<V>> termList = new LinkedList<ResultTerm<V>>();
        // wordNet[e] holds the longest dictionary word ENDING at position e
        // (1-based end index), hence length + 1 slots.
        final ResultTerm<V>[] wordNet = new ResultTerm[charArray.length + 1];
        trie.parseText(charArray, new AhoCorasickDoubleArrayTrie.IHit<V>() {
            @Override
            public void hit(int begin, int end, V value) {
                // Keep only the longest match for each end position.
                if (wordNet[end] == null || wordNet[end].word.length() < end - begin) {
                    wordNet[end] = new ResultTerm<V>(new String(charArray, begin, end - begin), value, begin);
                }
            }
        });
        // Walk the text from the end; prepend terms so the list stays in order.
        for (int i = charArray.length; i > 0;) {
            if (wordNet[i] == null) {
                // No dictionary word ends here: merge a backward run of
                // consecutive uncovered characters sharing the same char type.
                StringBuilder sbTerm = new StringBuilder();
                // NOTE(review): offset records the index of the run's LAST
                // character; the forward-order variant presumably records the
                // first — confirm this asymmetry is intended.
                int offset = i - 1;
                byte preCharType = CharType.get(charArray[offset]);
                while (i > 0 && wordNet[i] == null && CharType.get(charArray[i - 1]) == preCharType) {
                    sbTerm.append(charArray[i - 1]);
                    preCharType = CharType.get(charArray[i - 1]);
                    --i;
                }
                // Characters were appended back-to-front; reverse to restore text order.
                termList.addFirst(new ResultTerm<V>(sbTerm.reverse().toString(), null, offset));
            } else {
                // Dictionary word ends at i: take it and jump over its span.
                termList.addFirst(wordNet[i]);
                i -= wordNet[i].word.length();
            }
        }
        return termList;
    }
}
public class XML { /** * Adds an annotated Class to xmlJmapper structure . * @ param aClass Class to convert to XmlClass * @ return this instance of XML */ private XML addClass ( Class < ? > aClass ) { } }
xmlJmapper . classes . add ( Converter . toXmlClass ( aClass ) ) ; return this ;
public class UserProfileExample { /** * Print the user profiles and actions for all users with the provided last * name * This method demonstrates how to open a scanner with a start key . It ' s using * the composite dao , so the records it returns will be a composite of both * the profile model and actions model . * @ param lastName * The last name of users to scan . */ public void printUserProfileActionsForLastName ( String lastName ) { } }
// Create a partial key that will allow us to start the scanner from the // first user record that has last name equal to the one provided . PartitionKey startKey = new PartitionKey ( "lastName" ) ; // Get the scanner with the start key . Null for stopKey in the getScanner // method indicates that the scanner will scan to the end of the table . Our // loop will break out when it encounters a record without the last name . EntityScanner < UserProfileActionsModel > scanner = userProfileActionsDao . getScanner ( startKey , null ) ; scanner . initialize ( ) ; try { // scan until we find a last name not equal to the one provided for ( UserProfileActionsModel entity : scanner ) { if ( ! entity . getUserProfileModel ( ) . getLastName ( ) . equals ( lastName ) ) { // last name of row different , break out of the scan . break ; } System . out . println ( entity . toString ( ) ) ; } } finally { // scanners need to be closed . scanner . close ( ) ; }
public class Between { /** * Returns the maximum condition . */ public BoundedAssertion getMax ( ) { } }
ICondition max = null ; Iterator < ICondition > conditions = getConditions ( ) ; for ( int i = 0 ; i < 2 ; i ++ ) max = conditions . next ( ) ; return ( BoundedAssertion ) max ;
public class AdaptedIterator { /** * / * ( non - Javadoc ) * @ see java . util . Iterator # next ( ) */ public T next ( ) { } }
if ( cachedNext == null ) { throw new NoSuchElementException ( "call hasNext first!" ) ; } T o = cachedNext ; cachedNext = null ; return o ;
public class ConsumerSessionProxy {
    /**
     * Turns this consumer from a synchronous consumer into an asynchronous one,
     * registering the callback as the means by which messages are consumed.
     * Calling this method again discards the existing callback and replaces it
     * with the new one. Supplying {@code null} as the callback has exactly the
     * same effect as deregistering the consumer (returning it to synchronous).
     *
     * @param callback the asynchronous callback, or {@code null} to deregister
     * @param maxActiveMessages maximum number of concurrently active messages
     * @param messageLockExpiry message lock expiry interval
     * @param maxBatchSize maximum delivery batch size
     * @param orderingContext ordering context for delivery, may be {@code null}
     * @throws com.ibm.wsspi.sib.core.exception.SISessionUnavailableException
     * @throws com.ibm.wsspi.sib.core.exception.SISessionDroppedException
     * @throws com.ibm.wsspi.sib.core.exception.SIConnectionUnavailableException
     * @throws com.ibm.wsspi.sib.core.exception.SIConnectionDroppedException
     * @throws com.ibm.websphere.sib.exception.SIErrorException
     * @throws com.ibm.websphere.sib.exception.SIIncorrectCallException
     */
    public void registerAsynchConsumerCallback(AsynchConsumerCallback callback, int maxActiveMessages, long messageLockExpiry, int maxBatchSize, OrderingContext orderingContext)
        throws SISessionUnavailableException, SISessionDroppedException, SIConnectionUnavailableException, SIConnectionDroppedException, SIErrorException, SIIncorrectCallException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "registerAsynchConsumerCallback", new Object[] { callback, maxActiveMessages, messageLockExpiry, maxBatchSize, orderingContext });
        if (executingOnCallbackThread()) {
            // Re-registration from inside the asynch consumer callback itself:
            // do not touch the real session; record the change in the
            // callback-thread state so it is applied when the callback returns.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "called from within asynch consumer callback, callbackThreadState=" + callbackThreadState);
            if (callbackThreadState == CallbackThreadState.CLOSED || state == StateEnum.CLOSED || state == StateEnum.CLOSING) {
                // Session already closed (or closing): reject the call.
                throw new SISessionUnavailableException(nls.getFormattedMessage("SESSION_CLOSED_SICO1013", null, null));
            } else if (callbackThreadState == CallbackThreadState.STARTED_DEREGISTERED || callbackThreadState == CallbackThreadState.STARTED_REGISTERED) {
                // Callbacks may only be changed while the session is stopped.
                throw new SIIncorrectCallException(nls.getFormattedMessage("CALLBACK_CHANGE_WHILE_STARTED_SICO1015", null, null));
            } else if (callback == null) {
                // null callback == deregistration (back to synchronous).
                if (callbackThreadState == CallbackThreadState.STOPPED_REGISTERED) {
                    callbackThreadState = CallbackThreadState.STOPPED_DEREGISTERED;
                }
            } else {
                // Stash the new callback and its settings for application
                // after the current callback invocation completes.
                callbackThreadCallback = callback;
                callbackThreadMaxActiveMessages = maxActiveMessages;
                callbackThreadMessageLockExpiry = messageLockExpiry;
                callbackThreadMaxBatchSize = maxBatchSize;
                callbackThreadOrderingContext = orderingContext;
                callbackStoppable = false;
                if (callbackThreadState == CallbackThreadState.STOPPED_DEREGISTERED) {
                    callbackThreadState = CallbackThreadState.STOPPED_REGISTERED;
                }
            }
        } else {
            // Normal path: apply the registration immediately, holding both
            // locks (callback first, then state) to serialize with delivery.
            synchronized (callbackLock) {
                synchronized (synchLock) {
                    _registerAsynchConsumerCallback(callback, maxActiveMessages, messageLockExpiry, maxBatchSize, orderingContext, 0, // SIB0115d.comms
                        0, false); // 472879
                    if (proxyQueue != null)
                        proxyQueue.setAsynchCallback(callback, maxActiveMessages, messageLockExpiry, maxBatchSize, orderingContext, 0, // SIB0115d.comms
                            0, false); // 472879
                }
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "registerAsynchConsumerCallback");
    }
}
public class TypeMappings { /** * Get the input user display name mapping for the UserRegistry . * @ param inputVirtualRealm Virtual realm to find the mappings . * @ return The input user display name property . * @ pre inputVirtualRealm ! = null * @ pre inputVirtualRealm ! = " " * @ post $ return ! = " " * @ post $ return ! = null */ public String getInputUserDisplayName ( String inputVirtualRealm ) { } }
// initialize the return value String returnValue = getInputMapping ( inputVirtualRealm , Service . CONFIG_DO_USER_DISPLAY_NAME_MAPPING , USER_DISPLAY_NAME_DEFAULT ) ; return returnValue ;
public class Message { /** * Returns a copy of the array returned by { @ link Message # unsafeBitcoinSerialize ( ) } , which is safe to mutate . * If you need extra performance and can guarantee you won ' t write to the array , you can use the unsafe version . * @ return a freshly allocated serialized byte array */ public byte [ ] bitcoinSerialize ( ) { } }
byte [ ] bytes = unsafeBitcoinSerialize ( ) ; byte [ ] copy = new byte [ bytes . length ] ; System . arraycopy ( bytes , 0 , copy , 0 , bytes . length ) ; return copy ;
public class ConfigFactory { /** * Parses a file with a flexible extension . If the < code > fileBasename < / code > * already ends in a known extension , this method parses it according to * that extension ( the file ' s syntax must match its extension ) . If the * < code > fileBasename < / code > does not end in an extension , it parses files * with all known extensions and merges whatever is found . * In the current implementation , the extension " . conf " forces * { @ link ConfigSyntax # CONF } , " . json " forces { @ link ConfigSyntax # JSON } , and * " . properties " forces { @ link ConfigSyntax # PROPERTIES } . When merging files , * " . conf " falls back to " . json " falls back to " . properties " . * Future versions of the implementation may add additional syntaxes or * additional extensions . However , the ordering ( fallback priority ) of the * three current extensions will remain the same . * If < code > options < / code > forces a specific syntax , this method only parses * files with an extension matching that syntax . * If { @ link ConfigParseOptions # getAllowMissing options . getAllowMissing ( ) } * is true , then no files have to exist ; if false , then at least one file * has to exist . * @ param fileBasename * a filename with or without extension * @ param options * parse options * @ return the parsed configuration */ public static Config parseFileAnySyntax ( File fileBasename , ConfigParseOptions options ) { } }
return ConfigImpl . parseFileAnySyntax ( fileBasename , options ) . toConfig ( ) ;
public class LoggingConfiguration { /** * Replace the default log handler with the given log handler . * This will remove all { @ link CLISmartHandler } found on the root logger . It * will leave any other handlers in place . * @ param handler Logging handler . */ public static void replaceDefaultHandler ( Handler handler ) { } }
Logger rootlogger = LogManager . getLogManager ( ) . getLogger ( "" ) ; for ( Handler h : rootlogger . getHandlers ( ) ) { if ( h instanceof CLISmartHandler ) { rootlogger . removeHandler ( h ) ; } } addHandler ( handler ) ;
public class CeylonUtil { /** * Extracts a single file from a zip archive . * @ param in Input zip stream * @ param outdir Output directory * @ param name File name * @ throws IOException In case of IO error */ public static void extractFile ( final ZipInputStream in , final File outdir , final String name ) throws IOException { } }
byte [ ] buffer = new byte [ BUFFER_SIZE ] ; BufferedOutputStream out = new BufferedOutputStream ( new FileOutputStream ( new File ( outdir , name ) ) ) ; int count = - 1 ; while ( ( count = in . read ( buffer ) ) != - 1 ) { out . write ( buffer , 0 , count ) ; } out . close ( ) ;
public class ImpreciseDateTimeField {
    /**
     * Computes the difference between two instants, measured in the units of
     * this field, dropping any fractional units. Calling getDifference reverses
     * the effect of calling add: for
     * {@code age = getDifferenceAsLong(add(instant, v), instant)}, {@code age}
     * equals {@code v}.
     * <p>
     * This default implementation performs a guess-and-check algorithm using
     * {@code iUnitMillis} and the {@code add()} method; subclasses are
     * encouraged to provide a more efficient implementation.
     *
     * @param minuendInstant the milliseconds from 1970-01-01T00:00:00Z to subtract from
     * @param subtrahendInstant the milliseconds from 1970-01-01T00:00:00Z to subtract off the minuend
     * @return the difference in the units of this field
     */
    public long getDifferenceAsLong(long minuendInstant, long subtrahendInstant) {
        // Normalize so minuend >= subtrahend; negate the result on the way out.
        if (minuendInstant < subtrahendInstant) {
            return -getDifferenceAsLong(subtrahendInstant, minuendInstant);
        }
        // Initial guess: assume every unit is exactly iUnitMillis long.
        long difference = (minuendInstant - subtrahendInstant) / iUnitMillis;
        if (add(subtrahendInstant, difference) < minuendInstant) {
            // Guess undershot (units here are longer than average): walk up
            // until we pass the minuend, then step back one.
            do {
                difference++;
            } while (add(subtrahendInstant, difference) <= minuendInstant);
            difference--;
        } else if (add(subtrahendInstant, difference) > minuendInstant) {
            // Guess overshot: walk down until adding no longer passes the minuend.
            do {
                difference--;
            } while (add(subtrahendInstant, difference) > minuendInstant);
        }
        return difference;
    }
}
public class ExecuteCommandInstruction {
    /**
     * Validates this "execute" instruction: the command name must be present,
     * must reference an existing commands file, and must not create a direct or
     * indirect execution loop.
     *
     * @return the list of parsing errors found (empty when valid)
     * @see net.roboconf.core.commands.AbstractCommandInstruction#doValidate()
     */
    @Override
    public List<ParsingError> doValidate() {
        String fileName = this.commandName + Constants.FILE_EXT_COMMANDS;
        File commandsDirectory = new File(this.context.getApp().getDirectory(), Constants.PROJECT_DIR_COMMANDS);
        File commandFileToExecute;
        List<ParsingError> result = new ArrayList<>();
        if (Utils.isEmptyOrWhitespaces(this.commandName)) {
            result.add(error(ErrorCode.CMD_MISSING_COMMAND_NAME));
        }
        // Prevent a commands file from invoking itself recursively.
        // If the command was loaded from a file...
        else if (this.context.getCommandFile() != null && fileName.equals(this.context.getCommandFile().getName())) {
            result.add(error(ErrorCode.CMD_LOOPING_COMMAND, name(this.commandName)));
        }
        // The commands file to execute must exist.
        // (The condition assigns commandFileToExecute as a side effect.)
        else if (!(commandFileToExecute = new File(commandsDirectory, fileName)).exists()) {
            result.add(error(ErrorCode.CMD_INEXISTING_COMMAND, name(this.commandName)));
        }
        // If it exists, we do not want it to contain the same instruction.
        // Can happen if we execute a commands (not loaded from a file) that
        // executes a commands file with the same instruction...
        else {
            try {
                String content = Utils.readFileContent(commandFileToExecute);
                // Case-insensitive search for "<PREFIX> <commandName>" inside
                // the target file, which would loop back into this command.
                Pattern p = Pattern.compile(PREFIX + "\\s+" + Pattern.quote(this.commandName), Pattern.CASE_INSENSITIVE);
                if (p.matcher(content).find())
                    result.add(error(ErrorCode.CMD_NASTY_LOOPING_COMMAND, name(this.commandName)));
            } catch (IOException e) {
                // If we cannot load the file's content, do not push the validation further...
                Utils.logException(Logger.getLogger(getClass().getName()), e);
            }
        }
        return result;
    }
}
public class KeyManagerActor { /** * Fetching own private pre key by public key * @ param publicKey public key material for search */ private Promise < PrivateKey > fetchPreKey ( byte [ ] publicKey ) { } }
try { return Promise . success ( ManagedList . of ( ownKeys . getPreKeys ( ) ) . filter ( PrivateKey . PRE_KEY_EQUALS ( publicKey ) ) . first ( ) ) ; } catch ( Exception e ) { Log . d ( TAG , "Unable to find own pre key " + Crypto . keyHash ( publicKey ) ) ; for ( PrivateKey p : ownKeys . getPreKeys ( ) ) { Log . d ( TAG , "Have: " + Crypto . keyHash ( p . getPublicKey ( ) ) ) ; } throw e ; }
public class StatefulASActivationStrategy {
    /**
     * Overridden to enlist the bean in the current activity session, if one
     * exists.
     * <p>
     * Called when a user transaction or user activity session is beginning or
     * ending; an activity session is only active for the beginSession case.
     *
     * @param tx the container transaction context
     * @param bean the bean being enlisted
     */
    @Override
    void atEnlist(ContainerTx tx, BeanO bean) {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "atEnlist (" + tx + ", " + bean + ")");
        // Get the current ContainerAS.
        ContainerAS as = ContainerAS.getContainerAS(tx);
        // Allow the parent to properly enlist the bean in the transaction.
        // Really, just takes a pin, as processTxContextChange did the enlist.
        super.atEnlist(tx, bean);
        // If there is an AS, then perform any ActivitySession specific stuff...
        if (as != null) {
            if (isTraceOn && tc.isDebugEnabled())
                Tr.debug(tc, "atEnlist : running in AS : " + as);
            // Make sure the beanO is associated with the current TX.
            bean.setContainerTx(tx);
            // Enlist the bean with the AS; a no-op if already enlisted.
            // Another pin is not needed, nor does one need to be dropped.
            // Here are the possible scenarios:
            // 1 - UAS.beginSession: a pin will have been taken for the method
            //     call... that becomes the AS pin.
            // 2 - UAS completion: AS is not active, AS pin is now the pin
            //     for the current method call.
            // 3 - UTx.begin: either an AS is not present or the bean is already
            //     enlisted with the AS and pinned. If there is an AS, there is
            //     no method pin, as atActivate would not have taken one.
            // 4 - UTx completion: if an AS is present, it already has a pin
            //     and a method pin was not taken. If no AS, then method has
            //     a pin.
            // Net is, we must insure the bean is enlisted with the AS, as it
            // takes over ownership of one of the pins. If not enlisted here,
            // then the next activate will take an extra pin.            d655854
            as.enlist(bean);
        }
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "atEnlist");
    }
}
public class FsItemFilterUtils { /** * returns a FsItemFilter according to given mimeFilters * @ param mimeFilters * An array of MIME types , if < code > null < / code > no filtering is * done * @ return A filter that only accepts the supplied MIME types . */ public static FsItemFilter createMimeFilter ( final String [ ] mimeFilters ) { } }
if ( mimeFilters == null || mimeFilters . length == 0 ) return FILTER_ALL ; return new FsItemFilter ( ) { @ Override public boolean accepts ( FsItemEx item ) { String mimeType = item . getMimeType ( ) . toUpperCase ( ) ; for ( String mf : mimeFilters ) { mf = mf . toUpperCase ( ) ; if ( mimeType . startsWith ( mf + "/" ) || mimeType . equals ( mf ) ) return true ; } return false ; } } ;
public class GVRPose {
    /**
     * Sets the local matrix for this bone (relative to its parent bone).
     * <p>
     * All bones start out at the origin oriented along the bone axis (usually
     * 0,0,1); the pose orients and positions each bone relative to this initial
     * state. The local bone matrix expresses the orientation and position of
     * the bone relative to its parent.
     *
     * @param boneindex zero-based index of the bone to set the matrix for
     * @param mtx new local bone matrix
     * @see #getLocalRotation
     * @see #setWorldRotation
     * @see #getWorldMatrix
     */
    public void setLocalMatrix(int boneindex, Matrix4f mtx) {
        Bone bone = mBones[boneindex];
        int parentid = mSkeleton.getParentBoneIndex(boneindex);
        bone.LocalMatrix.set(mtx);
        // Flag the local transform as modified so world matrices get rebuilt.
        bone.Changed = Bone.LOCAL_ROT;
        if (parentid < 0) {
            // Root bone: its world matrix equals its local matrix directly.
            bone.WorldMatrix.set(bone.LocalMatrix);
        } else {
            // Non-root: defer world-matrix recomputation to the next sync pass.
            mNeedSync = true;
        }
        if (sDebug) {
            Log.d("BONE", "setLocalMatrix: %s %s", mSkeleton.getBoneName(boneindex), bone.toString());
        }
    }
}
public class PatientResourceProvider { /** * This is the " read " operation . The " @ Read " annotation indicates that this method supports the read and / or vread operation . * Read operations take a single parameter annotated with the { @ link IdParam } paramater , and should return a single resource instance . * @ param theId * The read operation takes one parameter , which must be of type IdDt and must be annotated with the " @ Read . IdParam " annotation . * @ return Returns a resource matching this identifier , or null if none exists . */ @ Read ( version = true ) public Patient readPatient ( @ IdParam IdDt theId ) { } }
Deque < Patient > retVal ; try { retVal = myIdToPatientVersions . get ( theId . getIdPartAsLong ( ) ) ; } catch ( NumberFormatException e ) { /* * If we can ' t parse the ID as a long , it ' s not valid so this is an unknown resource */ throw new ResourceNotFoundException ( theId ) ; } if ( theId . hasVersionIdPart ( ) == false ) { return retVal . getLast ( ) ; } else { for ( Patient nextVersion : retVal ) { String nextVersionId = nextVersion . getId ( ) . getVersionIdPart ( ) ; if ( theId . getVersionIdPart ( ) . equals ( nextVersionId ) ) { return nextVersion ; } } // No matching version throw new ResourceNotFoundException ( "Unknown version: " + theId . getValue ( ) ) ; }
public class SqlLineOpts {
    /**
     * Builds and returns a {@link org.jline.builtins.Completers.RegexCompleter}
     * for the <code>!set</code> command based on (in decreasing order of
     * priority):
     * <ul>
     * <li>Customizations via {@code customCompletions}</li>
     * <li>Available values defined in {@link BuiltInProperty}</li>
     * <li>{@link Type} of property (currently only boolean gets completion)</li>
     * </ul>
     *
     * @param customCompletions custom completion values per property
     * @return a singleton list with the built RegexCompleter
     */
    public List<Completer> setOptionCompleters(Map<BuiltInProperty, Collection<String>> customCompletions) {
        // comp maps regex token names to their completers; sb accumulates the
        // regex describing valid "!set <property> [<value>]" token sequences.
        Map<String, Completer> comp = new HashMap<>();
        final String start = "START";
        comp.put(start, new StringsCompleter("!set"));
        Collection<BuiltInProperty> booleanProperties = new ArrayList<>();
        Collection<BuiltInProperty> withDefinedAvailableValues = new ArrayList<>();
        StringBuilder sb = new StringBuilder(start + " (");
        // Partition the properties: customized ones are handled last, ones with
        // declared value sets and boolean ones get value completion, the rest
        // only complete the property name itself.
        for (BuiltInProperty property : BuiltInProperty.values()) {
            if (customCompletions.containsKey(property)) {
                continue;
            } else if (!property.getAvailableValues().isEmpty()) {
                withDefinedAvailableValues.add(property);
            } else if (property.type() == Type.BOOLEAN) {
                booleanProperties.add(property);
            } else {
                sb.append(property.name()).append(" | ");
                comp.put(property.name(), new StringsCompleter(property.propertyName()));
            }
        }
        // All boolean properties without defined available values and not
        // customized via customCompletions share the values declared in
        // SqlLineProperty.BOOLEAN_VALUES.
        final String booleanTypeString = Type.BOOLEAN.toString();
        sb.append(booleanTypeString);
        comp.put(booleanTypeString, new StringsCompleter(booleanProperties.stream().map(BuiltInProperty::propertyName).toArray(String[]::new)));
        final String booleanPropertyValueKey = booleanTypeString + "_value";
        comp.put(booleanPropertyValueKey, new StringsCompleter(BuiltInProperty.BOOLEAN_VALUES));
        sb.append(" ").append(booleanPropertyValueKey);
        // If a property has defined values they will be used for autocompletion.
        for (BuiltInProperty property : withDefinedAvailableValues) {
            final String propertyName = property.propertyName();
            sb.append(" | ").append(propertyName);
            comp.put(propertyName, new StringsCompleter(propertyName));
            final String propertyValueKey = propertyName + "_value";
            comp.put(propertyValueKey, new StringsCompleter(property.getAvailableValues().toArray(new String[0])));
            sb.append(" ").append(propertyValueKey);
        }
        // Finally, the custom completions take precedence for their properties.
        for (Map.Entry<BuiltInProperty, Collection<String>> mapEntry : customCompletions.entrySet()) {
            final String propertyName = mapEntry.getKey().propertyName();
            comp.put(propertyName, new StringsCompleter(propertyName));
            final String propertyValueKey = propertyName + "_value";
            comp.put(propertyValueKey, new StringsCompleter(mapEntry.getValue().toArray(new String[0])));
            sb.append("| ").append(propertyName).append(" ").append(propertyValueKey);
        }
        sb.append(") ");
        return Collections.singletonList(new Completers.RegexCompleter(sb.toString(), comp::get));
    }
}
public class SecurityMarks { /** * Use { @ link # getMarksMap ( ) } instead . */ @ java . lang . Deprecated public java . util . Map < java . lang . String , java . lang . String > getMarks ( ) { } }
return getMarksMap ( ) ;
public class FilePath {
    /**
     * When this {@link FilePath} represents a tar file, extracts that tar file.
     *
     * @param target target directory to expand files into; all necessary
     *        directories will be created
     * @param compression compression mode of this tar file
     * @since 1.292
     * @see #untarFrom(InputStream, TarCompression)
     */
    public void untar(final FilePath target, final TarCompression compression) throws IOException, InterruptedException {
        // TODO: post release, re-unite the two branches by introducing a
        // FileStreamCallable that resolves an InputStream.
        if (this.channel == target.channel) {
            // Source and target live on the same node (local->local or
            // remote->remote): extract directly.
            target.act(new UntarLocal(compression));
        } else {
            // Different nodes (local->remote or remote->local): stream the tar
            // content across the remoting channel.
            final RemoteInputStream in = new RemoteInputStream(read(), Flag.GREEDY);
            target.act(new UntarRemote(compression, in));
        }
    }
}
public class BaseMonetaryConversionsSingletonSpi { /** * Allows to quickly check , if a { @ link javax . money . convert . CurrencyConversion } is accessible for the given * { @ link javax . money . convert . ConversionQuery } . * @ param termCurrency the terminating / target currency unit , not null . * @ param providers the provider names defines a corresponding * provider chain that must be encapsulated by the resulting { @ link javax * . money . convert . CurrencyConversion } . By default the provider * chain as defined by # getDefaultRoundingProviderChain will be used . * @ return { @ code true } , if such a conversion is supported , meaning an according * { @ link javax . money . convert . CurrencyConversion } can be * accessed . * @ see # getConversion ( javax . money . convert . ConversionQuery ) * @ see # getConversion ( javax . money . CurrencyUnit , String . . . ) } */ public boolean isConversionAvailable ( CurrencyUnit termCurrency , String ... providers ) { } }
return isConversionAvailable ( ConversionQueryBuilder . of ( ) . setTermCurrency ( termCurrency ) . setProviderNames ( providers ) . build ( ) ) ;
public class JoinObservable { /** * Joins the results from a pattern via its plan . * < img width = " 640 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / and _ then _ when . png " alt = " " > * @ param p1 * the plan to join , created by use of the { @ link # then } Observer on a pattern * @ return an Observable that emits the results from matching a pattern * @ see < a href = " https : / / github . com / ReactiveX / RxJava / wiki / Combining - Observables # wiki - and - then - and - when " > RxJava Wiki : when ( ) < / a > * @ see < a href = " http : / / msdn . microsoft . com / en - us / library / hh229889 . aspx " > MSDN : Observable . When < / a > */ @ SuppressWarnings ( "unchecked" ) public final static < R > JoinObservable < R > when ( Plan0 < R > p1 ) { } }
return from ( Observable . create ( OperatorJoinPatterns . when ( p1 ) ) ) ;
public class JmsConnectionImpl { /** * This method is called in order to mark that the clientID may now change . */ void unfixClientID ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "unfixClientID" ) ; synchronized ( stateLock ) { clientIDFixed = false ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "unfixClientID" ) ;
public class URLCodec { /** * Convenience method for { @ link # decode ( CharSequence , OutputStream ) } . */ public static byte [ ] decodeToBytes ( CharSequence in ) throws IOException { } }
if ( in == null ) return null ; ByteArrayOutputStream baos = new ByteArrayOutputStream ( in . length ( ) ) ; decode ( in , baos ) ; return baos . toByteArray ( ) ;
public class UserTaskAdminServiceBase { /** * Helper methods */ protected OrganizationalEntity [ ] convert ( List < String > orgEntities , boolean isUser ) { } }
return orgEntities . stream ( ) . map ( isUser ? mapToUser : mapToGroup ) . toArray ( size -> new OrganizationalEntity [ size ] ) ;
public class GELFMessageChunk { /** * lol duplication */ private void extractSequenceCount ( ) { } }
if ( this . sequenceCount == - 1 ) { final int seqCnt = payload . getUnsignedByte ( HEADER_PART_SEQCNT_START ) ; if ( seqCnt >= 0 ) { this . sequenceCount = seqCnt ; } else { throw new IllegalStateException ( "Could not extract sequence count" ) ; } }
public class MainWindow { /** * Start the worker thread . */ private void startWorker ( ) { } }
// Factory that (re)establishes the connection to the job service, posting
// status-bar updates to the EDT while it works.
JobServiceFactory serviceFactory = new JobServiceFactory ( ) {
    private boolean first = true ;
    public JobService connect ( ) {
        SwingUtilities . invokeLater ( new Runnable ( ) { public void run ( ) { setStatus ( "Connecting..." ) ; } } ) ;
        // First invocation performs the initial connect; later ones reconnect.
        JobService service = first ? MainWindow . this . connect ( ) : reconnect ( ) ;
        if ( service == null ) { setVisible ( false ) ; throw new RuntimeException ( "Unable to connect" ) ; }
        SwingUtilities . invokeLater ( new Runnable ( ) { public void run ( ) { setStatus ( "Connected" ) ; } } ) ;
        first = false ;
        return service ;
    }
    // NOTE(review): a dead, commented-out alternative connect() implementation
    // (invokeAndWait-based) was removed here for readability.
} ;
// Clamp the configured CPU count to [1, availableCpus]; <0 means "use preference".
int availableCpus = Runtime . getRuntime ( ) . availableProcessors ( ) ;
if ( options . numberOfCpus < 0 ) { options . numberOfCpus = pref . getInt ( "maxCpus" , 0 ) ; }
if ( options . numberOfCpus <= 0 || options . numberOfCpus > availableCpus ) { options . numberOfCpus = availableCpus ; }
// Shut down and wait for any previous worker before starting a new one.
if ( worker != null ) {
    setStatus ( "Shutting down worker..." ) ;
    worker . shutdown ( ) ;
    try { workerThread . join ( ) ; } catch ( InterruptedException e ) { } // best-effort wait; proceed on interrupt
    progressPanel . clear ( ) ;
}
setStatus ( "Starting worker..." ) ;
// Use the power monitor when available; otherwise always allow work.
CourtesyMonitor courtesyMonitor = ( powerMonitor != null ) ? powerMonitor : new UnconditionalCourtesyMonitor ( ) ;
ThreadFactory threadFactory = new BackgroundThreadFactory ( ) ;
worker = new ThreadServiceWorker ( serviceFactory , threadFactory , getProgressPanel ( ) , courtesyMonitor ) ;
worker . setMaxWorkers ( options . numberOfCpus ) ;
// Optionally back the worker with an embedded Derby database to cache class definitions.
boolean cacheClassDefinitions = pref . getBoolean ( "cacheClassDefinitions" , true ) ;
if ( cacheClassDefinitions ) {
    setStatus ( "Preparing data source..." ) ;
    EmbeddedDataSource ds = null ;
    try {
        Class . forName ( "org.apache.derby.jdbc.EmbeddedDriver" ) ;
        ds = new EmbeddedDataSource ( ) ;
        ds . setConnectionAttributes ( "create=true" ) ;
        ds . setDatabaseName ( "classes" ) ;
        worker . setDataSource ( ds ) ;
    } catch ( ClassNotFoundException e ) {
        logger . error ( "Could not locate database driver." , e ) ;
    } catch ( SQLException e ) {
        logger . error ( "Error occurred while initializing data source." , e ) ;
    }
}
onPreferencesChanged ( ) ;
// Launch the worker on its own background thread.
workerThread = new Thread ( worker ) ;
workerThread . start ( ) ;
public class StreamMetadataResourceImpl { /** * Implementation of deleteScope REST API . * @ param scopeName The scope name of stream . * @ param securityContext The security for API access . * @ param asyncResponse AsyncResponse provides means for asynchronous server side response processing . */ @ Override public void deleteScope ( final String scopeName , final SecurityContext securityContext , final AsyncResponse asyncResponse ) { } }
long traceId = LoggerHelpers . traceEnter ( log , "deleteScope" ) ; try { restAuthHelper . authenticateAuthorize ( getAuthorizationHeader ( ) , AuthResourceRepresentation . ofScopes ( ) , READ_UPDATE ) ; } catch ( AuthException e ) { log . warn ( "Delete scope for {} failed due to authentication failure." , scopeName ) ; asyncResponse . resume ( Response . status ( Status . fromStatusCode ( e . getResponseCode ( ) ) ) . build ( ) ) ; LoggerHelpers . traceLeave ( log , "createStream" , traceId ) ; return ; } controllerService . deleteScope ( scopeName ) . thenApply ( scopeStatus -> { if ( scopeStatus . getStatus ( ) == DeleteScopeStatus . Status . SUCCESS ) { log . info ( "Successfully deleted scope: {}" , scopeName ) ; return Response . status ( Status . NO_CONTENT ) . build ( ) ; } else if ( scopeStatus . getStatus ( ) == DeleteScopeStatus . Status . SCOPE_NOT_FOUND ) { log . warn ( "Scope: {} not found" , scopeName ) ; return Response . status ( Status . NOT_FOUND ) . build ( ) ; } else if ( scopeStatus . getStatus ( ) == DeleteScopeStatus . Status . SCOPE_NOT_EMPTY ) { log . warn ( "Cannot delete scope: {} with non-empty streams" , scopeName ) ; return Response . status ( Status . PRECONDITION_FAILED ) . build ( ) ; } else { log . warn ( "deleteScope for {} failed" , scopeName ) ; return Response . status ( Status . INTERNAL_SERVER_ERROR ) . build ( ) ; } } ) . exceptionally ( exception -> { log . warn ( "deleteScope for {} failed with exception: {}" , scopeName , exception ) ; return Response . status ( Status . INTERNAL_SERVER_ERROR ) . build ( ) ; } ) . thenApply ( asyncResponse :: resume ) . thenAccept ( x -> LoggerHelpers . traceLeave ( log , "deleteScope" , traceId ) ) ;
public class PolicyChecker { /** * Removes those nodes which do not intersect with the initial policies * specified by the user . * @ param rootNode the root node of the valid policy tree * @ param certIndex the index of the certificate being processed * @ param initPolicies the Set of policies required by the user * @ param currCertPolicies the CertificatePoliciesExtension of the * certificate being processed * @ returns the root node of the valid policy tree after modification * @ exception CertPathValidatorException Exception thrown if error occurs . */ private static PolicyNodeImpl removeInvalidNodes ( PolicyNodeImpl rootNode , int certIndex , Set < String > initPolicies , CertificatePoliciesExtension currCertPolicies ) throws CertPathValidatorException { } }
List < PolicyInformation > policyInfo = null ; try { policyInfo = currCertPolicies . get ( CertificatePoliciesExtension . POLICIES ) ; } catch ( IOException ioe ) { throw new CertPathValidatorException ( "Exception while " + "retrieving policyOIDs" , ioe ) ; } boolean childDeleted = false ; for ( PolicyInformation curPolInfo : policyInfo ) { String curPolicy = curPolInfo . getPolicyIdentifier ( ) . getIdentifier ( ) . toString ( ) ; if ( debug != null ) debug . println ( "PolicyChecker.processPolicies() " + "processing policy second time: " + curPolicy ) ; Set < PolicyNodeImpl > validNodes = rootNode . getPolicyNodesValid ( certIndex , curPolicy ) ; for ( PolicyNodeImpl curNode : validNodes ) { PolicyNodeImpl parentNode = ( PolicyNodeImpl ) curNode . getParent ( ) ; if ( parentNode . getValidPolicy ( ) . equals ( ANY_POLICY ) ) { if ( ( ! initPolicies . contains ( curPolicy ) ) && ( ! curPolicy . equals ( ANY_POLICY ) ) ) { if ( debug != null ) debug . println ( "PolicyChecker.processPolicies() " + "before deleting: policy tree = " + rootNode ) ; parentNode . deleteChild ( curNode ) ; childDeleted = true ; if ( debug != null ) debug . println ( "PolicyChecker.processPolicies() " + "after deleting: policy tree = " + rootNode ) ; } } } } if ( childDeleted ) { rootNode . prune ( certIndex ) ; if ( ! rootNode . getChildren ( ) . hasNext ( ) ) { rootNode = null ; } } return rootNode ;
public class PolynomialSolver { /** * Finds real and imaginary roots in a polynomial using the companion matrix and * Eigenvalue decomposition . The coefficients order is specified from smallest to largest . * Example , 5 + 6 * x + 7 * x ^ 2 + 8 * x ^ 3 = [ 5,6,7,8] * @ param coefficients Polynomial coefficients from smallest to largest . * @ return The found roots . */ @ SuppressWarnings ( "ToArrayCallWithZeroLengthArrayArgument" ) public static Complex_F64 [ ] polynomialRootsEVD ( double ... coefficients ) { } }
PolynomialRoots alg = new RootFinderCompanion ( ) ; if ( ! alg . process ( Polynomial . wrap ( coefficients ) ) ) throw new IllegalArgumentException ( "Algorithm failed, was the input bad?" ) ; List < Complex_F64 > coefs = alg . getRoots ( ) ; return coefs . toArray ( new Complex_F64 [ 0 ] ) ;
public class ClusteringFeature { /** * Calculates the k - means costs of the ClusteringFeature and a point too a * center . * @ param center * the center too calculate the costs * @ param point * the point too calculate the costs * @ return the costs */ public double calcKMeansCosts ( double [ ] center , double [ ] point ) { } }
assert ( this . sumPoints . length == center . length && this . sumPoints . length == point . length ) ; return ( this . sumSquaredLength + Metric . distanceSquared ( point ) ) - 2 * Metric . dotProductWithAddition ( this . sumPoints , point , center ) + ( this . numPoints + 1 ) * Metric . dotProduct ( center ) ;
public class DiscordApiImpl { /** * Gets a known custom emoji or creates a new ( unknown ) custom emoji object . * @ param id The id of the emoji . * @ param name The name of the emoji . * @ param animated Whether the emoji is animated or not . * @ return The emoji for the given json object . */ public CustomEmoji getKnownCustomEmojiOrCreateCustomEmoji ( long id , String name , boolean animated ) { } }
CustomEmoji emoji = customEmojis . get ( id ) ; return emoji == null ? new CustomEmojiImpl ( this , id , name , animated ) : emoji ;
public class Matrices { /** * Makes a maximum matrix accumulator that accumulates the maximum of matrix elements . * @ return a maximum vector accumulator */ public static MatrixAccumulator mkMaxAccumulator ( ) { } }
return new MatrixAccumulator ( ) { private double result = Double . NEGATIVE_INFINITY ; @ Override public void update ( int i , int j , double value ) { result = Math . max ( result , value ) ; } @ Override public double accumulate ( ) { double value = result ; result = Double . NEGATIVE_INFINITY ; return value ; } } ;
public class AbstractStrategy { /** * { @ inheritDoc } */ @ Override public void printGroundtruth ( final Long user , final PrintStream out , final OUTPUT_FORMAT format ) { } }
final Map < Long , Double > relItems = new HashMap < Long , Double > ( ) ; for ( Long i : test . getUserItems ( user ) ) { Double d = test . getUserItemPreference ( user , i ) ; if ( d >= threshold ) { relItems . put ( i , d ) ; } } printGroundtruth ( "" + user , relItems , out , format ) ;
public class ConcurrentLinkedDeque { /** * Unlinks non - null first node . */ private void unlinkFirst ( Node < E > first , Node < E > next ) { } }
// assert first ! = null ; // assert next ! = null ; // assert first . item = = null ; for ( Node < E > o = null , p = next , q ; ; ) { if ( p . item != null || ( q = p . next ) == null ) { if ( o != null && p . prev != p && first . casNext ( next , p ) ) { skipDeletedPredecessors ( p ) ; if ( first . prev == null && ( p . next == null || p . item != null ) && p . prev == first ) { updateHead ( ) ; // Ensure o is not reachable from head updateTail ( ) ; // Ensure o is not reachable from tail // Finally , actually gc - unlink o . lazySetNext ( sentinel ( ) ) ; o . lazySetPrev ( prevTerminator ( ) ) ; } } return ; } else if ( sentinel ( ) == q ) // j2objc : q = = p . next = = sentinel means node GC - unlinked return ; else { o = p ; p = q ; } }
public class Check { /** * Ensures that a passed map as a parameter of the calling method is not empty . * We recommend to use the overloaded method { @ link Check # notEmpty ( Collection , String ) } and pass as second argument * the name of the parameter to enhance the exception message . * @ param map * a map which should not be empty * @ return the passed reference that is not empty * @ throws IllegalNullArgumentException * if the given argument { @ code map } is { @ code null } * @ throws IllegalEmptyArgumentException * if the given argument { @ code map } is empty */ @ ArgumentsChecked @ Throws ( { } }
IllegalNullArgumentException . class , IllegalEmptyArgumentException . class } ) public static < T extends Map < ? , ? > > T notEmpty ( @ Nonnull final T map ) { notNull ( map ) ; notEmpty ( map , map . isEmpty ( ) , EMPTY_ARGUMENT_NAME ) ; return map ;
public class TypeElementCatalog { /** * Return all of the classes specified on the command - line that belong to the given package . * @ param packageName the name of the package specified on the command - line . */ public SortedSet < TypeElement > allUnnamedClasses ( ) { } }
for ( PackageElement pkg : allClasses . keySet ( ) ) { if ( pkg . isUnnamed ( ) ) { return allClasses . get ( pkg ) ; } } return new TreeSet < > ( comparator ) ;
public class BasicRandomRoutingTable { /** * Internal helper method . * Implements the policy for removing multiple neighbors from the random * ordering . By default , this removes all the neighbors specified and * leaves the list otherwise in the same ordering . * This method assumes that any necessary locking is performed externally . */ protected void removeNeighborsFromOrdering ( final Collection < TrustGraphNodeId > neighbors ) { } }
final Set < TrustGraphNodeId > killSet = new HashSet < TrustGraphNodeId > ( neighbors ) ; ListIterator < TrustGraphNodeId > i = orderedNeighbors . listIterator ( ) ; while ( i . hasNext ( ) ) { if ( killSet . contains ( i . next ( ) ) ) { i . remove ( ) ; } }
public class PushNotificationPayload { /** * Create a custom alert ( if none exist ) and add sub - parameters for the title - loc - key parameter . * @ param args * @ throws JSONException */ public void addCustomAlertTitleLocArgs ( List args ) throws JSONException { } }
Object value = args != null && ! args . isEmpty ( ) ? args : new JSONNull ( ) ; put ( "title-loc-args" , value , getOrAddCustomAlert ( ) , false ) ;
public class InstanceHelpers { /** * Finds an instance by name . * @ param application the application * @ param instancePath the instance path * @ return an instance , or null if it was not found */ public static Instance findInstanceByPath ( AbstractApplication application , String instancePath ) { } }
Collection < Instance > currentList = new ArrayList < > ( ) ; if ( application != null ) currentList . addAll ( application . getRootInstances ( ) ) ; List < String > instanceNames = new ArrayList < > ( ) ; if ( instancePath != null ) instanceNames . addAll ( Arrays . asList ( instancePath . split ( "/" ) ) ) ; if ( instanceNames . size ( ) > 0 && Utils . isEmptyOrWhitespaces ( instanceNames . get ( 0 ) ) ) instanceNames . remove ( 0 ) ; // Every path segment points to an instance Instance result = null ; for ( String instanceName : instanceNames ) { result = null ; for ( Instance instance : currentList ) { if ( instanceName . equals ( instance . getName ( ) ) ) { result = instance ; break ; } } // The segment does not match any instance if ( result == null ) break ; // Otherwise , prepare the next iteration currentList = result . getChildren ( ) ; } return result ;
public class JsApiMessageImpl { /** * Return a boolean indicating whether a User Property with the given name * exists in the message . * Javadoc description supplied by JsApiMessage interface . */ @ Override public boolean userPropertyExists ( String name ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "userPropertyExists" , name ) ; boolean result = false ; /* If the name is null there is nothing to do . so only proceed if it is */ /* supplied . */ if ( name != null ) { /* Got to check Maelstrom ' s transportVersion first as performance for it */ /* is critical . */ if ( name . equals ( MfpConstants . PRP_TRANSVER ) && isTransportVersionSet ( ) ) { result = true ; } /* otherwise , first try the JMS user property map as the most likely */ else if ( ( mayHaveJmsUserProperties ( ) ) && ( getJmsUserPropertyMap ( ) . containsKey ( name ) ) ) { result = true ; } /* then try the non - JMS user property map */ else if ( ( mayHaveOtherUserProperties ( ) ) && ( getOtherUserPropertyMap ( ) . containsKey ( name ) ) ) { result = true ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "userPropertyExists" , result ) ; return result ;
public class CollectionLiteralsTypeComputer { /** * Creates a map type reference that comes as close as possible / necessary to its expected type . */ protected LightweightTypeReference createMapTypeReference ( JvmGenericType mapType , LightweightTypeReference pairType , LightweightTypeReference expectation , ITypeReferenceOwner owner ) { } }
List < LightweightTypeReference > leftAndRight = pairType . getTypeArguments ( ) ; LightweightTypeReference left = leftAndRight . get ( 0 ) . getInvariantBoundSubstitute ( ) ; LightweightTypeReference right = leftAndRight . get ( 1 ) . getInvariantBoundSubstitute ( ) ; LightweightTypeReference mapExpectation = getMapExpectation ( expectation ) ; if ( mapExpectation != null ) { List < LightweightTypeReference > typeArguments = expectation . getTypeArguments ( ) ; left = doNormalizeElementType ( left , typeArguments . get ( 0 ) ) ; right = doNormalizeElementType ( right , typeArguments . get ( 1 ) ) ; } ParameterizedTypeReference result = owner . newParameterizedTypeReference ( mapType ) ; result . addTypeArgument ( left . copyInto ( owner ) ) ; result . addTypeArgument ( right . copyInto ( owner ) ) ; if ( mapExpectation != null && ! expectation . isAssignableFrom ( result ) ) { // expectation does not match the computed type , but looks good according to the element types : // use expected type if ( matchesExpectation ( left , mapExpectation . getTypeArguments ( ) . get ( 0 ) ) && matchesExpectation ( right , mapExpectation . getTypeArguments ( ) . get ( 1 ) ) ) { return expectation ; } } return result ;
public class JCusparse { /** * Returns number of bytes */ public static int cusparseCsrmvEx_bufferSize ( cusparseHandle handle , int alg , int transA , int m , int n , int nnz , Pointer alpha , int alphatype , cusparseMatDescr descrA , Pointer csrValA , int csrValAtype , Pointer csrRowPtrA , Pointer csrColIndA , Pointer x , int xtype , Pointer beta , int betatype , Pointer y , int ytype , int executiontype , long [ ] bufferSizeInBytes ) { } }
return checkResult ( cusparseCsrmvEx_bufferSizeNative ( handle , alg , transA , m , n , nnz , alpha , alphatype , descrA , csrValA , csrValAtype , csrRowPtrA , csrColIndA , x , xtype , beta , betatype , y , ytype , executiontype , bufferSizeInBytes ) ) ;
public class OutputUtil {

    /**
     * Appends all elements of {@code array} to {@code sb}, separated by {@code delimiter}.
     *
     * @param <T> type of elements stored in {@code array}
     * @param sb StringBuilder to be modified
     * @param array array of elements to append
     * @param delimiter delimiter placed between consecutive elements
     * @return the modified {@code sb}, to allow chaining
     */
    public static <T> StringBuilder appendArray(StringBuilder sb, T[] array, String delimiter) {
        boolean firstRun = true;
        for (T elem : array) {
            if (!firstRun) {
                sb.append(delimiter);
            } else {
                firstRun = false;
            }
            // FIX: append(elem) instead of append(elem.toString()) — identical output
            // for non-null elements, but a null element now renders as "null"
            // rather than throwing a NullPointerException.
            sb.append(elem);
        }
        return sb;
    }
}
public class AWSCognitoIdentityProviderClient { /** * Signs out users from all devices . * @ param globalSignOutRequest * Represents the request to sign out all devices . * @ return Result of the GlobalSignOut operation returned by the service . * @ throws ResourceNotFoundException * This exception is thrown when the Amazon Cognito service cannot find the requested resource . * @ throws InvalidParameterException * This exception is thrown when the Amazon Cognito service encounters an invalid parameter . * @ throws NotAuthorizedException * This exception is thrown when a user is not authorized . * @ throws TooManyRequestsException * This exception is thrown when the user has made too many requests for a given operation . * @ throws PasswordResetRequiredException * This exception is thrown when a password reset is required . * @ throws UserNotConfirmedException * This exception is thrown when a user is not confirmed successfully . * @ throws InternalErrorException * This exception is thrown when Amazon Cognito encounters an internal error . * @ sample AWSCognitoIdentityProvider . GlobalSignOut * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / GlobalSignOut " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GlobalSignOutResult globalSignOut ( GlobalSignOutRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGlobalSignOut ( request ) ;
public class GetBuiltinSlotTypesRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetBuiltinSlotTypesRequest getBuiltinSlotTypesRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getBuiltinSlotTypesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getBuiltinSlotTypesRequest . getLocale ( ) , LOCALE_BINDING ) ; protocolMarshaller . marshall ( getBuiltinSlotTypesRequest . getSignatureContains ( ) , SIGNATURECONTAINS_BINDING ) ; protocolMarshaller . marshall ( getBuiltinSlotTypesRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( getBuiltinSlotTypesRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Choice6 { /** * Static factory method for wrapping a value of type < code > A < / code > in a { @ link Choice6 } . * @ param a the value * @ param < A > the first possible type * @ param < B > the second possible type * @ param < C > the third possible type * @ param < D > the fourth possible type * @ param < E > the fifth possible type * @ param < F > the sixth possible type * @ return the wrapped value as a { @ link Choice6 } & lt ; A , B , C , D , E , F & gt ; */ public static < A , B , C , D , E , F > Choice6 < A , B , C , D , E , F > a ( A a ) { } }
return new _A < > ( a ) ;
public class CancelableSwap { /** * This method returns the value random variable of the product within the specified model , evaluated at a given evalutationTime . * Note : For a lattice this is often the value conditional to evalutationTime , for a Monte - Carlo simulation this is the ( sum of ) value discounted to evaluation time . * Cashflows prior evaluationTime are not considered . * @ param evaluationTime The time on which this products value should be observed . * @ param model The model used to price the product . * @ return The random variable representing the value of the product discounted to evaluation time * @ throws net . finmath . exception . CalculationException Thrown if the valuation fails , specific cause may be available via the < code > cause ( ) < / code > method . */ @ Override public RandomVariableInterface getValue ( double evaluationTime , LIBORModelMonteCarloSimulationInterface model ) throws CalculationException { } }
// After the last period the product has value zero : Initialize values to zero .
RandomVariableInterface values = new RandomVariable ( fixingDates [ fixingDates . length - 1 ] , 0.0 ) ;
// Loop backward over the swap periods (backward induction for the cancel right).
for ( int period = fixingDates . length - 1 ; period >= 0 ; period -- ) {
    double fixingDate = fixingDates [ period ] ;
    double periodLength = periodLengths [ period ] ;
    double paymentDate = paymentDates [ period ] ;
    double notional = periodNotionals [ period ] ;
    double swaprate = swaprates [ period ] ;
    // Get random variables - note that this is the rate at simulation time = exerciseDate
    RandomVariableInterface libor = model . getLIBOR ( fixingDate , fixingDate , fixingDate + periodLength ) ;
    RandomVariableInterface numeraire = model . getNumeraire ( paymentDate ) ;
    RandomVariableInterface monteCarloProbabilities = model . getMonteCarloWeights ( paymentDate ) ;
    // foreach ( path ) values [ path ] += notional * ( libor . get ( path ) - swaprate ) * periodLength / numeraire . get ( path ) * monteCarloProbabilities . get ( path ) ;
    RandomVariableInterface payoff = libor . sub ( swaprate ) . mult ( periodLength ) . mult ( notional ) ;
    // Apply discounting and Monte - Carlo probabilities
    payoff = payoff . div ( numeraire ) . mult ( monteCarloProbabilities ) ;
    values = values . add ( payoff ) ;
    if ( isPeriodStartDateExerciseDate [ period ] ) {
        // Remove foresight through condition expectation
        ConditionalExpectationEstimatorInterface condExpEstimator = getConditionalExpectationEstimator ( period , model ) ;
        // Calculate conditional expectation . Note that no discounting ( numeraire division ) is required !
        RandomVariableInterface valueIfExcercised = condExpEstimator . getConditionalExpectation ( values ) ;
        // Apply the exercise criteria
        // foreach ( path ) if ( valueIfExcercised . get ( path ) < 0.0 ) values [ path ] = 0.0 ;
        values = values . barrier ( valueIfExcercised , values , 0.0 ) ;
    }
}
// Undo the discounting: translate the pathwise values back to the evaluation time
// using the numeraire and Monte-Carlo weights at that time.
RandomVariableInterface numeraireAtZero = model . getNumeraire ( evaluationTime ) ;
RandomVariableInterface monteCarloProbabilitiesAtZero = model . getMonteCarloWeights ( evaluationTime ) ;
values = values . mult ( numeraireAtZero ) . div ( monteCarloProbabilitiesAtZero ) ;
return values ;
public class DummyInternalTransaction { /** * ( non - Javadoc ) * @ see com . ibm . ws . objectManager . InternalTransaction # notifyFromCheckpoint ( com . ibm . ws . objectManager . Token , com . ibm . ws . objectManager . Transaction ) */ protected synchronized void notifyFromCheckpoint ( Token token , Transaction transaction ) throws ObjectManagerException { } }
throw new InvalidStateException ( this , InternalTransaction . stateTerminated , InternalTransaction . stateNames [ InternalTransaction . stateTerminated ] ) ;
public class Where { /** * Add a ' & gt ; ' clause so the column must be greater - than the value . */ public Where < T , ID > gt ( String columnName , Object value ) throws SQLException { } }
addClause ( new SimpleComparison ( columnName , findColumnFieldType ( columnName ) , value , SimpleComparison . GREATER_THAN_OPERATION ) ) ; return this ;
public class OfflineDownloadService { /** * Several actions can take place inside this service including starting and canceling a specific * region download . First , it is determined what action to take by using the { @ code intentAction } * parameter . This action is finally passed in to the correct map offline methods . * @ param intentAction string holding the task that should be performed on the specific * { @ link OfflineDownloadOptions } regional download . * @ param offlineDownload the download model which defines the region and other metadata needed to * download the correct region . * @ since 0.1.0 */ private void onResolveCommand ( String intentAction , OfflineDownloadOptions offlineDownload ) { } }
if ( OfflineConstants . ACTION_START_DOWNLOAD . equals ( intentAction ) ) { createDownload ( offlineDownload ) ; } else if ( OfflineConstants . ACTION_CANCEL_DOWNLOAD . equals ( intentAction ) ) { cancelDownload ( offlineDownload ) ; }
public class FileSystemWatcher { /** * Watch the given path . * This will launch the thread if not yet launched . * The returned Runnable can be called to stop watching . */ public static Runnable watch ( Path path , PathEventListener listener , WatchEvent . Kind < ? > ... kinds ) throws IOException { } }
synchronized ( FileSystemWatcher . class ) { Application app = LCCore . getApplication ( ) ; Watcher watcher = app . getInstance ( Watcher . class ) ; if ( watcher == null || watcher . stop ) { watcher = new Watcher ( app ) ; app . setInstance ( Watcher . class , watcher ) ; } return watcher . watch ( path , listener , kinds ) ; }
public class CmsAttributeHandler { /** * Removes the attribute value ( and corresponding widget ) with the given index , and returns * the parent widget . < p > * @ param valueIndex the value index * @ param force < code > true < / code > if the widget should be removed even if it is the last one * @ return the parent widget */ public Panel removeAttributeValueAndReturnPrevParent ( int valueIndex , boolean force ) { } }
if ( m_attributeValueViews . size ( ) > valueIndex ) { CmsAttributeValueView view = m_attributeValueViews . get ( valueIndex ) ; Panel result = ( Panel ) view . getParent ( ) ; removeAttributeValue ( view , force ) ; return result ; } return null ;
public class AmazonWorkspacesClient { /** * Modifies the properties of the specified Amazon WorkSpaces clients . * @ param modifyClientPropertiesRequest * @ return Result of the ModifyClientProperties operation returned by the service . * @ throws InvalidParameterValuesException * One or more parameter values are not valid . * @ throws ResourceNotFoundException * The resource could not be found . * @ throws AccessDeniedException * The user is not authorized to access a resource . * @ sample AmazonWorkspaces . ModifyClientProperties * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / workspaces - 2015-04-08 / ModifyClientProperties " * target = " _ top " > AWS API Documentation < / a > */ @ Override public ModifyClientPropertiesResult modifyClientProperties ( ModifyClientPropertiesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeModifyClientProperties ( request ) ;
public class AddTagsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( AddTagsRequest addTagsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( addTagsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( addTagsRequest . getResourceArn ( ) , RESOURCEARN_BINDING ) ; protocolMarshaller . marshall ( addTagsRequest . getTags ( ) , TAGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DiagramBuilder {
    /**
     * Creates a {@code StencilSet} from the model JSON and attaches it to the
     * current diagram. Does nothing if the model has no "stencilset" entry.
     *
     * @param modelJSON the model JSON to read the stencil set from
     * @param current   the diagram to attach the stencil set to
     * @throws org.json.JSONException if a present key cannot be read as expected
     */
    private static void parseStencilSet(JSONObject modelJSON, Diagram current) throws JSONException {
        // Get the stencil type, if any.
        if (!modelJSON.has("stencilset")) {
            return;
        }
        JSONObject stencilJson = modelJSON.getJSONObject("stencilset");
        // Either field may be absent; absent fields are passed through as null.
        String url = stencilJson.has("url") ? stencilJson.getString("url") : null;
        String namespace = stencilJson.has("namespace") ? stencilJson.getString("namespace") : null;
        current.setStencilset(new StencilSet(url, namespace));
    }
}
public class HttpServerConnection {
    /**
     * Pushes back the given data. This should only be used by transfer coding handlers that have read past
     * the end of the request when handling pipelined requests.
     * <p>
     * If push-back data already exists, the two buffers are merged so the ungotten
     * bytes are consumed first, followed by the previously stored extra bytes.
     * Ownership of pooled buffers is transferred or released accordingly.
     *
     * @param unget The buffer to push back
     */
    public void ungetRequestBytes(final PooledByteBuffer unget) {
        if (getExtraBytes() == null) {
            // No pushed-back data yet: adopt the buffer as-is (ownership transfers here).
            setExtraBytes(unget);
        } else {
            PooledByteBuffer eb = getExtraBytes();
            ByteBuffer buf = eb.getBuffer();
            final ByteBuffer ugBuffer = unget.getBuffer();
            // limit - remaining == position, i.e. the already-consumed space at the
            // front of the unget buffer that compact() will reclaim. If that space
            // can hold the existing extra bytes, append them in place.
            // NOTE(review): this presumes limit <= capacity leaves no usable tail
            // space beyond limit — confirm against how these buffers are sliced.
            if (ugBuffer.limit() - ugBuffer.remaining() > buf.remaining()) {
                //stuff the existing data after the data we are ungetting
                ugBuffer.compact();
                ugBuffer.put(buf);
                ugBuffer.flip();
                // The old extra-bytes buffer is fully drained; return it to the pool
                // and keep the unget buffer as the new combined extra bytes.
                eb.close();
                setExtraBytes(unget);
            } else {
                //TODO: this is horrible, but should not happen often
                // Not enough reclaimable space: copy both buffers into a fresh
                // heap array (unget bytes first, then the stored extra bytes),
                // release both pooled buffers, and wrap the array instead.
                final byte[] data = new byte[ugBuffer.remaining() + buf.remaining()];
                int first = ugBuffer.remaining();
                ugBuffer.get(data, 0, ugBuffer.remaining());
                buf.get(data, first, buf.remaining());
                eb.close();
                unget.close();
                final ByteBuffer newBuffer = ByteBuffer.wrap(data);
                setExtraBytes(new ImmediatePooledByteBuffer(newBuffer));
            }
        }
    }
}
public class ConvertKit {
    /**
     * Converts a byte array into an {@link InputStream}.
     *
     * @param bytes the source byte array
     * @return an input stream reading the array's contents, or {@code null}
     *         if the array is {@code null} or empty
     */
    public static InputStream bytes2InputStream(final byte[] bytes) {
        final boolean noData = bytes == null || bytes.length <= 0;
        return noData ? null : new ByteArrayInputStream(bytes);
    }
}
public class PointPanel {
    /**
     * Paints this point panel: expires decayed plain points, then renders the
     * point as a filled dot, an outline, or an 'x' for noise points.
     * <p>
     * NOTE(review): {@code super.paintComponent(g)} is not called — presumably
     * the oval/glyph covers the whole panel or the panel is transparent;
     * confirm there are no repaint artifacts.
     */
    @Override
    protected void paintComponent(Graphics g) {
        if (type == TYPE_PLAIN) {
            // Decay the point's weight against the current timestamp; once it
            // falls below the threshold, remove this panel from its container
            // and skip painting entirely.
            point.updateWeight(RunVisualizer.getCurrentTimestamp(), decayRate);
            if (point.weight() < decayThreshold) {
                getParent().remove(this);
                return;
            }
        }
        Color color = getColor();
        // Error-color rendering is disabled; the panel always uses the plain
        // point size. (Commented-out code kept from a previous revision.)
        //Color errcolor = getErrorColor();
        //if (errcolor == null) {
        //    errcolor = color;
        panel_size = POINTSIZE;
        //} else {
        //    panel_size = POINTSIZE + 2;
        updateLocation();
        /*g.setColor(errcolor);
        g.drawOval(0, 0, panel_size, panel_size);
        g.setColor(color);
        g.fillOval(0, 0, panel_size, panel_size);*/
        if (type == TYPE_PLAIN) {
            g.setColor(color);
            if (point.isNoise()) {
                // Noise points render as a small 'x' glyph instead of a dot.
                g.setFont(g.getFont().deriveFont(9.0f));
                g.drawChars(new char[]{'x'}, 0, 1, 0, panel_size);
            } else {
                // Regular points: outlined and filled oval.
                g.drawOval(0, 0, panel_size, panel_size);
                g.setColor(color);
                g.fillOval(0, 0, panel_size, panel_size);
            }
        } else if (type == TYPE_CLUSTERED) {
            // Clustered points: outline only.
            g.setColor(color);
            g.drawOval(0, 0, panel_size, panel_size);
        }
        // NOTE(review): setting the tooltip here runs on every repaint;
        // consider moving it to where the point data actually changes.
        setToolTipText(point.getInfo(x_dim, y_dim));
    }
}
public class TableIndexDao { /** * Delete all table indices * @ return rows deleted * @ throws SQLException * upon deletion failure * @ since 1.1.5 */ public int deleteAll ( ) throws SQLException { } }
int count = 0 ; if ( isTableExists ( ) ) { DeleteBuilder < TableIndex , String > db = deleteBuilder ( ) ; PreparedDelete < TableIndex > deleteQuery = db . prepare ( ) ; count = delete ( deleteQuery ) ; } return count ;