signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Embedded { /** * Returns the embedded items by link - relation type .
* This method can be used if the Java type of the embedded representations is known , for example because the
* { @ link HalParser } is used to map the items to a specific HalRepresentation :
* < pre > < code >
* final String json = . . .
* final FooHalRepresentation foo = HalParser
* . parse ( json )
* . as ( FooHalRepresentation . class , with ( " bar " , BarHalRepresentation . class ) ) ;
* final List & lt ; BarHalRepresentation embeddedBars = foo
* . getEmbedded ( )
* . getItemsBy ( " bar " , BarHalRepresentation . class ) ;
* < / code > < / pre >
* @ param rel the link - relation type
* @ param asType the expected class of the embedded items .
* @ param < E > the specific type of the embedded HalRepresentations
* @ return List of E
* @ throws ClassCastException if the expected type does not fit the actual type of the embedded items .
* @ since 0.1.0 */
@ JsonIgnore public < E extends HalRepresentation > List < E > getItemsBy ( final String rel , final Class < E > asType ) { } } | return getItemsBy ( rel ) . stream ( ) . map ( asType :: cast ) . collect ( toList ( ) ) ; |
public class ReadyCopyableFileFilter {

    /**
     * For every {@link CopyableFile} in {@code copyableFiles}, checks whether a
     * sibling marker file {@code <origin-path> + ".ready"} exists on
     * {@code sourceFs}, and keeps only the files whose marker is present.
     * {@inheritDoc}
     *
     * @see org.apache.gobblin.data.management.copy.CopyableFileFilter#filter(org.apache.hadoop.fs.FileSystem,
     *      org.apache.hadoop.fs.FileSystem, java.util.Collection)
     */
    @Override
    public Collection<CopyableFile> filter(FileSystem sourceFs, FileSystem targetFs,
            Collection<CopyableFile> copyableFiles) {
        Iterator<CopyableFile> cfIterator = copyableFiles.iterator();
        ImmutableList.Builder<CopyableFile> filtered = ImmutableList.builder();
        while (cfIterator.hasNext()) {
            CopyableFile cf = cfIterator.next();
            // The ".ready" marker next to the origin file signals that the file is complete.
            Path readyFilePath = PathUtils.addExtension(cf.getOrigin().getPath(), READY_EXTENSION);
            try {
                if (sourceFs.exists(readyFilePath)) {
                    filtered.add(cf);
                } else {
                    log.info(String.format("Removing %s as the .ready file is not found", cf.getOrigin().getPath()));
                }
            } catch (IOException e) {
                // An unreadable marker is treated the same as a missing one: the file is dropped.
                log.warn(String.format("Removing %s as the .ready file can not be read. Exception %s",
                        cf.getOrigin().getPath(), e.getMessage()));
            }
        }
        return filtered.build();
    }
}
public class ObjectiveMessageResources {

    /**
     * Lazily resolves the {@link FwAssistantDirector} from the container,
     * caching the result in {@code cachedAssistantDirector}.
     * <p>
     * Uses double-checked locking: the unsynchronized fast path returns the
     * cached instance, and the synchronized block ensures the component is
     * looked up at most once.
     * <p>
     * NOTE(review): correctness of double-checked locking depends on the
     * {@code cachedAssistantDirector} field being declared {@code volatile}
     * (or the component being safely publishable) — confirm at the field
     * declaration, which is outside this view.
     *
     * @return the cached assistant director (never null once the container resolves it)
     */
    protected FwAssistantDirector getAssistantDirector() {
        if (cachedAssistantDirector != null) {
            return cachedAssistantDirector;
        }
        synchronized (this) {
            // Re-check under the lock: another thread may have initialized it first.
            if (cachedAssistantDirector != null) {
                return cachedAssistantDirector;
            }
            cachedAssistantDirector = ContainerUtil.getComponent(FwAssistantDirector.class);
        }
        return cachedAssistantDirector;
    }
}
public class BaseRecordMessageFilter { /** * Update this filter with this new information .
* Override this to do something locally .
* Remember to call super after updating this filter , as this method updates the remote copy of this filter .
* @ param properties New filter information ( ie , bookmark = 345 ) .
* @ return The new filter change map ( must contain enough information for the remote filter to sync ) . */
public Map < String , Object > handleUpdateFilterMap ( Map < String , Object > propFilter ) { } } | if ( propFilter != null ) { Object objDBType = propFilter . get ( DB_TYPE ) ; if ( objDBType instanceof Integer ) m_iDatabaseType = ( ( Integer ) objDBType ) . intValue ( ) ; } if ( propFilter == null ) propFilter = new Hashtable < String , Object > ( ) ; propFilter . put ( DB_TYPE , new Integer ( m_iDatabaseType ) ) ; // Make sure remote has the same filter info
return super . handleUpdateFilterMap ( propFilter ) ; // Update any remote copy of this . |
public class Image { /** * gets an instance of an Image
* @ param image
* an Image object
* @ return a new Image object */
public static Image getInstance ( Image image ) { } } | if ( image == null ) return null ; try { Class cs = image . getClass ( ) ; Constructor constructor = cs . getDeclaredConstructor ( new Class [ ] { Image . class } ) ; return ( Image ) constructor . newInstance ( new Object [ ] { image } ) ; } catch ( Exception e ) { throw new ExceptionConverter ( e ) ; } |
public class FileUtils {

    /**
     * Returns true if the given file exists, is readable, and is not a directory.
     * (Original javadoc was in Spanish and documented a nonexistent
     * {@code absolutePath} parameter.)
     *
     * @param file the file to check; may be null
     * @return true if {@code file} is a readable regular file, false otherwise
     */
    public static boolean canReadFile(File file) {
        // Return the boolean expression directly instead of if/else over literals.
        return file != null && file.canRead() && !file.isDirectory();
    }
}
public class DataSet { /** * Initiates a CoGroup transformation .
* < p > A CoGroup transformation combines the elements of
* two { @ link DataSet DataSets } into one DataSet . It groups each DataSet individually on a key and
* gives groups of both DataSets with equal keys together into a { @ link org . apache . flink . api . common . functions . RichCoGroupFunction } .
* If a DataSet has a group with no matching key in the other DataSet , the CoGroupFunction
* is called with an empty group for the non - existing group .
* < p > The CoGroupFunction can iterate over the elements of both groups and return any number
* of elements including none .
* < p > This method returns a { @ link CoGroupOperatorSets } on which one of the { @ code where } methods
* can be called to define the join key of the first joining ( i . e . , this ) DataSet .
* @ param other The other DataSet of the CoGroup transformation .
* @ return A CoGroupOperatorSets to continue the definition of the CoGroup transformation .
* @ see CoGroupOperatorSets
* @ see CoGroupOperator
* @ see DataSet */
public < R > CoGroupOperator . CoGroupOperatorSets < T , R > coGroup ( DataSet < R > other ) { } } | return new CoGroupOperator . CoGroupOperatorSets < > ( this , other ) ; |
public class ListGatewaysRequestMarshaller {

    /**
     * Marshall the given parameter object.
     * <p>
     * Writes the request's marker and limit fields through the protocol
     * marshaller using their field bindings. Any failure during marshalling is
     * rethrown as an {@link SdkClientException} with the original cause attached.
     *
     * @param listGatewaysRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller to write the fields to
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(ListGatewaysRequest listGatewaysRequest, ProtocolMarshaller protocolMarshaller) {
        if (listGatewaysRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listGatewaysRequest.getMarker(), MARKER_BINDING);
            protocolMarshaller.marshall(listGatewaysRequest.getLimit(), LIMIT_BINDING);
        } catch (Exception e) {
            // Broad catch is the generated-marshaller convention; cause is preserved.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class StrBuilder { /** * Appends each item in an iterable to the builder without any separators .
* Appending a null iterable will have no effect .
* Each object is appended using { @ link # append ( Object ) } .
* @ param iterable the iterable to append
* @ return this , to enable chaining
* @ since 2.3 */
public StrBuilder appendAll ( final Iterable < ? > iterable ) { } } | if ( iterable != null ) { for ( final Object o : iterable ) { append ( o ) ; } } return this ; |
public class ProtoUtils { /** * Returns the JS name of the import for the given extension , suitable for goog . require . */
public static String getJsExtensionImport ( FieldDescriptor desc ) { } } | Descriptor scope = desc . getExtensionScope ( ) ; if ( scope != null ) { while ( scope . getContainingType ( ) != null ) { scope = scope . getContainingType ( ) ; } return calculateQualifiedJsName ( scope ) ; } return getJsPackage ( desc . getFile ( ) ) + "." + computeJsExtensionName ( desc ) ; |
public class Packer {

    /**
     * Get native int stored in variable-length (varint) format: 7 data bits per
     * byte, with the high bit of each byte set when another byte follows.
     * Supports positive values, but the encoding is longer than fixed-width.
     *
     * @return the decoded int value
     * @see #getVNegInt()
     */
    public int getVInt() {
        int value = 0;
        // At most 5 bytes contribute to a 32-bit value (shifts 0,7,14,21,28).
        for (int i = 0; i <= 32; i += 7) {
            final byte b = getByte();
            // Mask off the continuation bit and merge the 7 data bits in at
            // position i; the compound assignment narrows the long back to int.
            value |= ((b & 0x7FL) << i);
            // High bit clear (b >= 0) means this was the final byte.
            if (b >= 0)
                return value;
        }
        // NOTE(review): over-long/malformed input falls through and returns the
        // partial value silently — confirm this is the intended behavior.
        return value;
    }
}
public class Record { /** * Accessor method to retrieve an accrue type instance .
* @ param field the index number of the field to be retrieved
* @ return the value of the required field */
public AccrueType getAccrueType ( int field ) { } } | AccrueType result ; if ( ( field < m_fields . length ) && ( m_fields [ field ] . length ( ) != 0 ) ) { result = AccrueTypeUtility . getInstance ( m_fields [ field ] , m_locale ) ; } else { result = null ; } return ( result ) ; |
public class NamespaceRecordFile { /** * Read a specific record into { @ link NamespaceEntry namespace entry }
* object { @ code e } .
* This reuses the entry object given in order to limit garbage collection .
* @ param record { @ code long } the record number to read
* @ param e the { @ link NamespaceEntry entry } to populate from the record
* contents
* @ throws IOException Thrown if an IO exception occurred reading from the
* { @ link RecordFile record file } */
public void readToEntry ( final long record , final NamespaceEntry e ) throws IOException { } } | final byte [ ] recbytes = super . read ( record ) ; nsrec . readTo ( recbytes , e ) ; |
public class SchemaSet { /** * Returns the number of elements in this set ( its cardinality ) . If this
* set contains more than < tt > Integer . MAX _ VALUE < / tt > elements , returns
* < tt > Integer . MAX _ VALUE < / tt > .
* @ return the number of elements in this set ( its cardinality ) . */
public int size ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "size" ) ; /* Because the table could be ' resized ' ( and therefore replaced ) during */
/* the method we get a local ref to the current one & use it throughout . */
Entry [ ] safeTable = table ; long count = 0 ; Entry current ; /* Go through the linked lists in the table , counting non - null entries */
for ( int i = 0 ; i < safeTable . length ; i ++ ) { current = safeTable [ i ] ; while ( current != null ) { count ++ ; current = current . next ; } } if ( count > Integer . MAX_VALUE ) { count = Integer . MAX_VALUE ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "size" , ( int ) count ) ; return ( int ) count ; |
public class GrailsLocaleUtils { /** * Converts the given < code > baseName < / code > and < code > locale < / code > to the
* bundle name . This method is called from the default implementation of the
* { @ link # newBundle ( String , Locale , String , ClassLoader , boolean )
* newBundle } and
* { @ link # needsReload ( String , Locale , String , ClassLoader , ResourceBundle , long )
* needsReload } methods .
* This implementation returns the following value :
* < pre >
* baseName + & quot ; _ & quot ; + language + & quot ; _ & quot ; + country + & quot ; _ & quot ; + variant
* < / pre >
* where < code > language < / code > , < code > country < / code > and
* < code > variant < / code > are the language , country and variant values of
* < code > locale < / code > , respectively . Final component values that are empty
* Strings are omitted along with the preceding ' _ ' . If all of the values
* are empty strings , then < code > baseName < / code > is returned .
* For example , if < code > baseName < / code > is < code > " baseName " < / code > and
* < code > locale < / code > is < code > Locale ( " ja " , & nbsp ; " " , & nbsp ; " XX " ) < / code > ,
* then < code > " baseName _ ja _ & thinsp ; _ XX " < / code > is returned . If the given
* locale is < code > Locale ( " en " ) < / code > , then < code > " baseName _ en " < / code > is
* returned .
* Overriding this method allows applications to use different conventions
* in the organization and packaging of localized resources .
* @ param baseName
* the base name of the resource bundle , a fully qualified class
* name
* @ param locale
* the locale for which a resource bundle should be loaded
* @ return the bundle name for the resource bundle
* @ exception NullPointerException
* if < code > baseName < / code > or < code > locale < / code > is
* < code > null < / code > */
public static String toBundleName ( String bundleBaseName , Locale locale ) { } } | String baseName = bundleBaseName ; if ( ! isPluginResoucePath ( bundleBaseName ) ) { baseName = bundleBaseName . replace ( '.' , '/' ) ; } if ( locale == null ) { return baseName ; } String language = locale . getLanguage ( ) ; String country = locale . getCountry ( ) ; String variant = locale . getVariant ( ) ; if ( language == "" && country == "" && variant == "" ) { return baseName ; } StringBuffer sb = new StringBuffer ( baseName ) ; sb . append ( '_' ) ; if ( variant != "" ) { sb . append ( language ) . append ( '_' ) . append ( country ) . append ( '_' ) . append ( variant ) ; } else if ( country != "" ) { sb . append ( language ) . append ( '_' ) . append ( country ) ; } else { sb . append ( language ) ; } return sb . toString ( ) ; |
public class MCWrapper { /** * Fatal Error Notification Occurred
* When a connection is created it will be assigned a
* free pools fatal error notification value + 1 . When a fatal
* error occurs , the free pools fatal error notification value is
* incremented by 1 . Any connection with a fatal error
* notification value less than or = to free pools fatal error
* notification value will be cleaned up and destroyed */
@ Override public boolean hasFatalErrorNotificationOccurred ( int fatalErrorNotificationTime ) { } } | final boolean isTracingEnabled = TraceComponent . isAnyTracingEnabled ( ) ; /* * I have changed this from using a long based on a currentTimeMillis
* to an int value .
* By using an int we will perform better and synchronization is
* not required . */
if ( fatalErrorValue > fatalErrorNotificationTime ) { return false ; } else { if ( isTracingEnabled && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "hasFatalErrorNotificationOccurred is true" ) ; } return true ; } |
public class SourceWaterMarks { /** * Sets the water marks of a source .
* @ param source - the source
* @ param lwmScn - the low water mark SCN
* @ param hwmScn - the high water mark SCN */
public void setWaterMarks ( String source , long lwmScn , long hwmScn ) { } } | WaterMarkEntry e = sourceWaterMarkMap . get ( source ) ; if ( e == null ) { e = new WaterMarkEntry ( source ) ; sourceWaterMarkMap . put ( source , e ) ; } e . setLWMScn ( lwmScn ) ; e . setHWMScn ( hwmScn ) ; |
public class EvaluationTools { /** * Given a { @ link ROCMultiClass } instance and ( optionally ) names for each class , render the ROC chart to a stand - alone
* HTML file ( returned as a String )
* @ param rocMultiClass ROC to render
* @ param classNames Names of the classes . May be null */
public static String rocChartToHtml ( ROCMultiClass rocMultiClass , List < String > classNames ) { } } | int n = rocMultiClass . getNumClasses ( ) ; List < Component > components = new ArrayList < > ( n ) ; for ( int i = 0 ; i < n ; i ++ ) { RocCurve roc = rocMultiClass . getRocCurve ( i ) ; String headerText = "Class " + i ; if ( classNames != null && classNames . size ( ) > i ) { headerText += " (" + classNames . get ( i ) + ")" ; } headerText += " vs. All" ; ; Component headerDivPad = new ComponentDiv ( HEADER_DIV_PAD_STYLE ) ; components . add ( headerDivPad ) ; Component headerDivLeft = new ComponentDiv ( HEADER_DIV_TEXT_PAD_STYLE ) ; Component headerDiv = new ComponentDiv ( HEADER_DIV_STYLE , new ComponentText ( headerText , HEADER_TEXT_STYLE ) ) ; Component c = getRocFromPoints ( ROC_TITLE , roc , rocMultiClass . getCountActualPositive ( i ) , rocMultiClass . getCountActualNegative ( i ) , rocMultiClass . calculateAUC ( i ) , rocMultiClass . calculateAUCPR ( i ) ) ; Component c2 = getPRCharts ( PR_TITLE , PR_THRESHOLD_TITLE , rocMultiClass . getPrecisionRecallCurve ( i ) ) ; components . add ( headerDivLeft ) ; components . add ( headerDiv ) ; components . add ( c ) ; components . add ( c2 ) ; } return StaticPageUtil . renderHTML ( components ) ; |
public class HeronClient { protected Map < String , Message . Builder > getMessageMap ( ) { } } | return new HashMap < String , Message . Builder > ( messageMap ) ; |
public class CmsDefaultUserSettings { /** * Gets the default copy mode when copying a folder of the user . < p >
* @ return the default copy mode when copying a folder of the user */
public String getDialogCopyFolderModeString ( ) { } } | if ( getDialogCopyFolderMode ( ) == CmsResource . COPY_AS_NEW ) { return COPYMODE_RESOURCE ; } else if ( getDialogCopyFolderMode ( ) == CmsResource . COPY_AS_SIBLING ) { return COPYMODE_SIBLING ; } else { return COPYMODE_PRESERVE ; } |
public class WMessages {

    /**
     * Adds a message, routing it to the bucket matching its type (success,
     * info, warning, or error); unknown types are logged and dropped.
     * <p>
     * When setting <code>encodeText</code> to <code>false</code>, it then
     * becomes the responsibility of the application to ensure that the text
     * does not contain any characters which need to be escaped.
     * <b>WARNING:</b> If you are using WMessageBox to display "user entered" or
     * untrusted data, use of this method with <code>encodeText</code> set to
     * <code>false</code> may result in security issues.
     *
     * @param message the message to add
     * @param encodeText true to encode the message, false to leave it unencoded.
     */
    public void addMessage(final Message message, final boolean encodeText) {
        switch (message.getType()) {
            case Message.SUCCESS_MESSAGE:
                addMessage(successMessages, message, encodeText);
                break;
            case Message.INFO_MESSAGE:
                addMessage(infoMessages, message, encodeText);
                break;
            case Message.WARNING_MESSAGE:
                addMessage(warningMessages, message, encodeText);
                break;
            case Message.ERROR_MESSAGE:
                addMessage(errorMessages, message, encodeText);
                break;
            default:
                // Unknown type: logged, not thrown, so callers never break on new types.
                LOG.warn("Unknown message type: " + message.getType());
        }
    }
}
public class ClassFileWriter {

    /**
     * The effect on the operand stack of a given opcode, in stack words
     * (long/double count as two). Grouped by net stack delta from -4 to +2.
     *
     * @param opcode the JVM bytecode opcode
     * @return the net change in operand-stack depth caused by the opcode
     * @throws IllegalArgumentException if the opcode is not recognized
     */
    private static int stackChange(int opcode) {
        // For INVOKE... accounts only for popping this (unless static),
        // ignoring parameters and return type
        switch (opcode) {
            // pops 4 words: store a long/double into an array (arrayref, index, value2)
            case ByteCode.DASTORE:
            case ByteCode.LASTORE:
                return -4;
            case ByteCode.AASTORE:
            case ByteCode.BASTORE:
            case ByteCode.CASTORE:
            case ByteCode.DCMPG:
            case ByteCode.DCMPL:
            case ByteCode.FASTORE:
            case ByteCode.IASTORE:
            case ByteCode.LCMP:
            case ByteCode.SASTORE:
                return -3;
            case ByteCode.DADD:
            case ByteCode.DDIV:
            case ByteCode.DMUL:
            case ByteCode.DREM:
            case ByteCode.DRETURN:
            case ByteCode.DSTORE:
            case ByteCode.DSTORE_0:
            case ByteCode.DSTORE_1:
            case ByteCode.DSTORE_2:
            case ByteCode.DSTORE_3:
            case ByteCode.DSUB:
            case ByteCode.IF_ACMPEQ:
            case ByteCode.IF_ACMPNE:
            case ByteCode.IF_ICMPEQ:
            case ByteCode.IF_ICMPGE:
            case ByteCode.IF_ICMPGT:
            case ByteCode.IF_ICMPLE:
            case ByteCode.IF_ICMPLT:
            case ByteCode.IF_ICMPNE:
            case ByteCode.LADD:
            case ByteCode.LAND:
            case ByteCode.LDIV:
            case ByteCode.LMUL:
            case ByteCode.LOR:
            case ByteCode.LREM:
            case ByteCode.LRETURN:
            case ByteCode.LSTORE:
            case ByteCode.LSTORE_0:
            case ByteCode.LSTORE_1:
            case ByteCode.LSTORE_2:
            case ByteCode.LSTORE_3:
            case ByteCode.LSUB:
            case ByteCode.LXOR:
            case ByteCode.POP2:
                return -2;
            case ByteCode.AALOAD:
            case ByteCode.ARETURN:
            case ByteCode.ASTORE:
            case ByteCode.ASTORE_0:
            case ByteCode.ASTORE_1:
            case ByteCode.ASTORE_2:
            case ByteCode.ASTORE_3:
            case ByteCode.ATHROW:
            case ByteCode.BALOAD:
            case ByteCode.CALOAD:
            case ByteCode.D2F:
            case ByteCode.D2I:
            case ByteCode.FADD:
            case ByteCode.FALOAD:
            case ByteCode.FCMPG:
            case ByteCode.FCMPL:
            case ByteCode.FDIV:
            case ByteCode.FMUL:
            case ByteCode.FREM:
            case ByteCode.FRETURN:
            case ByteCode.FSTORE:
            case ByteCode.FSTORE_0:
            case ByteCode.FSTORE_1:
            case ByteCode.FSTORE_2:
            case ByteCode.FSTORE_3:
            case ByteCode.FSUB:
            case ByteCode.GETFIELD:
            case ByteCode.IADD:
            case ByteCode.IALOAD:
            case ByteCode.IAND:
            case ByteCode.IDIV:
            case ByteCode.IFEQ:
            case ByteCode.IFGE:
            case ByteCode.IFGT:
            case ByteCode.IFLE:
            case ByteCode.IFLT:
            case ByteCode.IFNE:
            case ByteCode.IFNONNULL:
            case ByteCode.IFNULL:
            case ByteCode.IMUL:
            case ByteCode.INVOKEINTERFACE:
            case ByteCode.INVOKESPECIAL: // but needs to account for
            case ByteCode.INVOKEVIRTUAL: // pops 'this' (unless static)
            case ByteCode.IOR:
            case ByteCode.IREM:
            case ByteCode.IRETURN:
            case ByteCode.ISHL:
            case ByteCode.ISHR:
            case ByteCode.ISTORE:
            case ByteCode.ISTORE_0:
            case ByteCode.ISTORE_1:
            case ByteCode.ISTORE_2:
            case ByteCode.ISTORE_3:
            case ByteCode.ISUB:
            case ByteCode.IUSHR:
            case ByteCode.IXOR:
            case ByteCode.L2F:
            case ByteCode.L2I:
            case ByteCode.LOOKUPSWITCH:
            case ByteCode.LSHL:
            case ByteCode.LSHR:
            case ByteCode.LUSHR:
            case ByteCode.MONITORENTER:
            case ByteCode.MONITOREXIT:
            case ByteCode.POP:
            case ByteCode.PUTFIELD:
            case ByteCode.SALOAD:
            case ByteCode.TABLESWITCH:
                return -1;
            case ByteCode.ANEWARRAY:
            case ByteCode.ARRAYLENGTH:
            case ByteCode.BREAKPOINT:
            case ByteCode.CHECKCAST:
            case ByteCode.D2L:
            case ByteCode.DALOAD:
            case ByteCode.DNEG:
            case ByteCode.F2I:
            case ByteCode.FNEG:
            case ByteCode.GETSTATIC:
            case ByteCode.GOTO:
            case ByteCode.GOTO_W:
            case ByteCode.I2B:
            case ByteCode.I2C:
            case ByteCode.I2F:
            case ByteCode.I2S:
            case ByteCode.IINC:
            case ByteCode.IMPDEP1:
            case ByteCode.IMPDEP2:
            case ByteCode.INEG:
            case ByteCode.INSTANCEOF:
            case ByteCode.INVOKESTATIC:
            case ByteCode.INVOKEDYNAMIC:
            case ByteCode.L2D:
            case ByteCode.LALOAD:
            case ByteCode.LNEG:
            case ByteCode.NEWARRAY:
            case ByteCode.NOP:
            case ByteCode.PUTSTATIC:
            case ByteCode.RET:
            case ByteCode.RETURN:
            case ByteCode.SWAP:
            case ByteCode.WIDE:
                return 0;
            case ByteCode.ACONST_NULL:
            case ByteCode.ALOAD:
            case ByteCode.ALOAD_0:
            case ByteCode.ALOAD_1:
            case ByteCode.ALOAD_2:
            case ByteCode.ALOAD_3:
            case ByteCode.BIPUSH:
            case ByteCode.DUP:
            case ByteCode.DUP_X1:
            case ByteCode.DUP_X2:
            case ByteCode.F2D:
            case ByteCode.F2L:
            case ByteCode.FCONST_0:
            case ByteCode.FCONST_1:
            case ByteCode.FCONST_2:
            case ByteCode.FLOAD:
            case ByteCode.FLOAD_0:
            case ByteCode.FLOAD_1:
            case ByteCode.FLOAD_2:
            case ByteCode.FLOAD_3:
            case ByteCode.I2D:
            case ByteCode.I2L:
            case ByteCode.ICONST_0:
            case ByteCode.ICONST_1:
            case ByteCode.ICONST_2:
            case ByteCode.ICONST_3:
            case ByteCode.ICONST_4:
            case ByteCode.ICONST_5:
            case ByteCode.ICONST_M1:
            case ByteCode.ILOAD:
            case ByteCode.ILOAD_0:
            case ByteCode.ILOAD_1:
            case ByteCode.ILOAD_2:
            case ByteCode.ILOAD_3:
            case ByteCode.JSR:
            case ByteCode.JSR_W:
            case ByteCode.LDC:
            case ByteCode.LDC_W:
            case ByteCode.MULTIANEWARRAY:
            case ByteCode.NEW:
            case ByteCode.SIPUSH:
                return 1;
            case ByteCode.DCONST_0:
            case ByteCode.DCONST_1:
            case ByteCode.DLOAD:
            case ByteCode.DLOAD_0:
            case ByteCode.DLOAD_1:
            case ByteCode.DLOAD_2:
            case ByteCode.DLOAD_3:
            case ByteCode.DUP2:
            case ByteCode.DUP2_X1:
            case ByteCode.DUP2_X2:
            case ByteCode.LCONST_0:
            case ByteCode.LCONST_1:
            case ByteCode.LDC2_W:
            case ByteCode.LLOAD:
            case ByteCode.LLOAD_0:
            case ByteCode.LLOAD_1:
            case ByteCode.LLOAD_2:
            case ByteCode.LLOAD_3:
                return 2;
        }
        throw new IllegalArgumentException("Bad opcode: " + opcode);
    }
}
public class AssetsSingleton { /** * Retrieve the list of all asset currently available on the platform .
* @ param useCache whether or not we can returned a cached version of the result . This cache may contain
* invalidated data or may not contain all available assets .
* @ return the list of assets */
@ Override public Collection < Asset < ? > > assets ( boolean useCache ) { } } | if ( useCache && ! cache . isEmpty ( ) ) { return new ArrayList < > ( cache ) ; } return assets ( ) ; |
public class ControlMessageFactoryImpl {

    /**
     * Create a new, empty ControlDurableConfirm Message.
     *
     * @return The new ControlDurableConfirm
     * @exception MessageCreateFailedException Thrown if such a message can not be created
     */
    public final ControlDurableConfirm createNewControlDurableConfirm() throws MessageCreateFailedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createNewControlDurableConfirm");
        ControlDurableConfirm msg = null;
        try {
            // CONSTRUCTOR_NO_OP builds the message without decoding any payload.
            msg = new ControlDurableConfirmImpl(MfpConstants.CONSTRUCTOR_NO_OP);
        } catch (MessageDecodeFailedException e) {
            /* No need to FFDC this as JsMsgObject will already have done so */
            // No FFDC code needed
            throw new MessageCreateFailedException(e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createNewControlDurableConfirm");
        return msg;
    }
}
public class Envelope2D { /** * Sets the envelope from the array of points . The envelope will be set to
* empty if the array is null .
* @ param points The points to set the envelope from . No element in the array can be null . */
public void setFromPoints ( Point2D [ ] points ) { } } | if ( points == null || points . length == 0 ) { setEmpty ( ) ; return ; } Point2D pt = points [ 0 ] ; setCoords ( pt . x , pt . y ) ; for ( int i = 1 ; i < points . length ; i ++ ) { Point2D pt2d = points [ i ] ; mergeNE ( pt2d . x , pt2d . y ) ; } |
public class CachingTemplate { /** * Factory method used to construct a new instance of the { @ link CachingTemplate }
* initialized with the given { @ link Cache } .
* @ param < KEY > { @ link Class type } of the { @ link Cache } entry key .
* @ param < VALUE > { @ link Class type } of the { @ link Cache } entry value .
* @ param cache { @ link Cache } used to initialize the new { @ link CachingTemplate } .
* @ return a new { @ link CachingTemplate } initialized with the given { @ link Cache } .
* @ throws IllegalArgumentException if the { @ link Cache } is { @ literal null } .
* @ see org . cp . elements . data . caching . Cache
* @ see # CachingTemplate ( Cache ) */
public static < KEY extends Comparable < KEY > , VALUE > CachingTemplate < KEY , VALUE > with ( Cache < KEY , VALUE > cache ) { } } | return new CachingTemplate < > ( cache ) ; |
public class CachingTemplate { /** * This caching data access operation invokes the supplied { @ link Supplier cacheable operation } and then clears
* the contents of the entire { @ link Cache } , but only if the { @ link Supplier cacheable operation } completes
* successfully .
* @ param < T > { @ link Class type } of the return { @ link VALUE value } .
* @ param cacheableOperation { @ link Supplier } used to compute or load a { @ link VALUE value } .
* @ return the { @ link VALUE result } of the { @ link Supplier cacheable operation } .
* @ throws IllegalArgumentException if the { @ link Supplier } is { @ literal null } .
* @ see java . util . function . Supplier
* @ see # getCache ( )
* @ see # getLock ( ) */
@ SuppressWarnings ( "unchecked" ) public < T extends VALUE > T withCacheClear ( Supplier < VALUE > cacheableOperation ) { } } | Assert . notNull ( cacheableOperation , "Supplier is required" ) ; VALUE returnValue = cacheableOperation . get ( ) ; clear ( getLock ( ) ) ; return ( T ) returnValue ; |
public class Scope { /** * { @ inheritDoc } */
public Set < IConnection > getClientConnections ( ) { } } | Set < IConnection > result = new HashSet < IConnection > ( 3 ) ; log . debug ( "Client count: {}" , clients . size ( ) ) ; for ( IClient cli : clients ) { Set < IConnection > set = cli . getConnections ( ) ; log . debug ( "Client connection count: {}" , set . size ( ) ) ; if ( set . size ( ) > 1 ) { log . warn ( "Client connections exceeded expected single count; size: {}" , set . size ( ) ) ; } for ( IConnection conn : set ) { result . add ( conn ) ; } } return result ; |
public class BeanContextSupport { /** * ( non - Javadoc )
* @ see com . googlecode . openbeans . beancontext . BeanContext # getResourceAsStream ( java . lang . String , com . googlecode . openbeans . beancontext . BeanContextChild ) */
public InputStream getResourceAsStream ( String resourceName , BeanContextChild child ) throws IllegalArgumentException { } } | if ( resourceName == null || child == null ) { throw new NullPointerException ( ) ; } if ( ! contains ( child ) ) { throw new IllegalArgumentException ( Messages . getString ( "beans.6D" ) ) ; } return ClassLoader . getSystemResourceAsStream ( resourceName ) ; |
public class DeviceProxy { public List < PipeInfo > getPipeConfig ( List < String > pipeNames ) throws DevFailed { } } | return deviceProxyDAO . getPipeConfig ( this , pipeNames ) ; |
public class Tsurgeon {

    /**
     * Applies each tregex/tsurgeon pattern pair exhaustively to the tree,
     * re-running a pattern after every successful surgery until it no longer
     * matches. Sets the {@code matchedOnTree} hack-in field to indicate whether
     * any pattern matched.
     *
     * @param ops pairs of match pattern and surgery pattern, applied in order
     * @param t the tree to operate on (may be replaced by the surgeries)
     * @return the surgically altered tree, or null if a surgery deleted it
     */
    public static Tree processPatternsOnTree(List<Pair<TregexPattern, TsurgeonPattern>> ops, Tree t) {
        matchedOnTree = false;
        for (Pair<TregexPattern, TsurgeonPattern> op : ops) {
            try {
                if (DEBUG) {
                    System.err.println("Running pattern " + op.first());
                }
                TregexMatcher m = op.first().matcher(t);
                while (m.find()) {
                    matchedOnTree = true;
                    t = op.second().evaluate(t, m);
                    if (t == null) {
                        return null;
                    }
                    // Surgery may have restructured the tree; restart matching on the new tree.
                    m = op.first().matcher(t);
                }
            } catch (NullPointerException npe) {
                // Wrap with the offending pattern pair so the failure is diagnosable.
                throw new RuntimeException("Tsurgeon.processPatternsOnTree failed to match label for pattern: " + op.first() + ", " + op.second(), npe);
            }
        }
        return t;
    }
}
public class GBSNode { /** * Return true if the node is an inner node ( not a leaf node ) . */
public boolean isInnerNode ( ) { } } | boolean inner = true ; if ( ( _leftChild == null ) || ( _rightChild == null ) ) inner = false ; return inner ; |
public class Predicate { /** * Returns the sub - predicate that applies to the specified schema .
* @ param sch
* the schema
* @ return the sub - predicate applying to the schema */
public Predicate selectPredicate ( Schema sch ) { } } | Predicate result = new Predicate ( ) ; for ( Term t : terms ) if ( t . isApplicableTo ( sch ) ) result . terms . add ( t ) ; if ( result . terms . size ( ) == 0 ) return null ; else return result ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link AbstractGeneralOperationParameterRefType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link AbstractGeneralOperationParameterRefType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "usesParameter" ) public JAXBElement < AbstractGeneralOperationParameterRefType > createUsesParameter ( AbstractGeneralOperationParameterRefType value ) { } } | return new JAXBElement < AbstractGeneralOperationParameterRefType > ( _UsesParameter_QNAME , AbstractGeneralOperationParameterRefType . class , null , value ) ; |
public class AbstractXbaseSemanticSequencer {

    /**
     * Serializes an {@code XPostfixOperation} (generated Xtext sequencer method).
     * <p>
     * Applies in every XExpression-producing context (XAssignment, XOrExpression,
     * XAndExpression, XEqualityExpression, XRelationalExpression,
     * XOtherOperatorExpression, XAdditiveExpression, XMultiplicativeExpression,
     * XUnaryOperation, XCastedExpression, XPostfixOperation, XMemberFeatureCall,
     * XPrimaryExpression, XParenthesizedExpression, XExpressionOrVarDeclaration).
     * <p>
     * Constraint:
     * (operand=XPostfixOperation_XPostfixOperation_1_0_0 feature=[JvmIdentifiableElement|OpPostfix])
     */
    protected void sequence_XPostfixOperation(ISerializationContext context, XPostfixOperation semanticObject) {
        // Report any mandatory feature (operand, feature) that the transient-value
        // service says is missing before attempting to feed the sequencer.
        if (errorAcceptor != null) {
            if (transientValues.isValueTransient(semanticObject, XbasePackage.Literals.XPOSTFIX_OPERATION__OPERAND) == ValueTransient.YES)
                errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, XbasePackage.Literals.XPOSTFIX_OPERATION__OPERAND));
            if (transientValues.isValueTransient(semanticObject, XbasePackage.Literals.XABSTRACT_FEATURE_CALL__FEATURE) == ValueTransient.YES)
                errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, XbasePackage.Literals.XABSTRACT_FEATURE_CALL__FEATURE));
        }
        // Feed operand and feature to the grammar elements in constraint order.
        SequenceFeeder feeder = createSequencerFeeder(context, semanticObject);
        feeder.accept(grammarAccess.getXPostfixOperationAccess().getXPostfixOperationOperandAction_1_0_0(), semanticObject.getOperand());
        feeder.accept(grammarAccess.getXPostfixOperationAccess().getFeatureJvmIdentifiableElementOpPostfixParserRuleCall_1_0_1_0_1(), semanticObject.eGet(XbasePackage.Literals.XABSTRACT_FEATURE_CALL__FEATURE, false));
        feeder.finish();
    }
}
public class MetaModels { /** * An edge is a table in a many to many relationship that is not a join .
* @ param join join table
* @ return edges for a join . */
protected List < String > getEdges ( String join ) { } } | List < String > results = new ArrayList < > ( ) ; for ( Many2ManyAssociation a : many2ManyAssociations ) { if ( a . getJoin ( ) . equalsIgnoreCase ( join ) ) { results . add ( getMetaModel ( a . getSourceClass ( ) ) . getTableName ( ) ) ; results . add ( getMetaModel ( a . getTargetClass ( ) ) . getTableName ( ) ) ; } } return results ; |
public class LUDecomposition { /** * Return upper triangular factor
* @ return U */
public Matrix getU ( ) { } } | Matrix X = new Matrix ( n , n ) ; double [ ] [ ] U = X . getArray ( ) ; for ( int i = 0 ; i < n ; i ++ ) { for ( int j = 0 ; j < n ; j ++ ) { if ( i <= j ) { U [ i ] [ j ] = LU [ i ] [ j ] ; } else { U [ i ] [ j ] = 0.0 ; } } } return X ; |
public class NonMonotonicLaxImmutableMapBuilder { /** * See { @ link ImmutableMap . Builder # orderEntriesByValue ( Comparator ) } */
@ Override public LaxImmutableMapBuilder < K , V > orderEntriesByValue ( Comparator < ? super V > valueComparator ) { } } | this . immutableMapEntryOrdering = checkNotNull ( valueComparator ) ; return this ; |
public class IgnoredNonAffectedServerGroupsUtil { /** * Used by the slave host when creating the host info dmr sent across to the DC during the registration process
* @ param ignoreUnaffectedServerGroups whether the slave host is set up to ignore config for server groups it does not have servers for
* @ param hostModel the resource containing the host model
* @ param model the dmr sent across to theDC
* @ return the modified dmr */
public static ModelNode addCurrentServerGroupsToHostInfoModel ( boolean ignoreUnaffectedServerGroups , Resource hostModel , ModelNode model ) { } } | if ( ! ignoreUnaffectedServerGroups ) { return model ; } model . get ( IGNORE_UNUSED_CONFIG ) . set ( ignoreUnaffectedServerGroups ) ; addServerGroupsToModel ( hostModel , model ) ; return model ; |
public class Iterate { /** * Iterates over a collection passing each element and the current relative int index to the specified instance of
* ObjectIntProcedure .
* Example using a Java 8 lambda expression :
* < pre >
* Iterate . < b > forEachWithIndex < / b > ( people , ( Person person , int index ) - > LOGGER . info ( " Index : " + index + " person : " + person . getName ( ) ) ) ;
* < / pre >
* Example using anonymous inner class :
* < pre >
* Iterate . < b > forEachWithIndex < / b > ( people , new ObjectIntProcedure < Person > ( )
* public void value ( Person person , int index )
* LOGGER . info ( " Index : " + index + " person : " + person . getName ( ) ) ;
* < / pre > */
public static < T > void forEachWithIndex ( Iterable < T > iterable , ObjectIntProcedure < ? super T > objectIntProcedure ) { } } | if ( iterable instanceof InternalIterable ) { ( ( InternalIterable < T > ) iterable ) . forEachWithIndex ( objectIntProcedure ) ; } else if ( iterable instanceof ArrayList ) { ArrayListIterate . forEachWithIndex ( ( ArrayList < T > ) iterable , objectIntProcedure ) ; } else if ( iterable instanceof List ) { ListIterate . forEachWithIndex ( ( List < T > ) iterable , objectIntProcedure ) ; } else if ( iterable != null ) { IterableIterate . forEachWithIndex ( iterable , objectIntProcedure ) ; } else { throw new IllegalArgumentException ( "Cannot perform a forEachWithIndex on null" ) ; } |
public class GraphVizHelper { /** * Get the graph in a simple DOT notation suitable for GraphViz
* ( http : / / www . graphviz . org ) . The DOT specs can be found at
* http : / / www . graphviz . org / content / dot - language < br >
* The default file encoding for GraphViz 2.28 is UTF - 8!
* @ param aGraph
* The graph to be converted . May not be < code > null < / code > .
* @ param sNodeLabelAttr
* The name of the attribute to be used for node labels . May be
* < code > null < / code > to use the node ID as the label .
* @ param sRelationLabelAttr
* The name of the attribute to be used for relation labels . May be
* < code > null < / code > to use no relation label .
* @ return The string representation to be used as input for DOT .
* @ param < N >
* Graph node type
* @ param < R >
* Graph relation type */
@ Nonnull public static < N extends IDirectedGraphNode < N , R > , R extends IDirectedGraphRelation < N , R > > String getAsGraphVizDot ( @ Nonnull final IDirectedGraph < N , R > aGraph , @ Nullable final String sNodeLabelAttr , @ Nullable final String sRelationLabelAttr ) { } } | ValueEnforcer . notNull ( aGraph , "Graph" ) ; final StringBuilder aSB = new StringBuilder ( ) ; // It ' s a directed graph
aSB . append ( "digraph " ) . append ( aGraph . getID ( ) ) . append ( "{\n" ) ; aSB . append ( "node[shape=box];" ) ; aGraph . forEachNode ( aGraphNode -> { aSB . append ( aGraphNode . getID ( ) ) ; if ( StringHelper . hasText ( sNodeLabelAttr ) ) { final String sLabel = aGraphNode . attrs ( ) . getAsString ( sNodeLabelAttr ) ; aSB . append ( "[label=<" ) . append ( XMLMaskHelper . getMaskedXMLText ( EXMLSerializeVersion . XML_10 , EXMLCharMode . ELEMENT_NAME , EXMLIncorrectCharacterHandling . DEFAULT , sLabel ) ) . append ( ">]" ) ; } aSB . append ( ';' ) ; } ) ; aSB . append ( '\n' ) ; aGraph . forEachRelation ( aGraphRelation -> { aSB . append ( aGraphRelation . getFromID ( ) ) . append ( "->" ) . append ( aGraphRelation . getToID ( ) ) ; if ( StringHelper . hasText ( sRelationLabelAttr ) ) { final String sLabel = aGraphRelation . attrs ( ) . getAsString ( sRelationLabelAttr ) ; aSB . append ( "[label=<" ) . append ( XMLMaskHelper . getMaskedXMLText ( EXMLSerializeVersion . XML_10 , EXMLCharMode . ELEMENT_NAME , EXMLIncorrectCharacterHandling . DEFAULT , sLabel ) ) . append ( ">]" ) ; } aSB . append ( ";\n" ) ; } ) ; aSB . append ( "overlap=false;\n" ) ; aSB . append ( '}' ) ; return aSB . toString ( ) ; |
public class TranslationRequestDataChangeSet { /** * Sets a map containing target languages indexed by bundle IDs . This method adopts
* the input map without creating a safe copy .
* @ param targetLanguagesByBundle A map containing target languages indexed by
* bundle IDs .
* @ return This object .
* @ throws NullPointerException When the input < code > targetLanguagesByBundle < / code > is null . */
public TranslationRequestDataChangeSet setTargetLanguagesByBundle ( Map < String , Set < String > > targetLanguagesByBundle ) { } } | // TODO - check empty map ?
if ( targetLanguagesByBundle == null ) { throw new NullPointerException ( "The input map is null." ) ; } this . targetLanguagesByBundle = targetLanguagesByBundle ; return this ; |
public class _Private_IonTextWriterBuilder { private _Private_IonTextWriterBuilder fillDefaults ( ) { } } | // Ensure that we don ' t modify the user ' s builder .
IonTextWriterBuilder b = copy ( ) ; if ( b . getCatalog ( ) == null ) { b . setCatalog ( new SimpleCatalog ( ) ) ; } if ( b . getCharset ( ) == null ) { b . setCharset ( UTF8 ) ; } return ( _Private_IonTextWriterBuilder ) b . immutable ( ) ; |
public class UserMappingTable { /** * Get the required columns
* @ return required columns */
public static List < String > requiredColumns ( ) { } } | List < String > requiredColumns = new ArrayList < > ( ) ; requiredColumns . add ( COLUMN_BASE_ID ) ; requiredColumns . add ( COLUMN_RELATED_ID ) ; return requiredColumns ; |
public class CSIv2SubsystemFactory { /** * { @ inheritDoc } */
@ Override public void addTargetORBInitArgs ( Map < String , Object > targetProperties , List < String > args ) { } } | args . add ( "-IIOPconnectionHelper" ) ; args . add ( SocketFactory . class . getName ( ) ) ; |
public class Attributes { /** * Remove an attribute by key . < b > Case insensitive . < / b >
* @ param key attribute key to remove */
public void removeIgnoreCase ( String key ) { } } | int i = indexOfKeyIgnoreCase ( key ) ; if ( i != NotFound ) remove ( i ) ; |
public class AbstractValidator { /** * Creates the error list .
* @ param editor the editor
* @ param value the value
* @ param messageKey the message key
* @ return the list */
public List < EditorError > createErrorList ( Editor < T > editor , T value , String messageKey ) { } } | List < EditorError > result = new ArrayList < EditorError > ( ) ; result . add ( new BasicEditorError ( editor , value , getInvalidMessage ( messageKey ) ) ) ; return result ; |
public class JvmTypesBuilder { /** * Produces an inferred type which will be resolved on demand . It should not be attempted to resolve
* this type during the model inference .
* @ return an inferred type . */
public JvmTypeReference inferredType ( ) { } } | XComputedTypeReference result = xtypesFactory . createXComputedTypeReference ( ) ; result . setTypeProvider ( new InferredTypeIndicator ( null ) ) ; return result ; |
public class ResourceManager { @ Override public CompletableFuture < RegistrationResponse > registerJobManager ( final JobMasterId jobMasterId , final ResourceID jobManagerResourceId , final String jobManagerAddress , final JobID jobId , final Time timeout ) { } } | checkNotNull ( jobMasterId ) ; checkNotNull ( jobManagerResourceId ) ; checkNotNull ( jobManagerAddress ) ; checkNotNull ( jobId ) ; if ( ! jobLeaderIdService . containsJob ( jobId ) ) { try { jobLeaderIdService . addJob ( jobId ) ; } catch ( Exception e ) { ResourceManagerException exception = new ResourceManagerException ( "Could not add the job " + jobId + " to the job id leader service." , e ) ; onFatalError ( exception ) ; log . error ( "Could not add job {} to job leader id service." , jobId , e ) ; return FutureUtils . completedExceptionally ( exception ) ; } } log . info ( "Registering job manager {}@{} for job {}." , jobMasterId , jobManagerAddress , jobId ) ; CompletableFuture < JobMasterId > jobMasterIdFuture ; try { jobMasterIdFuture = jobLeaderIdService . getLeaderId ( jobId ) ; } catch ( Exception e ) { // we cannot check the job leader id so let ' s fail
// TODO : Maybe it ' s also ok to skip this check in case that we cannot check the leader id
ResourceManagerException exception = new ResourceManagerException ( "Cannot obtain the " + "job leader id future to verify the correct job leader." , e ) ; onFatalError ( exception ) ; log . debug ( "Could not obtain the job leader id future to verify the correct job leader." ) ; return FutureUtils . completedExceptionally ( exception ) ; } CompletableFuture < JobMasterGateway > jobMasterGatewayFuture = getRpcService ( ) . connect ( jobManagerAddress , jobMasterId , JobMasterGateway . class ) ; CompletableFuture < RegistrationResponse > registrationResponseFuture = jobMasterGatewayFuture . thenCombineAsync ( jobMasterIdFuture , ( JobMasterGateway jobMasterGateway , JobMasterId leadingJobMasterId ) -> { if ( Objects . equals ( leadingJobMasterId , jobMasterId ) ) { return registerJobMasterInternal ( jobMasterGateway , jobId , jobManagerAddress , jobManagerResourceId ) ; } else { final String declineMessage = String . format ( "The leading JobMaster id %s did not match the received JobMaster id %s. " + "This indicates that a JobMaster leader change has happened." , leadingJobMasterId , jobMasterId ) ; log . debug ( declineMessage ) ; return new RegistrationResponse . Decline ( declineMessage ) ; } } , getMainThreadExecutor ( ) ) ; // handle exceptions which might have occurred in one of the futures inputs of combine
return registrationResponseFuture . handleAsync ( ( RegistrationResponse registrationResponse , Throwable throwable ) -> { if ( throwable != null ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Registration of job manager {}@{} failed." , jobMasterId , jobManagerAddress , throwable ) ; } else { log . info ( "Registration of job manager {}@{} failed." , jobMasterId , jobManagerAddress ) ; } return new RegistrationResponse . Decline ( throwable . getMessage ( ) ) ; } else { return registrationResponse ; } } , getRpcService ( ) . getExecutor ( ) ) ; |
public class AllureFileUtils { /** * Returns list of files matches filters in specified directories
* @ param directories which will using to find files
* @ param fileFilter file filter
* @ param dirFilter directory filter
* @ return list of files matches filters in specified directories */
public static List < File > listFiles ( File [ ] directories , IOFileFilter fileFilter , IOFileFilter dirFilter ) { } } | List < File > files = new ArrayList < > ( ) ; for ( File directory : directories ) { if ( ! directory . isDirectory ( ) ) { continue ; } Collection < File > filesInDirectory = FileUtils . listFiles ( directory , fileFilter , dirFilter ) ; files . addAll ( filesInDirectory ) ; } return files ; |
public class DatastreamFilenameHelper { /** * Get datastream filename as defined in RELS - INT
* @ param context
* @ param pid
* @ param dsid
* @ param MIMETYPE
* @ return
* @ throws Exception */
private final String getFilenameFromRels ( Context context , String pid , String dsid , String MIMETYPE ) throws Exception { } } | String filename = "" ; // read rels directly from RELS - INT - can ' t use Management . getRelationships as this requires auth
DOReader reader = m_doManager . getReader ( false , context , pid ) ; Datastream relsInt = reader . GetDatastream ( "RELS-INT" , null ) ; if ( relsInt == null ) return "" ; // try {
// relsInt = m _ apiAService . getDatastreamDissemination ( context , pid , " RELS - INT " , null ) ;
// } catch ( DatastreamNotFoundException e ) {
// return " " ;
Set < RelationshipTuple > relsIntTuples = RDFRelationshipReader . readRelationships ( relsInt . getContentStream ( ) ) ; if ( relsIntTuples . size ( ) == 0 ) return "" ; // find the tuple specifying the filename
int matchingTuples = 0 ; String dsSubject = Constants . FEDORA . uri + pid + "/" + dsid ; for ( RelationshipTuple tuple : relsIntTuples ) { if ( tuple . subject . equals ( dsSubject ) && tuple . predicate . equals ( FILENAME_REL ) ) { // use the first found relationship by default ( report warning later if there are more )
if ( matchingTuples == 0 ) { if ( tuple . isLiteral ) { filename = tuple . object ; } else { logger . warn ( "Object " + pid + " datastream " + dsid + " specifies a filename which is not a literal in RELS-INT" ) ; filename = "" ; } } matchingTuples ++ ; } } if ( matchingTuples > 1 ) { logger . warn ( "Object " + pid + " datastream " + dsid + " specifies more than one filename in RELS-INT." ) ; } return filename ; |
public class CommonOps_DDRM { /** * Creates a new rectangular matrix whose diagonal elements are specified by diagEl and all
* the other elements are zero . < br >
* < br >
* a < sub > ij < / sub > = 0 if i & le ; j < br >
* a < sub > ij < / sub > = diag [ i ] if i = j < br >
* @ see # diag
* @ param numRows Number of rows in the matrix .
* @ param numCols Number of columns in the matrix .
* @ param diagEl Contains the values of the diagonal elements of the resulting matrix .
* @ return A new matrix . */
public static DMatrixRMaj diagR ( int numRows , int numCols , double ... diagEl ) { } } | DMatrixRMaj ret = new DMatrixRMaj ( numRows , numCols ) ; int o = Math . min ( numRows , numCols ) ; for ( int i = 0 ; i < o ; i ++ ) { ret . set ( i , i , diagEl [ i ] ) ; } return ret ; |
public class HintRule { /** * Adds a given product to the list of evidence to add when matched .
* @ param source the source of the evidence
* @ param name the name of the evidence
* @ param value the value of the evidence
* @ param confidence the confidence of the evidence */
public void addAddProduct ( String source , String name , String value , Confidence confidence ) { } } | addProduct . add ( new Evidence ( source , name , value , confidence ) ) ; |
public class CompressedDataOutputStream { /** * Writes a string of any length . */
public void writeLongUTF ( String str ) throws IOException { } } | int length = str . length ( ) ; writeCompressedInt ( length ) ; for ( int position = 0 ; position < length ; position += 20480 ) { int blockLength = length - position ; if ( blockLength > 20480 ) blockLength = 20480 ; String block = str . substring ( position , position + blockLength ) ; writeUTF ( block ) ; } |
public class CheckBoxPainter { /** * { @ inheritDoc } */
@ Override protected void doPaint ( Graphics2D g , JComponent c , int width , int height , Object [ ] extendedCacheKeys ) { } } | g . setRenderingHint ( RenderingHints . KEY_ANTIALIASING , RenderingHints . VALUE_ANTIALIAS_ON ) ; int size = width < height ? width : height ; int x = ( width - size ) / 2 ; int y = ( height - size ) / 2 ; Shape s ; if ( focused ) { boolean useToolBarFocus = isInToolBar ( c ) ; s = shapeGenerator . createRoundRectangle ( x , y , size , size , CornerSize . CHECKBOX_OUTER_FOCUS ) ; g . setPaint ( getFocusPaint ( s , FocusType . OUTER_FOCUS , useToolBarFocus ) ) ; g . fill ( s ) ; s = shapeGenerator . createRoundRectangle ( x + 1 , y + 1 , size - 2 , size - 2 , CornerSize . CHECKBOX_INNER_FOCUS ) ; g . setPaint ( getFocusPaint ( s , FocusType . INNER_FOCUS , useToolBarFocus ) ) ; g . fill ( s ) ; } s = shapeGenerator . createRoundRectangle ( x + 2 , y + 2 , size - 4 , size - 4 , CornerSize . CHECKBOX_BORDER ) ; if ( ! focused ) { dropShadow . fill ( g , s ) ; } g . setPaint ( getCommonBorderPaint ( s , type ) ) ; g . fill ( s ) ; s = shapeGenerator . createRoundRectangle ( x + 3 , y + 3 , size - 6 , size - 6 , CornerSize . CHECKBOX_INTERIOR ) ; g . setPaint ( getCommonInteriorPaint ( s , type ) ) ; g . fill ( s ) ; if ( selected ) { s = createCheckMark ( x , y , size ) ; g . setPaint ( getCheckBoxBulletPaint ( s , type ) ) ; g . fill ( s ) ; } |
public class SimpleDependencyFilenameStrategy { /** * { @ inheritDoc } */
public String getDependencyFileBasename ( Artifact artifact , Boolean outputJarVersion , Boolean useUniqueVersions ) { } } | String filename = artifact . getArtifactId ( ) ; if ( outputJarVersion != null ) { if ( outputJarVersion ) { filename += "__V" ; } else { filename += "-" ; } filename += getDependencyFileVersion ( artifact , useUniqueVersions ) ; } if ( StringUtils . isNotEmpty ( artifact . getClassifier ( ) ) ) { filename += "-" + artifact . getClassifier ( ) ; } return filename ; |
public class Bytes { /** * Creates a Bytes object by copying the data of the given byte array */
public static final Bytes of ( byte [ ] array ) { } } | Objects . requireNonNull ( array ) ; if ( array . length == 0 ) { return EMPTY ; } byte [ ] copy = new byte [ array . length ] ; System . arraycopy ( array , 0 , copy , 0 , array . length ) ; return new Bytes ( copy ) ; |
public class ExportSnapshotRecordMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ExportSnapshotRecord exportSnapshotRecord , ProtocolMarshaller protocolMarshaller ) { } } | if ( exportSnapshotRecord == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( exportSnapshotRecord . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( exportSnapshotRecord . getArn ( ) , ARN_BINDING ) ; protocolMarshaller . marshall ( exportSnapshotRecord . getCreatedAt ( ) , CREATEDAT_BINDING ) ; protocolMarshaller . marshall ( exportSnapshotRecord . getLocation ( ) , LOCATION_BINDING ) ; protocolMarshaller . marshall ( exportSnapshotRecord . getResourceType ( ) , RESOURCETYPE_BINDING ) ; protocolMarshaller . marshall ( exportSnapshotRecord . getState ( ) , STATE_BINDING ) ; protocolMarshaller . marshall ( exportSnapshotRecord . getSourceInfo ( ) , SOURCEINFO_BINDING ) ; protocolMarshaller . marshall ( exportSnapshotRecord . getDestinationInfo ( ) , DESTINATIONINFO_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class HttpFields { /** * Sets the value of a date field .
* @ param name the field name
* @ param date the field date value */
public void putDateField ( String name , long date ) { } } | if ( _dateBuffer == null ) { _dateBuffer = new StringBuffer ( 32 ) ; _calendar = new HttpCal ( ) ; } _dateBuffer . setLength ( 0 ) ; _calendar . setTimeInMillis ( date ) ; formatDate ( _dateBuffer , _calendar , false ) ; put ( name , _dateBuffer . toString ( ) ) ; |
public class UnionSet { /** * Triggers combining . */
@ Override public boolean retainAll ( Collection < ? > c ) { } } | combine ( ) ; if ( combined == null ) combined = new HashSet < > ( ) ; return combined . retainAll ( c ) ; |
public class MapRankDouble { /** * sort the partitions by quick sort , and locate the target index */
private void locate ( double [ ] array , int left , int right , int index ) { } } | int mid = ( left + right ) / 2 ; // System . out . println ( left + " to " + right + " ( " + mid + " ) " ) ;
if ( right == left ) { // System . out . println ( " * " + array [ targetIndex ] ) ;
// result = array [ targetIndex ] ;
return ; } if ( left < right ) { double s = array [ mid ] ; int i = left - 1 ; int j = right + 1 ; while ( true ) { while ( array [ ++ i ] < s ) ; while ( array [ -- j ] > s ) ; if ( i >= j ) break ; swap ( array , i , j ) ; } // System . out . println ( " 2 parts : " + left + " - " + ( i - 1 ) + " and " + ( j + 1 ) + " - " + right ) ;
if ( i > index ) { // the target index in the left partition
// System . out . println ( " left partition " ) ;
locate ( array , left , i - 1 , index ) ; } else { // the target index in the right partition
// System . out . println ( " right partition " ) ;
locate ( array , j + 1 , right , index ) ; } } |
public class ChronoLocalDateTimeImpl { @ Override public long until ( Temporal endExclusive , TemporalUnit unit ) { } } | @ SuppressWarnings ( "unchecked" ) ChronoLocalDateTime < D > end = ( ChronoLocalDateTime < D > ) toLocalDate ( ) . getChronology ( ) . localDateTime ( endExclusive ) ; if ( unit instanceof ChronoUnit ) { ChronoUnit f = ( ChronoUnit ) unit ; if ( f . isTimeBased ( ) ) { long amount = end . getLong ( EPOCH_DAY ) - date . getLong ( EPOCH_DAY ) ; switch ( f ) { case NANOS : amount = Jdk8Methods . safeMultiply ( amount , NANOS_PER_DAY ) ; break ; case MICROS : amount = Jdk8Methods . safeMultiply ( amount , MICROS_PER_DAY ) ; break ; case MILLIS : amount = Jdk8Methods . safeMultiply ( amount , MILLIS_PER_DAY ) ; break ; case SECONDS : amount = Jdk8Methods . safeMultiply ( amount , SECONDS_PER_DAY ) ; break ; case MINUTES : amount = Jdk8Methods . safeMultiply ( amount , MINUTES_PER_DAY ) ; break ; case HOURS : amount = Jdk8Methods . safeMultiply ( amount , HOURS_PER_DAY ) ; break ; case HALF_DAYS : amount = Jdk8Methods . safeMultiply ( amount , 2 ) ; break ; } return Jdk8Methods . safeAdd ( amount , time . until ( end . toLocalTime ( ) , unit ) ) ; } ChronoLocalDate endDate = end . toLocalDate ( ) ; if ( end . toLocalTime ( ) . isBefore ( time ) ) { endDate = endDate . minus ( 1 , ChronoUnit . DAYS ) ; } return date . until ( endDate , unit ) ; } return unit . between ( this , end ) ; |
public class DistributedSortedMapType { /** * Returns a new distributed tree map type .
* @ param < K > the key type
* @ param < V > the value type
* @ return a new distributed tree map type */
@ SuppressWarnings ( "unchecked" ) public static < K extends Comparable < K > , V > DistributedSortedMapType < K , V > instance ( ) { } } | return INSTANCE ; |
public class SemanticSearchServiceImpl { /** * public for testability */
public Hits < ExplainedAttribute > findAttributes ( EntityType sourceEntityType , Set < String > queryTerms , Collection < OntologyTerm > ontologyTerms ) { } } | Iterable < String > attributeIdentifiers = semanticSearchServiceHelper . getAttributeIdentifiers ( sourceEntityType ) ; QueryRule disMaxQueryRule = semanticSearchServiceHelper . createDisMaxQueryRuleForAttribute ( queryTerms , ontologyTerms ) ; List < QueryRule > finalQueryRules = Lists . newArrayList ( new QueryRule ( AttributeMetadata . ID , Operator . IN , attributeIdentifiers ) ) ; if ( ! disMaxQueryRule . getNestedRules ( ) . isEmpty ( ) ) { finalQueryRules . addAll ( Arrays . asList ( new QueryRule ( Operator . AND ) , disMaxQueryRule ) ) ; } Stream < Entity > attributeEntities = dataService . findAll ( ATTRIBUTE_META_DATA , new QueryImpl < > ( finalQueryRules ) ) ; Map < String , String > collectExpanedQueryMap = semanticSearchServiceHelper . collectExpandedQueryMap ( queryTerms , ontologyTerms ) ; // Because the explain - API can be computationally expensive we limit the explanation to the top
// 10 attributes
List < ExplainedAttribute > attributeSearchHits = new ArrayList < > ( ) ; AtomicInteger count = new AtomicInteger ( 0 ) ; attributeEntities . forEach ( attributeEntity -> { Attribute attribute = sourceEntityType . getAttribute ( attributeEntity . getString ( AttributeMetadata . NAME ) ) ; Set < ExplainedQueryString > explainedQueryStrings ; boolean isHighQuality ; if ( count . get ( ) < MAX_NUMBER_EXPLAINED_ATTRIBUTES ) { explainedQueryStrings = convertAttributeToExplainedAttribute ( attribute , collectExpanedQueryMap , new QueryImpl < > ( finalQueryRules ) ) ; isHighQuality = isSingleMatchHighQuality ( queryTerms , Sets . newHashSet ( collectExpanedQueryMap . values ( ) ) , explainedQueryStrings ) ; } else { explainedQueryStrings = emptySet ( ) ; isHighQuality = false ; } attributeSearchHits . add ( ExplainedAttribute . create ( attribute , explainedQueryStrings , isHighQuality ) ) ; count . incrementAndGet ( ) ; } ) ; return Hits . create ( attributeSearchHits . stream ( ) . map ( explainedAttribute -> Hit . create ( explainedAttribute , 1f ) ) . collect ( toList ( ) ) ) ; |
public class WebcamDeallocator { /** * Store devices to be deallocated when TERM signal has been received .
* @ param webcams the webcams array to be stored in deallocator */
protected static void store ( Webcam [ ] webcams ) { } } | if ( HANDLER . get ( ) == null ) { HANDLER . set ( new WebcamDeallocator ( webcams ) ) ; } else { throw new IllegalStateException ( "Deallocator is already set!" ) ; } |
public class InterceptionModelInitializer { /** * CDI lifecycle interceptors */
private void initCdiLifecycleInterceptors ( Set < Annotation > qualifiers ) { } } | if ( qualifiers . isEmpty ( ) ) { return ; } initLifeCycleInterceptor ( InterceptionType . POST_CONSTRUCT , null , qualifiers ) ; initLifeCycleInterceptor ( InterceptionType . PRE_DESTROY , null , qualifiers ) ; initLifeCycleInterceptor ( InterceptionType . PRE_PASSIVATE , null , qualifiers ) ; initLifeCycleInterceptor ( InterceptionType . POST_ACTIVATE , null , qualifiers ) ; |
public class nsacl { /** * Use this API to rename a nsacl resource . */
public static base_response rename ( nitro_service client , nsacl resource , String new_aclname ) throws Exception { } } | nsacl renameresource = new nsacl ( ) ; renameresource . aclname = resource . aclname ; return renameresource . rename_resource ( client , new_aclname ) ; |
public class DataValue { /** * Derive a new { @ link DataValue } from a given { @ link DataValue } .
* The value is assumed to be for a non - value Node attribute , and therefore the source timestamp is not returned .
* @ param from the { @ link DataValue } to derive from .
* @ param timestamps the timestamps to return in the derived value .
* @ return a derived { @ link DataValue } . */
public static DataValue derivedNonValue ( DataValue from , TimestampsToReturn timestamps ) { } } | boolean includeServer = timestamps == TimestampsToReturn . Server || timestamps == TimestampsToReturn . Both ; return new DataValue ( from . value , from . status , null , includeServer ? from . serverTime : null ) ; |
public class BaseField { /** * Sets the field dimension and position .
* @ param box the field dimension and position */
public void setBox ( Rectangle box ) { } } | if ( box == null ) { this . box = null ; } else { this . box = new Rectangle ( box ) ; this . box . normalize ( ) ; } |
public class NodeTraversal { /** * Traverses in post order . */
public static void traversePostOrder ( AbstractCompiler compiler , Node root , AbstractPostOrderCallbackInterface cb ) { } } | traverse ( compiler , root , makePostOrderCallback ( cb ) ) ; |
public class GridGenerator { /** * Method calculates coordinates from a given grid point . */
public Point3d getCoordinatesFromGridPoint ( Point3d gridPoint ) { } } | double dx = minx + latticeConstant * gridPoint . x ; double dy = miny + latticeConstant * gridPoint . y ; double dz = minz + latticeConstant * gridPoint . z ; return new Point3d ( dx , dy , dz ) ; |
public class EventEmitter { /** * Emits an event to < b > ONE < / b > listener from ALL event groups , who are
* listening this event . The service broker uses the default { @ link Strategy
* strategy } of the broker for event redirection and node selection . Sample
* code : < br >
* < br >
* Tree params = new Tree ( ) ; < br >
* params . put ( " a " , true ) ; < br >
* params . putList ( " b " ) . add ( 1 ) . add ( 2 ) . add ( 3 ) ; < br >
* ctx . emit ( " user . modified " , params ) ;
* @ param name
* name of event ( eg . " user . created " )
* @ param payload
* { @ link Tree } structure ( payload of the event ) */
public void emit ( String name , Tree payload ) { } } | eventbus . emit ( name , payload , null , false ) ; |
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public ImageResolutionXBase createImageResolutionXBaseFromString ( EDataType eDataType , String initialValue ) { } } | ImageResolutionXBase result = ImageResolutionXBase . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class WorkbenchEntry { /** * Adds the given visit state to the visit - state queue .
* < p > This method is used for deserialization only .
* @ param visitState the visit state that must be added to the visit - state queue .
* @ see PriorityQueue # add ( Object ) */
public synchronized void add ( VisitState visitState ) { } } | final boolean wasEntirelyBroken = isEntirelyBroken ( ) ; if ( visitState . lastExceptionClass != null ) brokenVisitStates ++ ; visitStates . add ( visitState ) ; assert brokenVisitStates <= visitStates . size ( ) ; if ( wasEntirelyBroken && ! isEntirelyBroken ( ) ) workbenchBroken . decrementAndGet ( ) ; if ( ! wasEntirelyBroken && isEntirelyBroken ( ) ) workbenchBroken . incrementAndGet ( ) ; |
public class DZcs_qr {
    /**
     * Sparse QR factorization of an m-by-n matrix A, A = Q*R.
     *
     * <p>Implements the left-looking Householder QR of CSparse: for each column k it
     * determines the nonzero pattern of R(:,k) by walking the column elimination tree,
     * applies the previously computed Householder reflections, and then computes the
     * Householder vector V(:,k) and scalar Beta[k] for the current column.
     *
     * @param A column-compressed matrix
     * @param S symbolic QR analysis (fill-reducing ordering, elimination tree, row counts)
     * @return numeric QR factorization (N.L = V, N.U = R, N.B = Beta), null on error
     */
    public static DZcsn cs_qr(DZcs A, DZcss S)
    {
        DZcsa Rx = new DZcsa(), Vx = new DZcsa(), Ax = new DZcsa(), x;
        double Beta[];
        int i, k, p, n, vnz, p1, top, m2, len, col, rnz, s[], leftmost[], Ap[], Ai[],
            parent[], Rp[], Ri[], Vp[], Vi[], w[], pinv[], q[];
        DZcs R, V;
        DZcsn N;
        if (!CS_CSC(A) || S == null) return (null);     /* check inputs */
        n = A.n; Ap = A.p; Ai = A.i; Ax.x = A.x;
        /* unpack the symbolic analysis: ordering q, etree parent, row permutation pinv,
         * row count m2 (>= m, padded for structurally rank-deficient A), nnz estimates */
        q = S.q; parent = S.parent; pinv = S.pinv; m2 = S.m2;
        vnz = S.lnz; rnz = S.unz; leftmost = S.leftmost;
        w = new int [m2];                               /* get int workspace */
        x = new DZcsa (m2);                             /* get double workspace */
        N = new DZcsn ();                               /* allocate result */
        s = new int [n];                                /* get int workspace, s is size n */
        // for (k = 0 ; k < m2 ; k++) x.set(k, cs_czero()) ; /* clear workspace x */
        N.L = V = cs_spalloc (m2, n, vnz, true, false); /* allocate result V */
        N.U = R = cs_spalloc (m2, n, rnz, true, false); /* allocate result R */
        N.B = Beta = new double [n];                    /* allocate result Beta */
        if (R == null || V == null || Beta == null) return (cs_ndone (N, null, w, x, false));
        Rp = R.p; Ri = R.i; Rx.x = R.x;
        Vp = V.p; Vi = V.i; Vx.x = V.x;
        for (i = 0; i < m2; i++) w [i] = -1;            /* clear w, to mark nodes */
        rnz = 0; vnz = 0;
        for (k = 0; k < n; k++)                         /* compute V and R */
        {
            Rp [k] = rnz;                               /* R(:,k) starts here */
            Vp [k] = p1 = vnz;                          /* V(:,k) starts here */
            w [k] = k;                                  /* add V(k,k) to pattern of V */
            Vi [vnz++] = k;
            top = n;
            col = q != null ? q [k] : k;                /* kth column of A, after ordering */
            for (p = Ap [col]; p < Ap [col+1]; p++)     /* find R(:,k) pattern */
            {
                i = leftmost [Ai [p]];                  /* i = min(find(A(i,q))) */
                for (len = 0; w [i] != k; i = parent [i])   /* traverse up to k */
                {
                    s [len++] = i;
                    w [i] = k;
                }
                while (len > 0) s [--top] = s [--len];  /* push path on stack */
                i = pinv [Ai [p]];                      /* i = permuted row of A(:,col) */
                x.set (i, Ax.real (p), Ax.imag (p));    /* x(i) = A(:,col) */
                if (i > k && w [i] < k)                 /* pattern of V(:,k) = x(k+1:m) */
                {
                    Vi [vnz++] = i;                     /* add i to pattern of V(:,k) */
                    w [i] = k;
                }
            }
            for (p = top; p < n; p++)                   /* for each i in pattern of R(:,k) */
            {
                i = s [p];                              /* R(i,k) is nonzero */
                cs_happly (V, i, Beta [i], x);          /* apply (V(i),Beta(i)) to x */
                Ri [rnz] = i;                           /* R(i,k) = x(i) */
                Rx.set (rnz++, x.real (i), x.imag (i));
                x.set (i, 0.0, 0.0);                    /* clear workspace entry for reuse */
                if (parent [i] == k) vnz = cs_scatter (V, i, cs_czero (), w, null, k, V, vnz);
            }
            for (p = p1; p < vnz; p++)                  /* gather V(:,k) = x */
            {
                Vx.set (p, x.real (Vi [p]), x.imag (Vi [p]));
                x.set (Vi [p], 0.0, 0.0);
            }
            Ri [rnz] = k;                               /* R(k,k) = norm(x) */
            /* cs_house mutates beta in place, so pass Beta[k] through a 1-element array */
            double [] beta = new double [] { Beta [k] };
            Rx.set (rnz++, cs_house (Vx, p1, beta, vnz - p1));  /* [v,beta] = house(x) */
            Beta [k] = beta [0];
        }
        Rp [n] = rnz;                                   /* finalize R */
        Vp [n] = vnz;                                   /* finalize V */
        return (N);
    }
}
public class ChunkerME { /** * / * inherieted javadoc */
public String [ ] chunk ( Object [ ] toks , String [ ] tags ) { } } | bestSequence = beam . bestSequence ( Arrays . asList ( toks ) , new Object [ ] { tags } ) ; List c = bestSequence . getOutcomes ( ) ; return ( String [ ] ) c . toArray ( new String [ c . size ( ) ] ) ; |
public class Formats { /** * Try to get the line number associated with the given member .
* The reflection API does not expose such an info and so we need to analyse the bytecode . Unfortunately , it seems there is no way to get this kind of
* information for fields . Moreover , the < code > LineNumberTable < / code > attribute is just optional , i . e . the compiler is not required to store this
* information at all . See also < a href = " http : / / docs . oracle . com / javase / specs / jvms / se8 / html / jvms - 4 . html # jvms - 4.1 " > Java Virtual Machine Specification < / a >
* Implementation note : it wouldn ' t be appropriate to add a bytecode scanning dependency just for this functionality , therefore Apache BCEL included in
* Oracle JDK 1.5 + and OpenJDK 1.6 + is used . Other JVMs should not crash as we only use it if it ' s on the classpath and by means of reflection calls .
* @ param member
* @ param resourceLoader
* @ return the line number or 0 if it ' s not possible to find it */
public static int getLineNumber ( Member member ) { } } | if ( ! ( member instanceof Method || member instanceof Constructor ) ) { // We are not able to get this info for fields
return 0 ; } // BCEL is an optional dependency , if we cannot load it , simply return 0
if ( ! Reflections . isClassLoadable ( BCEL_CLASS , WeldClassLoaderResourceLoader . INSTANCE ) ) { return 0 ; } String classFile = member . getDeclaringClass ( ) . getName ( ) . replace ( '.' , '/' ) ; ClassLoaderResourceLoader classFileResourceLoader = new ClassLoaderResourceLoader ( member . getDeclaringClass ( ) . getClassLoader ( ) ) ; InputStream in = null ; try { URL classFileUrl = classFileResourceLoader . getResource ( classFile + ".class" ) ; if ( classFileUrl == null ) { // The class file is not available
return 0 ; } in = classFileUrl . openStream ( ) ; ClassParser cp = new ClassParser ( in , classFile ) ; JavaClass javaClass = cp . parse ( ) ; // First get all declared methods and constructors
// Note that in bytecode constructor is translated into a method
org . apache . bcel . classfile . Method [ ] methods = javaClass . getMethods ( ) ; org . apache . bcel . classfile . Method match = null ; String signature ; String name ; if ( member instanceof Method ) { signature = DescriptorUtils . methodDescriptor ( ( Method ) member ) ; name = member . getName ( ) ; } else if ( member instanceof Constructor ) { signature = DescriptorUtils . makeDescriptor ( ( Constructor < ? > ) member ) ; name = INIT_METHOD_NAME ; } else { return 0 ; } for ( org . apache . bcel . classfile . Method method : methods ) { // Matching method must have the same name , modifiers and signature
if ( method . getName ( ) . equals ( name ) && member . getModifiers ( ) == method . getModifiers ( ) && method . getSignature ( ) . equals ( signature ) ) { match = method ; } } if ( match != null ) { // If a method is found , try to obtain the optional LineNumberTable attribute
LineNumberTable lineNumberTable = match . getLineNumberTable ( ) ; if ( lineNumberTable != null ) { int line = lineNumberTable . getSourceLine ( 0 ) ; return line == - 1 ? 0 : line ; } } // No suitable method found
return 0 ; } catch ( Throwable t ) { return 0 ; } finally { if ( in != null ) { try { in . close ( ) ; } catch ( Exception e ) { return 0 ; } } } |
public class ContainerCreateConfig { private ContainerCreateConfig add ( String name , String value ) { } } | if ( value != null ) { createConfig . addProperty ( name , value ) ; } return this ; |
public class FLVReader { /** * { @ inheritDoc } */
@ Override public ITag readTag ( ) { } } | ITag tag = null ; try { lock . lockInterruptibly ( ) ; long oldPos = getCurrentPosition ( ) ; tag = readTagHeader ( ) ; if ( tag != null ) { boolean isMetaData = tag . getDataType ( ) == TYPE_METADATA ; log . debug ( "readTag, oldPos: {}, tag header: \n{}" , oldPos , tag ) ; if ( ! metadataSent && ! isMetaData && generateMetadata ) { // Generate initial metadata automatically
setCurrentPosition ( oldPos ) ; KeyFrameMeta meta = analyzeKeyFrames ( ) ; if ( meta != null ) { metadataSent = true ; return createFileMeta ( ) ; } } int bodySize = tag . getBodySize ( ) ; IoBuffer body = IoBuffer . allocate ( bodySize , false ) ; // XXX Paul : this assists in ' properly ' handling damaged FLV files
long newPosition = getCurrentPosition ( ) + bodySize ; if ( newPosition <= getTotalBytes ( ) ) { int limit ; while ( getCurrentPosition ( ) < newPosition ) { fillBuffer ( newPosition - getCurrentPosition ( ) ) ; if ( getCurrentPosition ( ) + in . remaining ( ) > newPosition ) { limit = in . limit ( ) ; in . limit ( ( int ) ( newPosition - getCurrentPosition ( ) ) + in . position ( ) ) ; body . put ( in ) ; in . limit ( limit ) ; } else { body . put ( in ) ; } } body . flip ( ) ; tag . setBody ( body ) ; } // now that we have a tag body , check that config has been sent for codecs that require them
if ( body . array ( ) . length > 0 ) { int firstByte = body . array ( ) [ 0 ] & 0xff ; if ( ( ( firstByte & ITag . MASK_SOUND_FORMAT ) >> 4 ) == AudioCodec . AAC . getId ( ) ) { // read second byte to see if its config data
if ( body . array ( ) [ 1 ] != 0 && ! audioConfigRead . get ( ) ) { log . debug ( "Skipping AAC since config has not beean read yet" ) ; body . clear ( ) ; body . free ( ) ; tag = null ; } else if ( body . array ( ) [ 1 ] == 0 && audioConfigRead . compareAndSet ( false , true ) ) { log . debug ( "AAC config read" ) ; } } else if ( ( firstByte & ITag . MASK_VIDEO_CODEC ) == VideoCodec . AVC . getId ( ) ) { // read second byte to see if its config data
if ( body . array ( ) [ 1 ] != 0 && ! videoConfigRead . get ( ) ) { log . debug ( "Skipping AVC since config has not beean read yet" ) ; body . clear ( ) ; body . free ( ) ; tag = null ; } else if ( body . array ( ) [ 1 ] == 0 && videoConfigRead . compareAndSet ( false , true ) ) { log . debug ( "AVC config read" ) ; } } else { log . trace ( "Media without configuration read" ) ; } } else { log . debug ( "Tag body was empty" ) ; } } else { log . debug ( "Tag was null" ) ; } } catch ( UnsupportedDataTypeException e ) { log . warn ( "readTag" , e ) ; close ( ) ; } catch ( InterruptedException e ) { log . warn ( "Exception acquiring lock" , e ) ; } finally { if ( lock . isLocked ( ) ) { lock . unlock ( ) ; } } return tag ; |
public class Matrix4f { /** * Set only the translation components < code > ( m30 , m31 , m32 ) < / code > of this matrix to the values < code > ( xyz . x , xyz . y , xyz . z ) < / code > .
* Note that this will only work properly for orthogonal matrices ( without any perspective ) .
* To build a translation matrix instead , use { @ link # translation ( Vector3fc ) } .
* To apply a translation , use { @ link # translate ( Vector3fc ) } .
* @ see # translation ( Vector3fc )
* @ see # translate ( Vector3fc )
* @ param xyz
* the units to translate in < code > ( x , y , z ) < / code >
* @ return this */
public Matrix4f setTranslation ( Vector3fc xyz ) { } } | return setTranslation ( xyz . x ( ) , xyz . y ( ) , xyz . z ( ) ) ; |
public class SqlInsertMNStatement { /** * generates a values ( ? , ) for a prepared insert statement .
* @ param stmt the StringBuffer */
private void appendListOfValues ( StringBuffer stmt ) { } } | int cnt = getColumns ( ) . length ; stmt . append ( " VALUES (" ) ; for ( int i = 0 ; i < cnt ; i ++ ) { if ( i > 0 ) { stmt . append ( ',' ) ; } stmt . append ( '?' ) ; } stmt . append ( ')' ) ; |
public class InstanceFailoverGroupsInner { /** * Deletes a failover group .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param locationName The name of the region where the resource is located .
* @ param failoverGroupName The name of the failover group .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < Void > deleteAsync ( String resourceGroupName , String locationName , String failoverGroupName ) { } } | return deleteWithServiceResponseAsync ( resourceGroupName , locationName , failoverGroupName ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
public class AppServicePlansInner { /** * Gets server farm usage information .
* Gets server farm usage information .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param name Name of App Service Plan
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the PagedList & lt ; CsmUsageQuotaInner & gt ; object if successful . */
public PagedList < CsmUsageQuotaInner > listUsages ( final String resourceGroupName , final String name ) { } } | ServiceResponse < Page < CsmUsageQuotaInner > > response = listUsagesSinglePageAsync ( resourceGroupName , name ) . toBlocking ( ) . single ( ) ; return new PagedList < CsmUsageQuotaInner > ( response . body ( ) ) { @ Override public Page < CsmUsageQuotaInner > nextPage ( String nextPageLink ) { return listUsagesNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ; |
public class ClientUtils {
    /**
     * Request/Response Helper methods.
     *
     * Issues the given HTTP request, expecting a resource in {@code resourceFormat}.
     * Delegates to the three-argument overload with a {@code null} third argument
     * (presumably meaning "no extra payload/option" — confirm against that overload).
     *
     * @param resourceFormat the wire format expected for the resource
     * @param request the HTTP request to execute
     * @return the resulting resource request
     */
    protected <T extends Resource> ResourceRequest<T> issueResourceRequest(String resourceFormat, HttpUriRequest request) {
        return issueResourceRequest(resourceFormat, request, null);
    }
}
public class Resource { /** * Close the underlying InputStream , or release / unmap the underlying ByteBuffer . */
@ Override public void close ( ) { } } | // Override in subclasses , and call super . close ( )
if ( inputStream != null ) { try { if ( inputStream instanceof InputStreamResourceCloser ) { ( ( InputStreamResourceCloser ) inputStream ) . closeInputStream ( ) ; } else { inputStream . close ( ) ; } } catch ( final IOException e ) { // Ignore
} inputStream = null ; } |
public class IPTC { /** * Creates the metadata .
* @ return the hash map */
@ Override public Metadata createMetadata ( ) { } } | Metadata metadata = new Metadata ( ) ; try { for ( DataSet ds : iimFile . getDataSets ( ) ) { Object value = "" ; try { value = ds . getValue ( ) ; } catch ( Exception ex ) { } DataSetInfo info = ds . getInfo ( ) ; // System . out . println ( info . toString ( ) + " " + info . getName ( ) + " : " + value ) ;
metadata . add ( info . getName ( ) , new Text ( value . toString ( ) ) ) ; } } catch ( Exception ex ) { /* Nothing to be shown */
} return metadata ; |
public class EntryStream { /** * Returns a stream consisting of the entries of this stream , additionally
* performing the provided action on each entry key as entries are consumed
* from the resulting stream .
* This is an < a href = " package - summary . html # StreamOps " > intermediate < / a >
* operation .
* For parallel stream pipelines , the action may be called at whatever time
* and in whatever thread the element is made available by the upstream
* operation . If the action modifies shared state , it is responsible for
* providing the required synchronization .
* @ param keyAction a non - interfering action to perform on the keys of the
* entries as they are consumed from the stream
* @ return the new stream
* @ since 0.2.3 */
public EntryStream < K , V > peekKeys ( Consumer < ? super K > keyAction ) { } } | return peek ( e -> keyAction . accept ( e . getKey ( ) ) ) ; |
public class Slf4jCurrentTraceContextFactory {
    /**
     * Current Slf4j trace context.
     *
     * Builds a thread-local {@code CurrentTraceContext} with an SLF4J scope decorator,
     * so trace identifiers are mirrored into the MDC while a span scope is active.
     * The bean is only created when both {@code MDC} and {@code CurrentTraceContext}
     * are on the classpath.
     *
     * @return Slf4j trace context
     */
    @Requires(classes = { MDC.class, CurrentTraceContext.class })
    @Context
    CurrentTraceContext currentTraceContext() {
        return ThreadLocalCurrentTraceContext.newBuilder().addScopeDecorator(new Slf4jScopeDecorator()).build();
    }
}
public class ClassDocNode { /** * rather we just didn ' t care about the difference */
private boolean areTypeNamesEqual ( String name1 , String name2 ) { } } | return name1 . replace ( '$' , '.' ) . equals ( name2 . replace ( '$' , '.' ) ) ; |
public class Elements { /** * Wrap the supplied HTML around each matched elements . For example , with HTML
* { @ code < p > < b > This < / b > is < b > Jsoup < / b > < / p > } ,
* < code > doc . select ( " b " ) . wrap ( " & lt ; i & gt ; & lt ; / i & gt ; " ) ; < / code >
* becomes { @ code < p > < i > < b > This < / b > < / i > is < i > < b > jsoup < / b > < / i > < / p > }
* @ param html HTML to wrap around each element , e . g . { @ code < div class = " head " > < / div > } . Can be arbitrarily deep .
* @ return this ( for chaining )
* @ see Element # wrap */
public Elements wrap ( String html ) { } } | Validate . notEmpty ( html ) ; for ( Element element : this ) { element . wrap ( html ) ; } return this ; |
public class ClusterHeartbeatManager { /** * Accepts the heartbeat message from { @ code member } created at { @ code timestamp } . The timestamp must be
* related to the cluster clock , not the local clock . The heartbeat is ignored if the duration between
* { @ code timestamp } and the current cluster time is more than { @ link GroupProperty # MAX _ NO _ HEARTBEAT _ SECONDS } / 2.
* If the sending node is the master , this node will also calculate and set the cluster clock diff .
* @ param member the member sending the heartbeat
* @ param timestamp the timestamp when the heartbeat was created */
public void onHeartbeat ( MemberImpl member , long timestamp ) { } } | if ( member == null ) { return ; } long clusterTime = clusterClock . getClusterTime ( ) ; if ( logger . isFineEnabled ( ) ) { logger . fine ( format ( "Received heartbeat from %s (now: %s, timestamp: %s)" , member , timeToString ( clusterTime ) , timeToString ( timestamp ) ) ) ; } if ( clusterTime - timestamp > maxNoHeartbeatMillis / 2 ) { logger . warning ( format ( "Ignoring heartbeat from %s since it is expired (now: %s, timestamp: %s)" , member , timeToString ( clusterTime ) , timeToString ( timestamp ) ) ) ; return ; } if ( isMaster ( member ) ) { clusterClock . setMasterTime ( timestamp ) ; } heartbeatFailureDetector . heartbeat ( member , clusterClock . getClusterTime ( ) ) ; MembershipManager membershipManager = clusterService . getMembershipManager ( ) ; membershipManager . clearMemberSuspicion ( member . getAddress ( ) , "Valid heartbeat" ) ; nodeEngine . getQuorumService ( ) . onHeartbeat ( member , timestamp ) ; |
public class GeoJsonReaderDriver {
    /**
     * Parses all types of geometries and checks that the GeoJSON is well formed.
     *
     * Syntax:
     * "geometry": { "type": "Point", "coordinates": [102.0, 0.5] }
     *
     * Dispatches on the geometry type to the matching metadata parser and records the
     * encountered type in {@code finalGeometryTypes}.
     *
     * @param jp the JSON parser positioned at the geometry
     * @param geometryType the value of the GeoJSON "type" member
     * @throws IOException if the JSON stream cannot be read
     * @throws SQLException if the geometry type is not supported
     */
    private void parseGeometryMetadata(JsonParser jp, String geometryType) throws IOException, SQLException {
        if (geometryType.equalsIgnoreCase(GeoJsonField.POINT)) {
            parsePointMetadata(jp);
            finalGeometryTypes.add(GeoJsonField.POINT);
        } else if (geometryType.equalsIgnoreCase(GeoJsonField.MULTIPOINT)) {
            parseMultiPointMetadata(jp);
            finalGeometryTypes.add(GeoJsonField.MULTIPOINT);
        } else if (geometryType.equalsIgnoreCase(GeoJsonField.LINESTRING)) {
            parseLinestringMetadata(jp);
            finalGeometryTypes.add(GeoJsonField.LINESTRING);
        } else if (geometryType.equalsIgnoreCase(GeoJsonField.MULTILINESTRING)) {
            parseMultiLinestringMetadata(jp);
            finalGeometryTypes.add(GeoJsonField.MULTILINESTRING);
        } else if (geometryType.equalsIgnoreCase(GeoJsonField.POLYGON)) {
            parsePolygonMetadata(jp);
            finalGeometryTypes.add(GeoJsonField.POLYGON);
        } else if (geometryType.equalsIgnoreCase(GeoJsonField.MULTIPOLYGON)) {
            parseMultiPolygonMetadata(jp);
            finalGeometryTypes.add(GeoJsonField.MULTIPOLYGON);
        } else if (geometryType.equalsIgnoreCase(GeoJsonField.GEOMETRYCOLLECTION)) {
            parseGeometryCollectionMetadata(jp);
            finalGeometryTypes.add(GeoJsonField.GEOMETRYCOLLECTION);
        } else {
            throw new SQLException("Unsupported geometry : " + geometryType);
        }
    }
}
public class SamlRegisteredServiceServiceProviderMetadataFacade { /** * Adapt saml metadata and parse . Acts as a facade .
* @ param resolver the resolver
* @ param registeredService the service
* @ param request the request
* @ return the saml metadata adaptor */
public static Optional < SamlRegisteredServiceServiceProviderMetadataFacade > get ( final SamlRegisteredServiceCachingMetadataResolver resolver , final SamlRegisteredService registeredService , final RequestAbstractType request ) { } } | return get ( resolver , registeredService , SamlIdPUtils . getIssuerFromSamlObject ( request ) ) ; |
public class LuaScriptBuilder { /** * End building the script , adding a return value statement
* @ param config the configuration for the script to build
* @ param value the value to return
* @ return the new { @ link LuaScript } instance */
public LuaScript endScriptReturn ( LuaValue value , LuaScriptConfig config ) { } } | add ( new LuaAstReturnStatement ( argument ( value ) ) ) ; String scriptText = buildScriptText ( ) ; return new BasicLuaScript ( scriptText , config ) ; |
public class Orders { @ MemberOrder ( sequence = "2" ) public Order create ( @ ParameterLayout ( named = "Order Number" ) final String number , @ ParameterLayout ( named = "Customer Name" ) final String customerName , @ ParameterLayout ( named = "Order Date" ) final LocalDate date , @ Parameter ( optionality = Optionality . OPTIONAL ) @ ParameterLayout ( named = "Preferences" ) final String preferences ) { } } | final Order obj = container . newTransientInstance ( Order . class ) ; obj . setNumber ( number ) ; obj . setDate ( date ) ; obj . setCustomerName ( customerName ) ; obj . setPreferences ( preferences ) ; container . persistIfNotAlready ( obj ) ; return obj ; |
public class MongoNativeExtractor { /** * Gets split data .
* @ param collection the collection
* @ return the split data */
private BasicDBList getSplitData ( DBCollection collection ) { } } | final DBObject cmd = BasicDBObjectBuilder . start ( "splitVector" , collection . getFullName ( ) ) . add ( "keyPattern" , new BasicDBObject ( MONGO_DEFAULT_ID , 1 ) ) . add ( "force" , false ) . add ( "maxChunkSize" , splitSize ) . get ( ) ; CommandResult splitVectorResult = collection . getDB ( ) . getSisterDB ( "admin" ) . command ( cmd ) ; return ( BasicDBList ) splitVectorResult . get ( SPLIT_KEYS ) ; |
public class CmsEntityObserver { /** * Removes this observer from the entities change handler registration and clears registered listeners . < p > */
public void clear ( ) { } } | if ( m_handlerRegistration != null ) { m_handlerRegistration . removeHandler ( ) ; m_handlerRegistration = null ; } m_changeListeners . clear ( ) ; m_scopeValues . clear ( ) ; m_observerdEntity = null ; |
public class AmazonDirectConnectClient { /** * Confirms the creation of the specified hosted connection on an interconnect .
* Upon creation , the hosted connection is initially in the < code > Ordering < / code > state , and remains in this state
* until the owner confirms creation of the hosted connection .
* @ param confirmConnectionRequest
* @ return Result of the ConfirmConnection operation returned by the service .
* @ throws DirectConnectServerException
* A server - side error occurred .
* @ throws DirectConnectClientException
* One or more parameters are not valid .
* @ sample AmazonDirectConnect . ConfirmConnection
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / directconnect - 2012-10-25 / ConfirmConnection "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ConfirmConnectionResult confirmConnection ( ConfirmConnectionRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeConfirmConnection ( request ) ; |
public class AbstractFaxJob { /** * This function sets the path to the file to fax .
* @ param filePath
* The path to the file to fax */
public void setFilePath ( String filePath ) { } } | // create a new file
File fileInstance = null ; if ( filePath != null ) { fileInstance = new File ( filePath ) ; } // set file
this . setFile ( fileInstance ) ; |
public class ThreadIdentitySecurityHelper { /** * The getSubjectString ( ) method uses a doPrivileged wrapper
* to call toString ( ) on a Subject passed as an argument , and
* the result is returned . This method is designed to be
* called in cases where the Subject needs to be traced , but
* the calling classes are not required to have permission to
* access the subject . PrivilegedActionExceptions received
* while calling toString ( ) on the Subject are not rethrown
* so as not to interrupt the flow of the calling method . If
* a null Subject is passed as an argument , a value of null
* is returned .
* @ param Subject subject
* @ return String */
private String getSubjectString ( Subject subject ) { } } | String returnVal = null ; if ( subject != null ) { if ( System . getSecurityManager ( ) != null ) { // Java 2 Security enabled
final Subject newSubject = subject ; PrivilegedExceptionAction privExAction = new PrivilegedExceptionAction ( ) { @ Override public Object run ( ) throws Exception { return newSubject . toString ( ) ; } } ; try { returnVal = ( String ) AccessController . doPrivileged ( privExAction ) ; } catch ( PrivilegedActionException pae ) { if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Exception received in getSubjectString:" , pae ) ; } returnVal = "Subject cannot be traced due to a PrivilegedActionException" ; } } // end security manager ! = null
else { returnVal = subject . toString ( ) ; } } // end subject ! = null
return returnVal ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.