signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class TextureAtlas { /** * Attempts to find a usable region of this { @ link TextureAtlas } * @ param width Width of the region * @ param height Height of the region * @ return The data for a valid region , null if none found . */ private RegionData findUsableRegion ( int width , int height ) { } }
final int imageWidth = image . getWidth ( ) ; final int imageHeight = image . getHeight ( ) ; for ( int y = 0 ; y < imageHeight - height ; y ++ ) { for ( int x = 0 ; x < imageWidth - width ; x ++ ) { final RegionData data = new RegionData ( x , y , width , height ) ; if ( ! intersectsOtherTexture ( data ) ) { return data ; } } } return null ;
public class AbstractRepositorySuspender {

    /**
     * Suspend repository which means that allow only read operations.
     * All writing threads will wait until resume operations invoked.
     *
     * @throws RepositoryException if changing the repository state fails
     */
    protected void suspendRepository() throws RepositoryException {
        // Only callers holding the repository-management runtime permission may suspend.
        SecurityHelper.validateSecurityPermission(JCRRuntimePermissions.MANAGE_REPOSITORY_PERMISSION);
        repository.setState(ManageableRepository.SUSPENDED);
    }
}
public class PropertiesUtils { /** * Save properties to a file . * @ param file * Destination file - Cannot be < code > null < / code > and parent directory must exist . * @ param props * Properties to save - Cannot be < code > null < / code > . * @ param comment * Comment for the file . */ public static void saveProperties ( final File file , final Properties props , final String comment ) { } }
checkNotNull ( "file" , file ) ; checkNotNull ( "props" , props ) ; if ( ! file . getParentFile ( ) . exists ( ) ) { throw new IllegalArgumentException ( "The parent directory '" + file . getParentFile ( ) + "' does not exist [file='" + file + "']!" ) ; } try ( final OutputStream outStream = new FileOutputStream ( file ) ) { props . store ( outStream , comment ) ; } catch ( final IOException ex ) { throw new RuntimeException ( ex ) ; }
public class SystemHookManager { /** * Verifies the provided Event and fires it off to the registered listeners . * @ param event the Event instance to handle * @ throws GitLabApiException if the event is not supported */ public void handleEvent ( SystemHookEvent event ) throws GitLabApiException { } }
if ( event != null ) { LOGGER . info ( "handleEvent:" + event . getClass ( ) . getSimpleName ( ) + ", eventName=" + event . getEventName ( ) ) ; fireEvent ( event ) ; } else { LOGGER . warning ( "handleEvent: provided event cannot be null!" ) ; }
public class MorphiaMongoDatastore { public < T , K extends Serializable > T findById ( final Class < T > type , final K id ) { } }
_LOG_ . info ( "findById: type=" + type + ", ID=" + id ) ; long ts_start = System . currentTimeMillis ( ) ; T p_object = null ; try { p_object = getDAO ( type ) . get ( id ) ; } catch ( Exception ex ) { throw new RepositoryException ( ex ) ; } _LOG_ . info ( "findById: elapsed time (ms)=" + ( System . currentTimeMillis ( ) - ts_start ) ) ; _LOG_ . debug ( ( p_object == null ? "findById: object NOT found" : "findById: object found" ) ) ; if ( p_object == null ) { ObjectNotFoundException ex = new ObjectNotFoundException ( "type=" + type + ", id=" + id ) ; ex . setType ( type ) ; ex . setId ( id ) ; throw ex ; } return p_object ;
public class LDAPController {

    /**
     * Gets a mapping.
     *
     * @param id id of the LDAP group mapping to load
     * @return the LDAP mapping for the given id
     */
    @RequestMapping(value = "ldap-mapping/{id}", method = RequestMethod.GET)
    public LDAPMapping getMapping(@PathVariable ID id) {
        // Caller must be allowed to manage account groups before reading mappings.
        securityService.checkGlobalFunction(AccountGroupManagement.class);
        return LDAPMapping.of(accountGroupMappingService.getMapping(LDAPExtensionFeature.LDAP_GROUP_MAPPING, id));
    }
}
public class QueryDocumentSnapshot { /** * Returns the contents of the document converted to a POJO . * @ param valueType The Java class to create * @ return The contents of the document in an object of type T */ @ Nonnull @ Override public < T > T toObject ( @ Nonnull Class < T > valueType ) { } }
T result = super . toObject ( valueType ) ; Preconditions . checkNotNull ( result , "Object in a QueryDocumentSnapshot should be non-null" ) ; return result ;
public class AstUtil { /** * Return the List of VariableExpression objects referenced by the specified DeclarationExpression . * @ param declarationExpression - the DeclarationExpression * @ return the List of VariableExpression objects */ public static List < Expression > getVariableExpressions ( DeclarationExpression declarationExpression ) { } }
Expression leftExpression = declarationExpression . getLeftExpression ( ) ; // ! important : performance enhancement if ( leftExpression instanceof ArrayExpression ) { List < Expression > expressions = ( ( ArrayExpression ) leftExpression ) . getExpressions ( ) ; return expressions . isEmpty ( ) ? Arrays . asList ( leftExpression ) : expressions ; } else if ( leftExpression instanceof ListExpression ) { List < Expression > expressions = ( ( ListExpression ) leftExpression ) . getExpressions ( ) ; return expressions . isEmpty ( ) ? Arrays . asList ( leftExpression ) : expressions ; } else if ( leftExpression instanceof TupleExpression ) { List < Expression > expressions = ( ( TupleExpression ) leftExpression ) . getExpressions ( ) ; return expressions . isEmpty ( ) ? Arrays . asList ( leftExpression ) : expressions ; } else if ( leftExpression instanceof VariableExpression ) { return Arrays . asList ( leftExpression ) ; } // todo : write warning return Collections . emptyList ( ) ;
public class DFAState { /** * Returns true if state doesn ' t contain any outbound transition * and is not accepting state * @ return */ boolean isDeadEnd ( ) { } }
if ( isAccepting ( ) ) { return false ; } for ( Transition < DFAState < T > > next : transitions . values ( ) ) { if ( next . getTo ( ) != this ) { return false ; } } return true ;
public class ThingDocument {

    /**
     * The attributes.
     *
     * @param attributes The attributes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ThingDocument withAttributes(java.util.Map<String, String> attributes) {
        // Fluent-setter variant of setAttributes for builder-style chaining.
        setAttributes(attributes);
        return this;
    }
}
public class BoxTableExtractor { /** * Adjusts the real table heading rows based on the cell information * @ param YNum * The number of non - duplicated Y axes in the table area * @ param cc * the number of the table columns * @ param tc * the object of the table candidate */ public void getRealHeadingBasedOnCells ( int YNum , int cc , TableCandidate tc ) { } }
int footnoteLineIndex = tc . getFootnoteBeginRow ( ) ; String [ ] [ ] cells = tc . getCells ( ) ; int i = 0 ; while ( i < footnoteLineIndex ) { int j = 0 , nonNullCellNum = 0 ; while ( j < cc ) { if ( cells [ i ] [ j ] == "" ) { break ; } nonNullCellNum ++ ; j ++ ; } if ( nonNullCellNum == cc ) break ; else i ++ ; } /* * if the next row contain unit symbols , this row has a very large possibility to be a table column heading row */ if ( ( i + 1 ) < footnoteLineIndex ) { boolean hasUnitSymbols = false ; int j = 0 ; while ( j < cc ) { if ( ( cells [ i + 1 ] [ j ] . indexOf ( "/yg rnI--" ) > 0 ) || ( cells [ i + 1 ] [ j ] . indexOf ( "(%)" ) > 0 ) || ( cells [ i + 1 ] [ j ] . indexOf ( "w-'" ) > 0 ) || ( cells [ i + 1 ] [ j ] . indexOf ( "CLg g-'" ) > 0 ) || ( cells [ i + 1 ] [ j ] . indexOf ( "pg ml-1" ) >= 0 ) ) { i ++ ; break ; } j ++ ; } } /* * Based on observation , the column heading rows always contain missing cells . * But the first data row always does not contain missing cell */ if ( i == YNum ) i = 0 ; if ( i > 0 ) i -- ; tc . setHeadingLineNumber ( i ) ;
public class SyllableCounter { /** * Main point of this library . Method to count the number of syllables of a * word using a fallback method as documented at the class level of this * documentation . * @ param word the word you want to count the syllables of . * @ return the number of syllables of the word . */ public int count ( final String word ) { } }
if ( word == null ) { throw new NullPointerException ( "the word parameter was null." ) ; } else if ( word . length ( ) == 0 ) { return 0 ; } else if ( word . length ( ) == 1 ) { return 1 ; } final String lowerCase = word . toLowerCase ( Locale . ENGLISH ) ; if ( exceptions . containsKey ( lowerCase ) ) { return exceptions . get ( lowerCase ) ; } final String prunned ; if ( lowerCase . charAt ( lowerCase . length ( ) - 1 ) == 'e' ) { prunned = lowerCase . substring ( 0 , lowerCase . length ( ) - 1 ) ; } else { prunned = lowerCase ; } int count = 0 ; boolean prevIsVowel = false ; for ( char c : prunned . toCharArray ( ) ) { final boolean isVowel = vowels . contains ( c ) ; if ( isVowel && ! prevIsVowel ) { ++ count ; } prevIsVowel = isVowel ; } count += addSyls . stream ( ) . filter ( pattern -> pattern . matcher ( prunned ) . find ( ) ) . count ( ) ; count -= subSyls . stream ( ) . filter ( pattern -> pattern . matcher ( prunned ) . find ( ) ) . count ( ) ; return count > 0 ? count : 1 ;
public class JavaFastBlur {

    /**
     * Applies a Stack Blur of the given radius to {@code inBitmap}, writing the
     * blurred pixels into {@code outBitmap}.
     *
     * Ref... http://stackoverflow.com/questions/2067955/fast-bitmap-blur-for-android-sdk
     *
     * @param inBitmap source bitmap (read only)
     * @param outBitmap destination bitmap receiving the blurred pixels
     * @param radius blur radius in pixels; must be >= 1
     * @return {@code outBitmap}, or null if {@code radius < 1}
     */
    private Bitmap fastBlur(Bitmap inBitmap, Bitmap outBitmap, int radius) {
        // Stack Blur v1.0 from
        // http://www.quasimondo.com/StackBlurForCanvas/StackBlurDemo.html
        // Java Author: Mario Klingemann <mario at quasimondo.com>
        // http://incubator.quasimondo.com
        // created Feburary 29, 2004
        // Android port : Yahel Bouaziz <yahel at kayenko.com>
        // http://www.kayenko.com
        // ported april 5th, 2012
        // This is a compromise between Gaussian Blur and Box blur
        // It creates much better looking blurs than Box Blur, but is
        // 7x faster than my Gaussian Blur implementation.
        // I called it Stack Blur because this describes best how this
        // filter works internally: it creates a kind of moving stack
        // of colors whilst scanning through the image. Thereby it
        // just has to add one new block of color to the right side
        // of the stack and remove the leftmost color. The remaining
        // colors on the topmost layer of the stack are either added on
        // or reduced by one, depending on if they are on the right or
        // on the left side of the stack.
        // If you are using this algorithm in your code please add
        // the following line:
        // Stack Blur Algorithm by Mario Klingemann <mario@quasimondo.com>
        long start = Debug.threadCpuTimeNanos();
        if (radius < 1) {
            return null;
        }
        int w = inBitmap.getWidth();
        int h = inBitmap.getHeight();
        int[] pix = new int[w * h];
        inBitmap.getPixels(pix, 0, w, 0, 0, w, h);
        int wm = w - 1;
        int hm = h - 1;
        int wh = w * h;
        int div = radius + radius + 1;
        // per-channel intermediate buffers for the horizontal pass
        int r[] = new int[wh];
        int g[] = new int[wh];
        int b[] = new int[wh];
        int rsum, gsum, bsum, x, y, i, p, yp, yi, yw;
        int vmin[] = new int[Math.max(w, h)];
        // dv[] is a precomputed division lookup table: dv[sum] == sum / divsum
        int divsum = (div + 1) >> 1;
        divsum *= divsum;
        int dv[] = new int[256 * divsum];
        for (i = 0; i < 256 * divsum; i++) {
            dv[i] = (i / divsum);
        }
        yw = yi = 0;
        int[][] stack = new int[div][3];
        int stackpointer;
        int stackstart;
        int[] sir;
        int rbs;
        int r1 = radius + 1;
        int routsum, goutsum, boutsum;
        int rinsum, ginsum, binsum;
        // --- horizontal pass: blur each row into r[], g[], b[] ---
        for (y = 0; y < h; y++) {
            rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
            for (i = -radius; i <= radius; i++) {
                // clamp sampling to the row bounds at the edges
                p = pix[yi + Math.min(wm, Math.max(i, 0))];
                sir = stack[i + radius];
                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);
                rbs = r1 - Math.abs(i);
                rsum += sir[0] * rbs;
                gsum += sir[1] * rbs;
                bsum += sir[2] * rbs;
                if (i > 0) {
                    rinsum += sir[0];
                    ginsum += sir[1];
                    binsum += sir[2];
                } else {
                    routsum += sir[0];
                    goutsum += sir[1];
                    boutsum += sir[2];
                }
            }
            stackpointer = radius;
            for (x = 0; x < w; x++) {
                r[yi] = dv[rsum];
                g[yi] = dv[gsum];
                b[yi] = dv[bsum];
                rsum -= routsum;
                gsum -= goutsum;
                bsum -= boutsum;
                stackstart = stackpointer - radius + div;
                sir = stack[stackstart % div];
                routsum -= sir[0];
                goutsum -= sir[1];
                boutsum -= sir[2];
                if (y == 0) {
                    vmin[x] = Math.min(x + radius + 1, wm);
                }
                p = pix[yw + vmin[x]];
                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);
                rinsum += sir[0];
                ginsum += sir[1];
                binsum += sir[2];
                rsum += rinsum;
                gsum += ginsum;
                bsum += binsum;
                stackpointer = (stackpointer + 1) % div;
                sir = stack[(stackpointer) % div];
                routsum += sir[0];
                goutsum += sir[1];
                boutsum += sir[2];
                rinsum -= sir[0];
                ginsum -= sir[1];
                binsum -= sir[2];
                yi++;
            }
            yw += w;
        }
        // --- vertical pass: blur each column of the intermediate buffers back into pix[] ---
        for (x = 0; x < w; x++) {
            rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
            yp = -radius * w;
            for (i = -radius; i <= radius; i++) {
                yi = Math.max(0, yp) + x;
                sir = stack[i + radius];
                sir[0] = r[yi];
                sir[1] = g[yi];
                sir[2] = b[yi];
                rbs = r1 - Math.abs(i);
                rsum += r[yi] * rbs;
                gsum += g[yi] * rbs;
                bsum += b[yi] * rbs;
                if (i > 0) {
                    rinsum += sir[0];
                    ginsum += sir[1];
                    binsum += sir[2];
                } else {
                    routsum += sir[0];
                    goutsum += sir[1];
                    boutsum += sir[2];
                }
                if (i < hm) {
                    yp += w;
                }
            }
            yi = x;
            stackpointer = radius;
            for (y = 0; y < h; y++) {
                // Preserve alpha channel: (0xff00000 & pix[yi])
                pix[yi] = (0xff000000 & pix[yi]) | (dv[rsum] << 16) | (dv[gsum] << 8) | dv[bsum];
                rsum -= routsum;
                gsum -= goutsum;
                bsum -= boutsum;
                stackstart = stackpointer - radius + div;
                sir = stack[stackstart % div];
                routsum -= sir[0];
                goutsum -= sir[1];
                boutsum -= sir[2];
                if (x == 0) {
                    vmin[y] = Math.min(y + r1, hm) * w;
                }
                p = x + vmin[y];
                sir[0] = r[p];
                sir[1] = g[p];
                sir[2] = b[p];
                rinsum += sir[0];
                ginsum += sir[1];
                binsum += sir[2];
                rsum += rinsum;
                gsum += ginsum;
                bsum += binsum;
                stackpointer = (stackpointer + 1) % div;
                sir = stack[stackpointer];
                routsum += sir[0];
                goutsum += sir[1];
                boutsum += sir[2];
                rinsum -= sir[0];
                ginsum -= sir[1];
                binsum -= sir[2];
                yi += w;
            }
        }
        outBitmap.setPixels(pix, 0, w, 0, 0, w, h);
        // start > 0 means thread CPU timing is supported; record a timing sample.
        if (start > 0) {
            long duration = Debug.threadCpuTimeNanos() - start;
            blurConfig.asyncPolicy().putSampleData(false, calculateComputation(w, h, radius), duration);
            if (blurConfig.debug()) {
                Log.d(TAG, String.format(Locale.US, "fastBlur() took %d ms.", duration / 1000000L));
            }
        }
        return outBitmap;
    }
}
public class BinaryHeapPriorityQueue { /** * Promotes a key in the queue , adding it if it wasn ' t there already . If the specified priority is worse than the current priority , nothing happens . Faster than add if you don ' t care about whether the key is new . * @ param key an < code > Object < / code > value * @ return whether the priority actually improved . */ public boolean relaxPriority ( E key , double priority ) { } }
Entry < E > entry = getEntry ( key ) ; if ( entry == null ) { entry = makeEntry ( key ) ; } if ( compare ( priority , entry . priority ) <= 0 ) { return false ; } entry . priority = priority ; heapifyUp ( entry ) ; return true ;
public class ApiOvhTelephony { /** * Alter this object properties * REST : PUT / telephony / { billingAccount } / ovhPabx / { serviceName } / dialplan / { dialplanId } / extension / { extensionId } / rule / { ruleId } * @ param body [ required ] New object properties * @ param billingAccount [ required ] The name of your billingAccount * @ param serviceName [ required ] * @ param dialplanId [ required ] * @ param extensionId [ required ] * @ param ruleId [ required ] */ public void billingAccount_ovhPabx_serviceName_dialplan_dialplanId_extension_extensionId_rule_ruleId_PUT ( String billingAccount , String serviceName , Long dialplanId , Long extensionId , Long ruleId , OvhOvhPabxDialplanExtensionRule body ) throws IOException { } }
String qPath = "/telephony/{billingAccount}/ovhPabx/{serviceName}/dialplan/{dialplanId}/extension/{extensionId}/rule/{ruleId}" ; StringBuilder sb = path ( qPath , billingAccount , serviceName , dialplanId , extensionId , ruleId ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class HttpInboundLink { /** * Called when an error occurs on this connection . * @ param inVC * @ param t */ @ Override public void error ( VirtualConnection inVC , Throwable t ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "error() called on " + this + " " + inVC ) ; } try { close ( inVC , ( Exception ) t ) ; } catch ( ClassCastException cce ) { // no FFDC required close ( inVC , new Exception ( "Problem when finishing response" ) ) ; }
public class MemcmpEncoder { /** * Bytes are encoded by writing all bytes out as themselves , except for bytes * of value 0x00 . Bytes of value 0x00 are encoded as two bytes , 0x00 0x01 . The * end marker is signified by two 0x00 bytes . This guarantees that the end * marker is the least possible value . * @ param bytes * The bytes to encode . */ @ Override public void writeBytes ( ByteBuffer bytes ) throws IOException { } }
writeBytes ( bytes . array ( ) , bytes . position ( ) , bytes . remaining ( ) ) ;
public class SQLMetadataSegmentManager { /** * For the garbage collector in Java , it ' s better to keep new objects short - living , but once they are old enough * ( i . e . promoted to old generation ) , try to keep them alive . In { @ link # poll ( ) } , we fetch and deserialize all * existing segments each time , and then replace them in { @ link # dataSources } . This method allows to use already * existing ( old ) segments when possible , effectively interning them a - la { @ link String # intern } or { @ link * com . google . common . collect . Interner } , aiming to make the majority of { @ link DataSegment } objects garbage soon after * they are deserialized and to die in young generation . It allows to avoid fragmentation of the old generation and * full GCs . */ private DataSegment replaceWithExistingSegmentIfPresent ( DataSegment segment ) { } }
DruidDataSource dataSource = Optional . ofNullable ( dataSources ) . map ( m -> m . get ( segment . getDataSource ( ) ) ) . orElse ( null ) ; if ( dataSource == null ) { return segment ; } DataSegment alreadyExistingSegment = dataSource . getSegment ( segment . getId ( ) ) ; return alreadyExistingSegment != null ? alreadyExistingSegment : segment ;
public class StateDP {

    /**
     * Saves the expiration timeout and the updating / invalidating group
     * address lists of this datapoint as XML child elements.
     *
     * @see tuwien.auto.calimero.datapoint.Datapoint#doSave(tuwien.auto.calimero.xml.XMLWriter)
     */
    void doSave(XMLWriter w) throws KNXMLException {
        // <expiration timeout=int />
        w.writeEmptyElement(TAG_EXPIRATION, Arrays.asList(new Attribute[] { new Attribute(ATTR_TIMEOUT, Integer.toString(timeout)) }));
        w.writeElement(TAG_UPDATING, Collections.EMPTY_LIST, null);
        // The address lists are shared with the runtime; lock while iterating
        // to write a consistent snapshot.
        synchronized (updating) {
            for (final Iterator i = updating.iterator(); i.hasNext();)
                ((GroupAddress) i.next()).save(w);
        }
        w.endElement();
        w.writeElement(TAG_INVALIDATING, Collections.EMPTY_LIST, null);
        synchronized (invalidating) {
            for (final Iterator i = invalidating.iterator(); i.hasNext();)
                ((GroupAddress) i.next()).save(w);
        }
        w.endElement();
    }
}
public class Balanced { /** * Splits a string around the specified character , returning the parts in an array . * However , any occurrence of the specified character enclosed between balanced parentheses / brackets / braces is ignored . * @ param text a String * @ param delimiter the character to split the string around * @ return a String array containing the split parts */ public static String [ ] split ( String text , char delimiter ) { } }
List < String > list = new ArrayList < > ( ) ; return split ( list , null , text , 0 , text . length ( ) , delimiter , null ) . toArray ( new String [ list . size ( ) ] ) ;
public class PharmacophoreMatcher {

    /**
     * Performs the pharmacophore matching.
     *
     * @param atomContainer The target molecule. Must have 3D coordinates
     * @param initializeTarget If <i>true</i>, the target molecule specified in the
     *        first argument will be analyzed to identify matching pharmacophore groups.
     *        If <i>false</i> this is not performed. The latter case is only useful when
     *        dealing with conformers since for a given molecule, all conformers will
     *        have the same pharmacophore groups and only the constraints will change
     *        from one conformer to another.
     * @return true is the target molecule contains the query pharmacophore
     * @throws org.openscience.cdk.exception.CDKException if the query pharmacophore was
     *         not set or the query is invalid or if the molecule does not have 3D coordinates
     */
    public boolean matches(IAtomContainer atomContainer, boolean initializeTarget) throws CDKException {
        if (!GeometryUtil.has3DCoordinates(atomContainer)) throw new CDKException("Molecule must have 3D coordinates");
        if (pharmacophoreQuery == null) throw new CDKException("Must set the query pharmacophore before matching");
        if (!checkQuery(pharmacophoreQuery))
            throw new CDKException("A problem in the query. Make sure all pharmacophore groups of the same symbol have the same same SMARTS");
        String title = (String) atomContainer.getTitle();
        if (initializeTarget) pharmacophoreMolecule = getPharmacophoreMolecule(atomContainer);
        else {
            // even though the atoms comprising the pcore groups are
            // constant, their coords will differ, so we need to make
            // sure we get the latest set of effective coordinates
            for (IAtom iAtom : pharmacophoreMolecule.atoms()) {
                PharmacophoreAtom patom = PharmacophoreAtom.get(iAtom);
                List<Integer> tmpList = new ArrayList<Integer>();
                for (int idx : patom.getMatchingAtoms())
                    tmpList.add(idx);
                Point3d coords = getEffectiveCoordinates(atomContainer, tmpList);
                patom.setPoint3d(coords);
            }
        }
        // Fewer pcore groups in the target than in the query can never match,
        // so skip the (expensive) substructure search entirely.
        if (pharmacophoreMolecule.getAtomCount() < pharmacophoreQuery.getAtomCount()) {
            logger.debug("Target [" + title + "] did not match the query SMARTS. Skipping constraints");
            return false;
        }
        mappings = Pattern.findSubstructure(pharmacophoreQuery).matchAll(pharmacophoreMolecule);
        // XXX: doing one search then discarding
        return mappings.atLeast(1);
    }
}
public class StopLibrary {

    /**
     * Adds stop words to the stop-word recognition registered under the given key.
     * (Original javadoc: "增加停用词" — add stop words.)
     *
     * @param key name of the stop-word library to update
     * @param stopWords the words to add
     */
    public static void insertStopWords(String key, String... stopWords) {
        // NOTE(review): get(key) is assumed to return (or lazily create) a
        // StopRecognition for the key; if it can return null this will NPE — confirm.
        StopRecognition fr = get(key);
        fr.insertStopWords(stopWords);
    }
}
public class Duration { /** * Format a integer or long value as a time duration . Returns empty string if < code > value < / code > argument is null . * @ param value numeric value . * @ return numeric value represented as duration , possible empty if < code > value < / code > argument is null . * @ throws IllegalArgumentException if < code > value < / code > argument is not integer or long . */ @ Override public String format ( Object value ) { } }
if ( value == null ) { return "" ; } double duration = 0 ; if ( value instanceof Integer ) { duration = ( Integer ) value ; } else if ( value instanceof Long ) { duration = ( Long ) value ; } else { throw new IllegalArgumentException ( String . format ( "Invalid argument type |%s|." , value . getClass ( ) ) ) ; } if ( duration == 0 ) { return format ( 0 , Units . MILLISECONDS ) ; } Units units = Units . MILLISECONDS ; for ( Units u : Units . values ( ) ) { if ( duration < u . value ) { break ; } units = u ; } return format ( duration / units . value , units ) ;
public class InterceptorProxy {

    /**
     * Invoke the interceptor method associated with the interceptor index
     * that was passed as the "interceptorIndex" parameter of the
     * constructor method of this class.
     *
     * @param bean is the EJB instance that is the target of this invocation.
     * @param inv is the InvocationContext to pass as an argument to the
     *        interceptor method if the interceptor method requires it.
     * @param interceptors is the array of interceptor instances created for a
     *        particular EJB instance. The array must be ordered so that the
     *        "interceptorIndex" parameter passed to the constructor method
     *        of this class can be used as an index into this array to
     *        select the correct interceptor instance to invoke.
     * @return Object returned by interceptor instance.
     * @throws Exception
     */
    public final Object invokeInterceptor(Object bean, InvocationContext inv, Object[] interceptors) throws Exception {
        // Interceptor instance is the bean instance itself if the
        // interceptor index is < 0.
        Object interceptorInstance = (ivBeanInterceptor) ? bean : interceptors[ivInterceptorIndex];
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) // d367572.7
        {
            Tr.debug(tc, "invoking " + this);
            Tr.debug(tc, "interceptor instance = " + interceptorInstance);
        }
        // Does interceptor method require InvocationContext as an argument?
        if (ivRequiresInvocationContext) {
            try {
                // Yes it does, so pass it as an argument.
                Object[] args = new Object[] { inv }; // d404122
                return ivInterceptorMethod.invoke(interceptorInstance, args); // d404122
            } catch (IllegalArgumentException ie) {
                // Dump method and instance details before rethrowing, to help
                // diagnose a signature/instance mismatch.
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "ivInterceptorMethod: " + ivInterceptorMethod.toString() + " class: " + ivInterceptorMethod.getClass() + " declaring class: " + ivInterceptorMethod.getDeclaringClass());
                    Tr.debug(tc, "interceptorInstance: " + interceptorInstance.toString() + " class: " + interceptorInstance.getClass());
                }
                throw ie;
            }
        } else {
            // Nope, interceptor method takes no arguments.
            return ivInterceptorMethod.invoke(interceptorInstance, NO_ARGS);
        }
    }
}
public class RocksDBKeyedStateBackend { /** * Triggers an asynchronous snapshot of the keyed state backend from RocksDB . This snapshot can be canceled and * is also stopped when the backend is closed through { @ link # dispose ( ) } . For each backend , this method must always * be called by the same thread . * @ param checkpointId The Id of the checkpoint . * @ param timestamp The timestamp of the checkpoint . * @ param streamFactory The factory that we can use for writing our state to streams . * @ param checkpointOptions Options for how to perform this checkpoint . * @ return Future to the state handle of the snapshot data . * @ throws Exception indicating a problem in the synchronous part of the checkpoint . */ @ Nonnull @ Override public RunnableFuture < SnapshotResult < KeyedStateHandle > > snapshot ( final long checkpointId , final long timestamp , @ Nonnull final CheckpointStreamFactory streamFactory , @ Nonnull CheckpointOptions checkpointOptions ) throws Exception { } }
long startTime = System . currentTimeMillis ( ) ; // flush everything into db before taking a snapshot writeBatchWrapper . flush ( ) ; RocksDBSnapshotStrategyBase < K > chosenSnapshotStrategy = checkpointOptions . getCheckpointType ( ) . isSavepoint ( ) ? savepointSnapshotStrategy : checkpointSnapshotStrategy ; RunnableFuture < SnapshotResult < KeyedStateHandle > > snapshotRunner = chosenSnapshotStrategy . snapshot ( checkpointId , timestamp , streamFactory , checkpointOptions ) ; chosenSnapshotStrategy . logSyncCompleted ( streamFactory , startTime ) ; return snapshotRunner ;
public class JTrees { /** * Returns a list containing all leaf nodes from the given tree model * that are descendants of the given node . These are the nodes that * have 0 children . * @ param treeModel The tree model * @ param node The node to start the search from * @ return The leaf nodes */ public static List < Object > getLeafNodes ( TreeModel treeModel , Object node ) { } }
List < Object > leafNodes = new ArrayList < Object > ( ) ; getLeafNodes ( treeModel , node , leafNodes ) ; return leafNodes ;
public class DefaultCsvEncoder {

    /**
     * {@inheritDoc}
     *
     * Escapes embedded quotes, rewrites line terminators to the configured
     * end-of-line symbols (updating the context's line number), and surrounds
     * the column with quotes when required by special characters, the quote
     * mode, or surrounding spaces.
     */
    public String encode(final String input, final CsvContext context, final CsvPreference preference) {
        final StringBuilder currentColumn = new StringBuilder();
        final int delimiter = preference.getDelimiterChar();
        final char quote = (char) preference.getQuoteChar();
        final char quoteEscapeChar = (char) preference.getQuoteEscapeChar();
        final String eolSymbols = preference.getEndOfLineSymbols();
        final int lastCharIndex = input.length() - 1;
        boolean quotesRequiredForSpecialChar = false;
        boolean skipNewline = false;
        for (int i = 0; i <= lastCharIndex; i++) {
            final char c = input.charAt(i);
            if (skipNewline) {
                skipNewline = false;
                if (c == '\n') {
                    continue; // newline following a carriage return is skipped
                }
            }
            if (c == delimiter) {
                // an embedded delimiter forces quoting but is copied as-is
                quotesRequiredForSpecialChar = true;
                currentColumn.append(c);
            } else if (c == quote) {
                // embedded quote is escaped with the configured escape char
                quotesRequiredForSpecialChar = true;
                currentColumn.append(quoteEscapeChar);
                currentColumn.append(quote);
            } else if (c == '\r') {
                // CR (or CRLF, via skipNewline) becomes the configured EOL and counts as one line
                quotesRequiredForSpecialChar = true;
                currentColumn.append(eolSymbols);
                context.setLineNumber(context.getLineNumber() + 1);
                skipNewline = true;
            } else if (c == '\n') {
                quotesRequiredForSpecialChar = true;
                currentColumn.append(eolSymbols);
                context.setLineNumber(context.getLineNumber() + 1);
            } else {
                currentColumn.append(c);
            }
        }
        // quotes may also be demanded by the quote mode or by leading/trailing spaces
        final boolean quotesRequiredForMode = preference.getQuoteMode().quotesRequired(input, context, preference);
        final boolean quotesRequiredForSurroundingSpaces = preference.isSurroundingSpacesNeedQuotes()
                && input.length() > 0 && (input.charAt(0) == ' ' || input.charAt(input.length() - 1) == ' ');
        if (quotesRequiredForSpecialChar || quotesRequiredForMode || quotesRequiredForSurroundingSpaces) {
            currentColumn.insert(0, quote).append(quote);
        }
        return currentColumn.toString();
    }
}
public class BaseSessionProxy { /** * Check for session . * @ param strClassAndID * @ return */ public BaseSessionProxy checkForSession ( String strClassAndID ) { } }
int iColon = strClassAndID . indexOf ( CLASS_SEPARATOR ) ; String strSessionClass = null ; if ( iColon != - 1 ) strSessionClass = strClassAndID . substring ( 0 , iColon ) ; String strID = strClassAndID . substring ( iColon + 1 ) ; if ( REMOTE_SESSION . equals ( strSessionClass ) ) return new SessionProxy ( this , strID ) ; if ( REMOTE_TABLE . equals ( strSessionClass ) ) return new TableProxy ( this , strID ) ; if ( REMOTE_BASE_SESSION . equals ( strSessionClass ) ) return new BaseSessionProxy ( this , strID ) ; return null ; // Not a session
public class FctBnAccEntitiesProcessors {

    /**
     * <p>Get InvTxMeth&lt;PurchaseInvoice, PurchaseInvoiceTaxLine&gt;.</p>
     * Lazily builds and caches the fully configured tax-method descriptor for
     * purchase invoices.
     *
     * @param pAddParam additional param
     * @return requested InvTxMeth&lt;PurchaseInvoice, PurchaseInvoiceTaxLine&gt;
     * @throws Exception - an exception
     */
    protected final InvTxMeth<PurchaseInvoice, PurchaseInvoiceTaxLine> lazyGetPurInvTxMeth(final Map<String, Object> pAddParam) throws Exception {
        InvTxMeth<PurchaseInvoice, PurchaseInvoiceTaxLine> purInvTxMe = this.purInvTxMeth;
        if (purInvTxMe == null) {
            purInvTxMe = new InvTxMeth<PurchaseInvoice, PurchaseInvoiceTaxLine>();
            // line classes used by the tax method
            purInvTxMe.setGoodLnCl(PurchaseInvoiceLine.class);
            purInvTxMe.setServiceLnCl(PurchaseInvoiceServiceLine.class);
            purInvTxMe.setInvTxLnCl(PurchaseInvoiceTaxLine.class);
            purInvTxMe.setIsTxByUser(true);
            // SQL where-clauses for invoice-basis tax adjustment of goods/service lines
            purInvTxMe.setStWhereAdjGdLnInvBas("where PURCHASEINVOICELINE.TAXCATEGORY is not null and REVERSEDID " + "is null and ITSOWNER=");
            purInvTxMe.setStWhereAdjSrLnInvBas("where PURCHASEINVOICESERVICELINE.TAXCATEGORY is not null and REVERSEDID " + "is null and ITSOWNER=");
            // SQL resource file names
            purInvTxMe.setFlTotals("invTotals.sql");
            purInvTxMe.setFlTxItBas("invTxItBas.sql");
            purInvTxMe.setFlTxItBasAggr("invTxItBasAggr.sql");
            purInvTxMe.setFlTxInvBas("invTxInvBas.sql");
            purInvTxMe.setFlTxInvAdj("invTxInvAdj.sql");
            purInvTxMe.setFlTxInvBasAggr("invTxInvBasAggr.sql");
            purInvTxMe.setTblNmsTot(new String[] { "PURCHASEINVOICELINE", "PURCHASEINVOICESERVICELINE", "PURCHASEINVOICETAXLINE", "PURCHASEINVOICEGOODSTAXLINE", "PURCHASEINVOICESERVICETAXLINE" });
            FactoryPersistableBase<PurchaseInvoiceTaxLine> fctItl = new FactoryPersistableBase<PurchaseInvoiceTaxLine>();
            fctItl.setObjectClass(PurchaseInvoiceTaxLine.class);
            fctItl.setDatabaseId(getSrvDatabase().getIdDatabase());
            purInvTxMe.setFctInvTxLn(fctItl);
            // assigning fully initialized object:
            this.purInvTxMeth = purInvTxMe;
        }
        return purInvTxMe;
    }
}
public class StreamProjection { /** * Projects a { @ link Tuple } { @ link DataStream } to the previously selected fields . * @ return The projected DataStream . * @ see Tuple * @ see DataStream */ public < T0 > SingleOutputStreamOperator < Tuple1 < T0 > > projectTuple1 ( ) { } }
TypeInformation < ? > [ ] fTypes = extractFieldTypes ( fieldIndexes , dataStream . getType ( ) ) ; TupleTypeInfo < Tuple1 < T0 > > tType = new TupleTypeInfo < Tuple1 < T0 > > ( fTypes ) ; return dataStream . transform ( "Projection" , tType , new StreamProject < IN , Tuple1 < T0 > > ( fieldIndexes , tType . createSerializer ( dataStream . getExecutionConfig ( ) ) ) ) ;
public class StreamExecutor {
    /**
     * Drains each instance executor's outbound stream queue and routes every
     * tuple set to its destination tasks: data tuples go through
     * {@code copyDataOutBound}, ack/fail control tuples through
     * {@code copyControlOutBound}.
     */
    public void handleInstanceExecutor() {
        for (InstanceExecutor executor : taskIdToInstanceExecutor.values()) {
            boolean isLocalSpout = spoutSets.contains(executor.getComponentName());
            int taskId = executor.getTaskId();
            // Snapshot the queue size first so we only drain what was present
            // when this pass started (the queue may keep filling concurrently).
            int items = executor.getStreamOutQueue().size();
            for (int i = 0; i < items; i++) {
                Message msg = executor.getStreamOutQueue().poll();
                if (msg instanceof HeronTuples.HeronTupleSet) {
                    HeronTuples.HeronTupleSet tupleSet = (HeronTuples.HeronTupleSet) msg;
                    if (tupleSet.hasData()) {
                        HeronTuples.HeronDataTupleSet d = tupleSet.getData();
                        TopologyAPI.StreamId streamId = d.getStream();
                        for (HeronTuples.HeronDataTuple tuple : d.getTuplesList()) {
                            // Destinations = grouping-computed targets plus any
                            // destinations already carried by the tuple itself.
                            List<Integer> outTasks = this.topologyManager.getListToSend(streamId, tuple);
                            outTasks.addAll(tuple.getDestTaskIdsList());
                            if (outTasks.isEmpty()) {
                                LOG.severe("Nobody to send the tuple to");
                            }
                            copyDataOutBound(taskId, isLocalSpout, streamId, tuple, outTasks);
                        }
                    }
                    if (tupleSet.hasControl()) {
                        HeronTuples.HeronControlTupleSet c = tupleSet.getControl();
                        // Acks and fails are routed separately; the boolean flag
                        // distinguishes ack (true) from fail (false).
                        for (HeronTuples.AckTuple ack : c.getAcksList()) {
                            copyControlOutBound(tupleSet.getSrcTaskId(), ack, true);
                        }
                        for (HeronTuples.AckTuple fail : c.getFailsList()) {
                            copyControlOutBound(tupleSet.getSrcTaskId(), fail, false);
                        }
                    }
                }
            }
        }
    }
}
public class Request {
    /**
     * Download this resource asynchronously, without a request body.
     * The download progress will be monitored with a {@link ProgressListener}.
     * <b>Note:</b> This method consumes the <code>InputStream</code> from the
     * response and closes it, so the {@link Response#getResponseByteStream()}
     * method will always return null for this request.
     *
     * @param context          the context that will be passed to the authentication listener
     * @param progressListener the listener that monitors the download progress
     * @param responseListener the listener whose onSuccess or onFailure methods
     *                         will be called when this request finishes
     */
    public void download(Context context, ProgressListener progressListener, ResponseListener responseListener) {
        // Remember the context so the authentication listener can use it later.
        setContext(context);
        // Delegate the actual download and progress reporting to the superclass.
        super.download(progressListener, responseListener);
    }
}
public class TransportNegotiator {
    /**
     * Sends an offer for a transport candidate as a TRANSPORT_INFO Jingle packet.
     * Null candidates are silently skipped.
     *
     * @param cand the candidate to offer
     * @throws NotConnectedException if the connection is gone
     * @throws InterruptedException  if interrupted while sending
     */
    private synchronized void sendTransportCandidateOffer(TransportCandidate cand)
            throws NotConnectedException, InterruptedException {
        if (!cand.isNull()) {
            // Record the candidate as offered before putting it on the wire.
            addOfferedCandidate(cand);
            JingleContent content = parentNegotiator.getJingleContent();
            content.addJingleTransport(getJingleTransport(cand));
            Jingle jingle = new Jingle(JingleActionEnum.TRANSPORT_INFO);
            jingle.addContent(content);
            // We SHOULD NOT be sending packets directly.
            // This circumvents the state machinery.
            // TODO - work this into the state machinery.
            session.sendFormattedJingle(jingle);
        }
    }
}
public class PdfContentStreamProcessor {
    /**
     * Loads all the supported graphics and text state operators into a map,
     * keyed by the operator token exactly as it appears in the content stream.
     */
    private void populateOperators() {
        // NOTE(review): raw HashMap — presumably the field is Map<String, ContentOperator>;
        // confirm the declared type before adding type arguments.
        operators = new HashMap();
        // Graphics state operators.
        registerContentOperator("q", new PushGraphicsState());
        registerContentOperator("Q", new PopGraphicsState());
        registerContentOperator("cm", new ModifyCurrentTransformationMatrix());
        registerContentOperator("gs", new ProcessGraphicsStateResource());
        // Text state operators.
        registerContentOperator("Tc", new SetTextCharacterSpacing());
        registerContentOperator("Tw", new SetTextWordSpacing());
        registerContentOperator("Tz", new SetTextHorizontalScaling());
        registerContentOperator("TL", new SetTextLeading());
        registerContentOperator("Tf", new SetTextFont());
        registerContentOperator("Tr", new SetTextRenderMode());
        registerContentOperator("Ts", new SetTextRise());
        // Text object and positioning operators.
        registerContentOperator("BT", new BeginText());
        registerContentOperator("ET", new EndText());
        registerContentOperator("Td", new TextMoveStartNextLine());
        registerContentOperator("TD", new TextMoveStartNextLineWithLeading());
        registerContentOperator("Tm", new TextSetTextMatrix());
        registerContentOperator("T*", new TextMoveNextLine());
        // Text showing operators.
        registerContentOperator("Tj", new ShowText());
        registerContentOperator("'", new MoveNextLineAndShowText());
        registerContentOperator("\"", new MoveNextLineAndShowTextWithSpacing());
        registerContentOperator("TJ", new ShowTextArray());
        // XObject operator.
        registerContentOperator("Do", new Do());
    }
}
public class PullFileLoader { /** * Load a single pull file . * @ param path The { @ link Path } to the pull file to load , full path * @ param sysProps A { @ link Config } used as fallback . * @ param loadGlobalProperties if true , will also load at most one * . properties file per directory from the * { @ link # rootDirectory } to the pull file { @ link Path } . * @ return The loaded { @ link Config } . * @ throws IOException */ public Config loadPullFile ( Path path , Config sysProps , boolean loadGlobalProperties ) throws IOException { } }
Config fallback = loadGlobalProperties ? loadAncestorGlobalConfigs ( path , sysProps ) : sysProps ; if ( this . javaPropsPullFileFilter . accept ( path ) ) { return loadJavaPropsWithFallback ( path , fallback ) . resolve ( ) ; } else if ( this . hoconPullFileFilter . accept ( path ) ) { return loadHoconConfigAtPath ( path ) . withFallback ( fallback ) . resolve ( ) ; } else { throw new IOException ( String . format ( "Cannot load pull file %s due to unrecognized extension." , path ) ) ; }
public class CommandRegistry { /** * Retrieves the command associated with the specified name from the registry . * @ param commandName Name of the command sought . * @ param forceCreate If true and the command does not exist , one is created and added to the * registry . * @ return The associated command , or null if it does not exist ( and forceCreate is false ) . */ public Command get ( String commandName , boolean forceCreate ) { } }
Command command = commands . get ( commandName ) ; if ( command == null && forceCreate ) { command = new Command ( commandName ) ; add ( command ) ; } return command ;
public class OutputsInner {
    /**
     * Lists all of the outputs under the specified streaming job, following
     * the server-side paging links until exhausted.
     *
     * @param resourceGroupName the name of the resource group that contains the resource
     * @param jobName           the name of the streaming job
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;OutputInner&gt; object
     */
    public Observable<ServiceResponse<Page<OutputInner>>> listByStreamingJobWithServiceResponseAsync(
            final String resourceGroupName, final String jobName) {
        return listByStreamingJobSinglePageAsync(resourceGroupName, jobName)
            .concatMap(new Func1<ServiceResponse<Page<OutputInner>>, Observable<ServiceResponse<Page<OutputInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<OutputInner>>> call(ServiceResponse<Page<OutputInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page reached: stop recursing.
                        return Observable.just(page);
                    }
                    // Emit this page, then lazily fetch and append the remaining pages.
                    return Observable.just(page)
                        .concatWith(listByStreamingJobNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
}
public class RemovePermissionRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RemovePermissionRequest removePermissionRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( removePermissionRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( removePermissionRequest . getStatementId ( ) , STATEMENTID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class EntityStore {
    /**
     * Creates a topic proxy that can be used in an EntityQuery (see {@link #from} for details).
     *
     * @param entity the entity class to proxy
     * @return a cglib-enhanced instance of the entity class that records field access
     */
    public <T> T topic(Class<T> entity) {
        EntityMetadata.EntityDescriptor descriptor = metadata.of(entity);
        // Enhancer subclasses the entity and mixes in HasCalledFields, with
        // TopicProxy intercepting calls to record which fields were touched.
        @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"}) // Cast is guaranteed by enhancer.
        T proxy = (T) Enhancer.create(entity, new Class[] {TopicProxy.HasCalledFields.class},
            new TopicProxy(descriptor));
        return proxy;
    }
}
public class DefaultXLDeployClient { /** * / / / / / Helpers */ private ResponseEntity < String > makeRestCall ( String instanceUrl , String endpoint ) { } }
String url = normalizeUrl ( instanceUrl , "/deployit/" + endpoint ) ; ResponseEntity < String > response = null ; try { response = restOperations . exchange ( url , HttpMethod . GET , new HttpEntity < > ( createHeaders ( instanceUrl ) ) , String . class ) ; } catch ( RestClientException re ) { LOGGER . error ( "Error with REST url: " + url ) ; LOGGER . error ( re . getMessage ( ) ) ; } return response ;
public class OmsInsolation {
    /**
     * Evaluates the daily insolation over the DEM by stepping the hour angle
     * across daylight hours and accumulating radiation per cell.
     *
     * @param lambda      the latitude (radians — assumed from the tan() usage; TODO confirm)
     * @param demWR       the raster of elevation
     * @param gradientWR  the raster of the gradient value of the dem
     * @param insolationWR the raster where the result is stored
     * @param day         the day in the year
     * @param dx          the resolution of the dem
     */
    private void calcInsolation(double lambda, WritableRaster demWR, WritableRaster gradientWR,
            WritableRaster insolationWR, int day, double dx) {
        // calculating the day angle
        // double dayang = 2 * Math.PI * (day - 1) / 365.0;
        double dayangb = (360 / 365.25) * (day - 79.436);
        dayangb = Math.toRadians(dayangb);
        // Evaluate the declination of the sun.
        delta = getDeclination(dayangb);
        // Sunset/sunrise hour angle: acos(-tan(delta) * tan(lambda)).
        double ss = Math.acos(-Math.tan(delta) * Math.tan(lambda));
        // Step the hour angle from just after sunrise to just before sunset.
        double hour = -ss + (Math.PI / 48.0);
        while (hour <= ss - (Math.PI / 48)) {
            omega = hour;
            // calculating the vector related to the sun
            double sunVector[] = calcSunVector();
            double zenith = calcZenith(sunVector[2]);
            double[] inverseSunVector = calcInverseSunVector(sunVector);
            double[] normalSunVector = calcNormalSunVector(sunVector);
            int height = demWR.getHeight();
            int width = demWR.getWidth();
            // Shadow factor raster for the current sun position.
            WritableRaster sOmbraWR = calculateFactor(height, width, sunVector, inverseSunVector,
                normalSunVector, demWR, dx);
            // Relative optical air mass — appears to be Kasten's empirical
            // formula (constants 0.15, 93.885, -1.253); TODO confirm reference.
            double mr = 1 / (sunVector[2] + 0.15 * Math.pow((93.885 - zenith), (-1.253)));
            for (int j = 0; j < height; j++) {
                for (int i = 0; i < width; i++) {
                    // evaluate the radiation for this cell at this hour angle.
                    calcRadiation(i, j, demWR, sOmbraWR, insolationWR, sunVector, gradientWR, mr);
                }
            }
            // Advance the hour angle by half an hour (PI/24 rad).
            hour = hour + Math.PI / 24.0;
        }
    }
}
public class AuthorizationConfig {
    /**
     * <pre>
     * The name of the authorization provider, such as
     * firebaserules.googleapis.com.
     * </pre>
     *
     * <code>string provider = 1;</code>
     */
    public java.lang.String getProvider() {
        java.lang.Object ref = provider_;
        if (ref instanceof java.lang.String) {
            // Already decoded on a previous call: fast path.
            return (java.lang.String) ref;
        } else {
            // First access: decode the ByteString and cache the decoded String
            // back into the field so later calls take the fast path above.
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            provider_ = s;
            return s;
        }
    }
}
public class CharacterParser { public boolean isRegexp ( String history ) { } }
// could be start of regexp or divide sign history = history . replace ( "^\\s*" , "" ) ; // unless its an ` if ` , ` while ` , ` for ` or ` with ` it ' s a divide , so we assume it ' s a divide if ( history . charAt ( 0 ) == ')' ) return false ; // unless it ' s a function expression , it ' s a regexp , so we assume it ' s a regexp if ( history . charAt ( 0 ) == '}' ) return true ; // any punctuation means it ' s a regexp if ( isPunctuator ( history . charAt ( 0 ) ) ) return true ; // if the last thing was a keyword then it must be a regexp ( e . g . ` typeof / foo / ` ) Matcher matcher = pattern . matcher ( history ) ; if ( matcher . matches ( ) && isKeyword ( new StringBuilder ( matcher . group ( 0 ) ) . reverse ( ) . toString ( ) ) ) { return true ; } return false ;
public class HELM1Utils { /** * method to generate from a helm2notation a valid canonical HELM1 * @ param helm2notation input HELM2Notation * @ return canonical HELM * @ throws HELM1FormatException if HELM2 features are there * @ throws ChemistryException if the Chemistry Engine can not be initialized */ public static String getCanonical ( HELM2Notation helm2notation ) throws HELM1FormatException , ChemistryException { } }
Map < String , String > convertsortedIdstoIds ; try { Object [ ] temp = setCanonicalHELMFirstSection ( helm2notation ) ; LOG . info ( "First Section of canonical HELM was generated" ) ; convertsortedIdstoIds = ( Map < String , String > ) temp [ 0 ] ; String firstSection = ( String ) temp [ 1 ] ; String secondSection = setCanonicalHELMSecondSection ( convertsortedIdstoIds , helm2notation . getListOfConnections ( ) ) ; LOG . info ( "Second Section of canonical HELM was generated" ) ; return firstSection + "$" + secondSection + "$" + "" + "$" + "" + "$V2.0" ; } catch ( ClassNotFoundException | IOException | HELM1ConverterException | ValidationException | org . helm . notation2 . parser . exceptionparser . NotationException e ) { e . printStackTrace ( ) ; LOG . error ( "Canonical HELM 1 can not be generated due to HELM2 features" ) ; throw new HELM1FormatException ( "Canonical HELM 1 can not be generated due to HELM2 features " + e . getMessage ( ) + e . getCause ( ) ) ; }
public class AbstractConfigurableTemplateResolver { /** * Computes the resource name that will be used for resolving , from the template name and other * parameters configured at this < em > configurable < / em > resolver . * This method can be overridden by subclasses that need to modify the standard way in which the * name of the template resource is computed by default before passing it to the real resource * resolution mechanism ( in method { @ link # computeTemplateResource ( IEngineConfiguration , String , String , String , String , Map ) } * By default , the resource name will be created by first applying the < em > template aliases < / em > , and then * adding < em > prefix < / em > and < em > suffix < / em > to the specified < em > template < / em > ( template name ) . * @ param configuration the engine configuration in use . * @ param ownerTemplate the owner template , if the resource being computed is a fragment . Might be null . * @ param template the template ( normally the template name , except for String templates ) . * @ param prefix the prefix to be applied . * @ param suffix the suffix to be applied . * @ param forceSuffix whether the suffix should be forced or not . * @ param templateAliases the template aliases map . * @ param templateResolutionAttributes the template resolution attributes , if any . Might be null . * @ return the resource name that should be used for resolving * @ since 3.0.6 */ protected String computeResourceName ( final IEngineConfiguration configuration , final String ownerTemplate , final String template , final String prefix , final String suffix , final boolean forceSuffix , final Map < String , String > templateAliases , final Map < String , Object > templateResolutionAttributes ) { } }
Validate . notNull ( template , "Template name cannot be null" ) ; String unaliasedName = templateAliases . get ( template ) ; if ( unaliasedName == null ) { unaliasedName = template ; } final boolean hasPrefix = ! StringUtils . isEmptyOrWhitespace ( prefix ) ; final boolean hasSuffix = ! StringUtils . isEmptyOrWhitespace ( suffix ) ; final boolean shouldApplySuffix = hasSuffix && ( forceSuffix || ! ContentTypeUtils . hasRecognizedFileExtension ( unaliasedName ) ) ; if ( ! hasPrefix && ! shouldApplySuffix ) { return unaliasedName ; } if ( ! hasPrefix ) { // shouldApplySuffix return unaliasedName + suffix ; } if ( ! shouldApplySuffix ) { // hasPrefix return prefix + unaliasedName ; } // hasPrefix & & shouldApplySuffix return prefix + unaliasedName + suffix ;
public class MyTreeTable {
    /**
     * Returns whether the cell under the coordinates of the mouse in the
     * {@link EventObject} is editable. For plain left-button clicks, the event
     * is forwarded to the embedded tree so expand/collapse still works; only a
     * triple-click (or a null event) reports the cell as editable.
     */
    public boolean isCellEditable(EventObject e) {
        if (e instanceof MouseEvent) {
            MouseEvent me = (MouseEvent) e;
            // If the modifiers are not 0 (or the left mouse button),
            // tree may try and toggle the selection, and table
            // will then try and toggle, resulting in the
            // selection remaining the same. To avoid this, we
            // only dispatch when the modifiers are 0 (or the left mouse
            // button). The numeric literals are the *_DOWN_MASK values,
            // annotated inline, used alongside the legacy masks.
            if (me.getModifiers() == 0
                || ((me.getModifiers() & (InputEvent.BUTTON1_MASK | 1024)) != 0
                    && (me.getModifiers() & (InputEvent.SHIFT_MASK | InputEvent.CTRL_MASK
                        | InputEvent.ALT_MASK | InputEvent.BUTTON2_MASK | InputEvent.BUTTON3_MASK
                        | 64 | // SHIFT_DOWN_MASK
                        128 | // CTRL_DOWN_MASK
                        512 | // ALT_DOWN_MASK
                        2048 | // BUTTON2_DOWN_MASK
                        4096 // BUTTON3_DOWN_MASK
                        )) == 0)) {
                int row = rowAtPoint(me.getPoint());
                // Find the tree column and re-dispatch the click to the tree,
                // translated into the tree's coordinate space.
                for (int counter = getColumnCount() - 1; counter >= 0; counter--) {
                    if (TreeTableModel.class == getColumnClass(counter)) {
                        MouseEvent newME = new MouseEvent(MyTreeTable.this.tree, me.getID(),
                            me.getWhen(), me.getModifiers(),
                            me.getX() - getCellRect(row, counter, true).x, me.getY(),
                            me.getClickCount(), me.isPopupTrigger());
                        MyTreeTable.this.tree.dispatchEvent(newME);
                        break;
                    }
                }
            }
            // Only a triple-click (or more) starts editing.
            if (me.getClickCount() >= 3) {
                return true;
            }
            return false;
        }
        // A null event means editing was requested programmatically.
        if (e == null) {
            return true;
        }
        return false;
    }
}
public class TableSession {
    /**
     * Opens this table for a query: sets the sort key area, open mode and field
     * selection, installs range filters from the initial/end keys, replicates
     * client-supplied behaviors (listeners) from the serialized stream, and
     * finally opens the main record. Roughly mirrors building a
     * SELECT ... WHERE ... ORDER BY statement on the remote record.
     *
     * @param strKeyArea     the key area name to order by (null to leave unchanged)
     * @param iOpenMode      the open mode to set on the record
     * @param bDirection     the key order direction
     * @param strFields      the fields to select (comma separated; null = default selection)
     * @param objInitialKey  the start key (as a raw data object or a BaseBuffer); may be null
     * @param objEndKey      the end key (as a raw data object or a BaseBuffer); may be null
     * @param byBehaviorData a stream describing the behaviors to add and their initialization data
     * @throws DBException     on database errors (non-DB exceptions are wrapped)
     * @throws RemoteException on remote invocation failure
     */
    public void open(String strKeyArea, int iOpenMode, boolean bDirection, String strFields,
            Object objInitialKey, Object objEndKey, byte[] byBehaviorData) throws DBException, RemoteException {
        try {
            synchronized (this.getTask()) { // In case two tasks are calling here
                this.getMainRecord().close();
                // FROM is automatic, since the remote BaseRecord is exactly the same as this one
                // ORDER BY
                KeyArea keyArea = this.getMainRecord().getKeyArea();
                if (strKeyArea != null) {
                    this.getMainRecord().setKeyArea(strKeyArea);
                    keyArea = this.getMainRecord().getKeyArea(-1);
                    // This next set of code deals with a special case where the caller wants a non-key area order
                    if (this.getMainRecord().getDefaultOrder() == Constants.MAIN_KEY_AREA)
                        if (!keyArea.getKeyName().equals(strKeyArea))
                            if (!Constants.PRIMARY_KEY.equals(strKeyArea)) {
                                BaseField field = this.getMainRecord().getField(strKeyArea);
                                if (field != null) {
                                    // Build a temporary single-field index and order by it.
                                    KeyArea tempKeyStart = this.getMainRecord().makeIndex(DBConstants.NOT_UNIQUE, null); // Add temp key
                                    tempKeyStart.addKeyField(field, bDirection);
                                    this.getMainRecord().setKeyArea(this.getMainRecord().getKeyAreaCount() - 1);
                                }
                            }
                    keyArea.setKeyOrder(bDirection);
                }
                // Open mode
                this.getMainRecord().setOpenMode(iOpenMode);
                // SELECT (fields to select)
                if (strFields != null)
                    this.getMainRecord().setSelected(strFields); // Select these fields
                else {
                    Record recordBase = this.getMainRecord().getTable().getCurrentTable().getRecord();
                    int iFieldTypes = this.getFieldTypes(recordBase);
                    if (iFieldTypes == BaseBuffer.PHYSICAL_FIELDS)
                        this.getMainRecord().setSelected(true); // Select these fields (otherwise leave the selection alone)
                }
                // WHERE XYZ >=
                FileListener listener = null;
                listener = (FileListener) this.getMainRecord().getListener();
                while (listener != null) { // Clear inited flag from all Linked behaviors
                    if ((listener.getMasterSlaveFlag() & FileListener.LINKED_TO_SLAVE) != 0)
                        listener.setMasterSlaveFlag(listener.getMasterSlaveFlag() & ~FileListener.INITED_IN_SLAVE); // Clear inited flag
                    listener = (FileListener) listener.getNextListener();
                }
                if (objInitialKey != null) {
                    // Lower bound: rebuild the key buffer, then install a >= filter.
                    VectorBuffer recBuff = new VectorBuffer((Vector) objInitialKey);
                    int iLastModified = -1;
                    keyArea.reverseKeyBuffer(recBuff, DBConstants.FILE_KEY_AREA);
                    String strLastModified = recBuff.getNextString();
                    try {
                        if (strLastModified != null)
                            iLastModified = Integer.parseInt(strLastModified);
                    } catch (NumberFormatException ex) {
                        iLastModified = -1;
                    }
                    this.getMainRecord().addListener(listener = new SubCurrentFilter(iLastModified, true, false)); // Use current
                    listener.setMasterSlaveFlag(listener.getMasterSlaveFlag() | FileListener.INITED_IN_SLAVE
                        | FileListener.LINKED_TO_SLAVE | FileListener.RUN_IN_SLAVE);
                }
                // WHERE XYZ >=
                if (objEndKey != null) {
                    // Upper bound: same pattern, installed as a <= filter.
                    VectorBuffer recBuff = new VectorBuffer((Vector) objEndKey);
                    keyArea.reverseKeyBuffer(recBuff, DBConstants.FILE_KEY_AREA);
                    int iLastModified = -1;
                    String strLastModified = recBuff.getNextString();
                    try {
                        if (strLastModified != null)
                            iLastModified = Integer.parseInt(strLastModified);
                    } catch (NumberFormatException ex) {
                        iLastModified = -1;
                    }
                    this.getMainRecord().addListener(listener = new SubCurrentFilter(iLastModified, false, true)); // Use current
                    listener.setMasterSlaveFlag(listener.getMasterSlaveFlag() | FileListener.INITED_IN_SLAVE
                        | FileListener.LINKED_TO_SLAVE | FileListener.RUN_IN_SLAVE);
                }
                // WHERE XYZ
                // The following code replicates the Behaviors for the server class.
                // If the listener doesn't exist, it is created and the current params are set.
                // If the listener does exist, the current params are set.
                // Note: There is a special section of code to see that if two behaviors with the same
                // name exist, they are set separately.
                if (byBehaviorData != null) {
                    ByteArrayInputStream baIn = new ByteArrayInputStream(byBehaviorData);
                    ObjectInputStream daIn = new ObjectInputStream(baIn);
                    String strBehaviorName = null;
                    try {
                        strBehaviorName = daIn.readUTF();
                    } catch (IOException ex) {
                        strBehaviorName = null;
                    }
                    while (strBehaviorName != null) {
                        listener = (FileListener) this.getMainRecord().getListener(strBehaviorName);
                        while (listener != null) { // Already set up this listener, find the next one or null
                            if ((listener.getMasterSlaveFlag() & FileListener.LINKED_TO_SLAVE) != 0)
                                if ((listener.getMasterSlaveFlag() & FileListener.INITED_IN_SLAVE) == 0)
                                    break; // Use this listener (Linked, but not inited)
                            listener = (FileListener) listener.getListener(strBehaviorName);
                        }
                        if (listener == null) {
                            // Not present yet: instantiate by class name.
                            listener = (FileListener) ClassServiceUtility.getClassService().makeObjectFromClassName(strBehaviorName);
                        } else {
                            // Present: detach it so it can be re-initialized and re-added.
                            this.getMainRecord().removeListener(listener, false);
                        }
                        listener.initRemoteSkel(daIn);
                        this.getMainRecord().addListener(listener);
                        listener.setMasterSlaveFlag(listener.getMasterSlaveFlag() | FileListener.INITED_IN_SLAVE
                            | FileListener.LINKED_TO_SLAVE);
                        try {
                            strBehaviorName = daIn.readUTF();
                        } catch (IOException ex) {
                            strBehaviorName = null;
                        }
                    }
                    daIn.close();
                    baIn.close();
                }
                listener = (FileListener) this.getMainRecord().getListener();
                while (listener != null) { // Remove old created behaviors (linked, but not inited)
                    FileListener behaviorToRemove = listener;
                    listener = (FileListener) listener.getNextListener();
                    if ((behaviorToRemove.getMasterSlaveFlag() & FileListener.LINKED_TO_SLAVE) != 0)
                        if ((behaviorToRemove.getMasterSlaveFlag() & FileListener.INITED_IN_SLAVE) == 0)
                            this.getMainRecord().removeListener(behaviorToRemove, true);
                }
                // End
                Utility.getLogger().info("EJB Open key: " + strKeyArea);
                this.getMainRecord().open();
            }
        } catch (DBException ex) {
            throw ex;
        } catch (Exception ex) {
            // NOTE(review): the cause is dropped here (message-only wrap) and the
            // stack trace goes to stderr — consider a DBException(message, cause)
            // overload if one exists.
            ex.printStackTrace();
            throw new DBException(ex.getMessage());
        }
    }
}
public class FileMonitor {
    /**
     * Adds a listener to this file monitor. Listeners are held through
     * {@link WeakReference}s so that registration alone does not keep a
     * listener alive; while scanning for duplicates, entries whose referents
     * have been garbage collected are pruned.
     *
     * @param fileListener listener to add; ignored if already registered
     */
    public void addListener(FileChangedListener fileListener) {
        // Don't add if it's already there; drop stale (cleared) references as we go.
        for (Iterator<WeakReference<FileChangedListener>> i = listeners_.iterator(); i.hasNext();) {
            FileChangedListener listener = i.next().get();
            if (listener == null) {
                i.remove(); // referent was garbage collected — prune the dead entry
            } else if (listener == fileListener) {
                return;
            }
        }
        // Use WeakReference to avoid memory leak if this becomes the
        // sole reference to the object.
        listeners_.add(new WeakReference<FileChangedListener>(fileListener));
    }
}
public class Symtab { /** * Enter a binary operation into symbol table . * @ param name The name of the operator . * @ param left The type of the left operand . * @ param right The type of the left operand . * @ param res The operation ' s result type . * @ param opcode The operation ' s bytecode instruction . */ private void enterBinop ( String name , Type left , Type right , Type res , int opcode ) { } }
predefClass . members ( ) . enter ( new OperatorSymbol ( makeOperatorName ( name ) , new MethodType ( List . of ( left , right ) , res , List . < Type > nil ( ) , methodClass ) , opcode , predefClass ) ) ;
public class InternalService {
    /**
     * Gets profile details from the service.
     *
     * @param profileId profile id of the user
     * @param callback  callback to deliver the result; may be null
     */
    public void getProfile(@NonNull final String profileId,
            @Nullable Callback<ComapiResult<Map<String, Object>>> callback) {
        // Bridge the internal observable-style call onto the callback-based API.
        adapter.adapt(getProfile(profileId), callback);
    }
}
public class ManagerImpl { public < T > T executeInApplicationContext ( Callable < T > callable ) throws Exception { } }
ApplicationContext context = ( ApplicationContext ) getScopedContext ( ApplicationScoped . class ) ; boolean activatedByUs = false ; try { if ( ! context . isActive ( ) ) { context . activate ( ) ; activatedByUs = true ; } return callable . call ( ) ; } finally { if ( activatedByUs && context . isActive ( ) ) { context . deactivate ( ) ; } }
public class FirmataMessageFactory { /** * Creates Firmata message to set value of an output pin in PWM mode . < br / > * If pin id is beyond 15th or value is greater than we can put into * standard analog message , extended analog message is built . * @ param pinId index of the pin * @ param value value to be set * @ return Firmata message to set PWM output */ public static byte [ ] setAnalogPinValue ( byte pinId , long value ) { } }
byte [ ] message ; if ( pinId <= 15 && value <= 16383 ) { message = new byte [ ] { ( byte ) ( ANALOG_MESSAGE | ( pinId & 0x0F ) ) , ( byte ) ( value & 0x7F ) , ( byte ) ( ( value >>> 7 ) & 0x7F ) } ; } else { message = new byte [ ] { START_SYSEX , EXTENDED_ANALOG , pinId , ( byte ) ( value & 0x7F ) , ( byte ) ( ( value >>> 7 ) & 0x7F ) , ( byte ) ( ( value >>> 14 ) & 0x7F ) , ( byte ) ( ( value >>> 21 ) & 0x7F ) , END_SYSEX } ; } return message ;
public class sslfipskey { /** * Use this API to fetch filtered set of sslfipskey resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static sslfipskey [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
sslfipskey obj = new sslfipskey ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; sslfipskey [ ] response = ( sslfipskey [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class GitController { /** * Change log entry point */ @ RequestMapping ( value = "changelog" , method = RequestMethod . GET ) public BuildDiff changeLog ( BuildDiffRequest request ) { } }
GitChangeLog changeLog = gitService . changeLog ( request ) ; // Stores in cache logCache . put ( changeLog . getUuid ( ) , changeLog ) ; // OK return changeLog ;
public class AVIMConversation { /** * 设置当前聊天对话的属性 , 仅用于初始化时 * 因为 attr 涉及到本地缓存 , 所以初始化时与主动调用 setAttributes 行为不同 * @ param attr */ void setAttributesForInit ( Map < String , Object > attr ) { } }
this . attributes . clear ( ) ; if ( attr != null ) { this . attributes . putAll ( attr ) ; }
public class UpdateOperationWithCacheFileTask { /** * Removes the given file . * @ param file * a file which should be deleted * @ throws net . sf . qualitycheck . exception . IllegalNullArgumentException * if the given argument is { @ code null } * @ throws net . sf . qualitycheck . exception . IllegalStateOfArgumentException * if the file can not be deleted */ protected static void deleteFile ( @ Nonnull final File file ) { } }
Check . notNull ( file , "file" ) ; Check . stateIsTrue ( ! file . exists ( ) || file . delete ( ) , "Cannot delete file '%s'." , file . getPath ( ) ) ;
public class CommerceCurrencyUtil {
    /**
     * Removes the commerce currency with the primary key from the database.
     * Also notifies the appropriate model listeners.
     *
     * @param commerceCurrencyId the primary key of the commerce currency
     * @return the commerce currency that was removed
     * @throws NoSuchCurrencyException if a commerce currency with the primary key could not be found
     */
    public static CommerceCurrency remove(long commerceCurrencyId)
            throws com.liferay.commerce.currency.exception.NoSuchCurrencyException {
        // Delegate to the underlying persistence implementation.
        return getPersistence().remove(commerceCurrencyId);
    }
}
public class BoundedBuffer { /** * @ awisniew - ADDED * ( non - Javadoc ) * @ see java . util . concurrent . BlockingQueue # offer ( java . lang . Object ) */ @ Override public boolean offer ( T t ) { } }
if ( t == null ) { throw new IllegalArgumentException ( ) ; } boolean ret = false ; synchronized ( lock ) { if ( t instanceof QueueItem && ( ( QueueItem ) t ) . isExpedited ( ) ) { if ( numberOfUsedExpeditedSlots . get ( ) < expeditedBuffer . length ) { expeditedInsert ( t ) ; numberOfUsedExpeditedSlots . getAndIncrement ( ) ; ret = true ; } } else { if ( numberOfUsedSlots . get ( ) < buffer . length ) { insert ( t ) ; numberOfUsedSlots . getAndIncrement ( ) ; ret = true ; } } } if ( ret ) { notifyGet_ ( ) ; return true ; } return false ;
public class ST_RemoveDuplicatedCoordinates {
    /**
     * Removes duplicated coordinates within a geometry by dispatching to the
     * type-specific overload.
     *
     * @param geom the geometry to clean; may be null
     * @return the cleaned geometry, the input itself for points/empty geometries,
     *         or null for null input or an unknown Geometry subtype
     */
    public static Geometry removeCoordinates(Geometry geom) {
        // NOTE: the instanceof order matters — the Multi* cases must be tested
        // before GeometryCollection, of which they are subtypes.
        if (geom == null) {
            return null;
        } else if (geom.isEmpty()) {
            return geom;
        } else if (geom instanceof Point) {
            // A single point cannot contain duplicated coordinates.
            return geom;
        } else if (geom instanceof MultiPoint) {
            return removeCoordinates((MultiPoint) geom);
        } else if (geom instanceof LineString) {
            return removeCoordinates((LineString) geom);
        } else if (geom instanceof MultiLineString) {
            return removeCoordinates((MultiLineString) geom);
        } else if (geom instanceof Polygon) {
            return removeCoordinates((Polygon) geom);
        } else if (geom instanceof MultiPolygon) {
            return removeCoordinates((MultiPolygon) geom);
        } else if (geom instanceof GeometryCollection) {
            return removeCoordinates((GeometryCollection) geom);
        }
        // Unknown Geometry subtype.
        return null;
    }
}
public class StringUtils { /** * If < code > checkString < / code > has text , returns checkString + value string . Otherwise * empty string was returned * @ param chekString the chek string * @ param value the value * @ return the string */ public static String ifNotEmptyPrepend ( String chekString , String value ) { } }
if ( hasText ( chekString ) ) { return chekString + value ; } else { return "" ; }
public class OptionsParamView {
    /**
     * Sets and persists the "process images" option.
     *
     * @param processImages 0 = do not process images; any other value = process images
     */
    public void setProcessImages(int processImages) {
        this.processImages = processImages;
        // Persist the value in the configuration so it survives restarts.
        getConfig().setProperty(PROCESS_IMAGES, Integer.toString(processImages));
    }
}
public class NettyServerHandler {
    /**
     * Sends the response headers to the client.
     *
     * <p>If the HTTP/2 stream no longer exists, the stream is reset with
     * CANCEL instead of writing headers.
     */
    private void sendResponseHeaders(ChannelHandlerContext ctx, SendResponseHeadersCommand cmd,
            ChannelPromise promise) throws Http2Exception {
        // TODO(carl-mastrangelo): remove this check once
        // https://github.com/netty/netty/issues/6296 is fixed.
        int streamId = cmd.stream().id();
        Http2Stream stream = connection().stream(streamId);
        if (stream == null) {
            resetStream(ctx, streamId, Http2Error.CANCEL.code(), promise);
            return;
        }
        if (cmd.endOfStream()) {
            // Arrange for stream cleanup once the write completes.
            closeStreamWhenDone(promise, streamId);
        }
        encoder().writeHeaders(ctx, streamId, cmd.headers(), 0, cmd.endOfStream(), promise);
    }
}
public class OIndexDefinitionFactory { /** * Creates an instance of { @ link OIndexDefinition } for automatic index . * @ param oClass * class which will be indexed * @ param fieldNames * list of properties which will be indexed . Format should be ' < property > [ by key | value ] ' , use ' by key ' or ' by value ' to * describe how to index maps . By default maps indexed by key * @ param types * types of indexed properties * @ return index definition instance */ public static OIndexDefinition createIndexDefinition ( final OClass oClass , final List < String > fieldNames , final List < OType > types ) { } }
checkTypes ( oClass , fieldNames , types ) ; if ( fieldNames . size ( ) == 1 ) return createSingleFieldIndexDefinition ( oClass , fieldNames . get ( 0 ) , types . get ( 0 ) ) ; else return createMultipleFieldIndexDefinition ( oClass , fieldNames , types ) ;
public class BaseStreamEx {
    /**
     * {@inheritDoc}
     *
     * <p>If this stream was created using {@link #parallel(ForkJoinPool)}, the
     * new stream forgets about the supplied custom {@link ForkJoinPool} and
     * its terminal operation will be executed in the common pool.
     */
    @SuppressWarnings("unchecked")
    @Override
    public S parallel() {
        // Switch the execution context first; this drops any custom pool.
        context = context.parallel();
        // The wrapped stream may be created lazily, hence the null check.
        if (stream != null)
            stream = stream.parallel();
        return (S) this;
    }
}
public class EntityUpdatedData {
    /**
     * Adds a single field change, convenience overload that wraps the values
     * in an {@link EntityFieldChange}.
     *
     * @param name the field name
     * @param before the before state
     * @param after the after state
     */
    public void addChange(String name, String before, String after) {
        addChange(new EntityFieldChange(name, before, after));
    }
}
public class MemoryRemoteTable {
    /**
     * Retrieves the record at the given absolute row index.
     *
     * <p>Iteration state ({@code m_iterator}, {@code m_iCurrentRecord},
     * {@code m_objCurrentKey}) is kept between calls so that sequential
     * forward reads reuse the live iterator; a backward or repeated request
     * restarts iteration from the beginning.
     *
     * <p>Note: This is usually used only by thin clients, as thick clients
     * have the code to fake absolute access.
     *
     * @param iRowIndex the row to retrieve
     * @param iRowCount the number of rows to retrieve (used only by EjbCachedTable)
     * @return the record, or null if the index is past the end
     * @throws DBException file exception
     * @throws RemoteException remote exception
     */
    public Object get(int iRowIndex, int iRowCount) throws DBException, RemoteException {
        // Restart from the top if there is no live iterator, or the target row
        // is at/behind the current position (the iterator is forward-only).
        if ((m_iterator == null) || (m_iCurrentRecord == -1) || (iRowIndex <= m_iCurrentRecord)) {
            m_iterator = m_mDataMap.keySet().iterator();
            m_iCurrentRecord = -1;
        }
        while (m_iterator.hasNext()) {
            m_iCurrentRecord++;
            m_objCurrentKey = m_iterator.next();
            if (m_iCurrentRecord == iRowIndex)
                return m_mDataMap.get(m_objCurrentKey);
        }
        // Ran off the end: clear the cached iteration state and return null.
        m_iterator = null;
        m_iCurrentRecord = -1;
        m_objCurrentKey = null;
        return m_objCurrentKey;
    }
}
public class CreateSimulationJobResult {
    /**
     * Sets the list of all tags added to the simulation job.
     *
     * @param tags the list of all tags added to the simulation job
     * @return this object, so that method calls can be chained together
     */
    public CreateSimulationJobResult withTags(java.util.Map<String, String> tags) {
        setTags(tags);
        return this;
    }
}
public class MLDouble {
    /**
     * Flattens a rectangular {@code double[][]} into a boxed {@code Double[]}
     * in column-major order: element (m, n) is stored at index
     * {@code m + n * rows}.
     *
     * @param dd rectangular 2D array (all rows the same length as row 0);
     *           must not be null
     * @return a boxed, column-major 1D copy; an empty array when {@code dd}
     *         has no rows
     */
    private static Double[] double2DToDouble(double[][] dd) {
        // Guard: the previous version indexed dd[0] unconditionally and threw
        // ArrayIndexOutOfBoundsException for a zero-row input.
        if (dd.length == 0) {
            return new Double[0];
        }
        final int rows = dd.length;
        final int cols = dd[0].length;
        Double[] d = new Double[rows * cols];
        for (int n = 0; n < cols; n++) {
            for (int m = 0; m < rows; m++) {
                // Column-major layout: all of column n is contiguous.
                d[m + n * rows] = dd[m][n];
            }
        }
        return d;
    }
}
public class ConnectionContextFactory {
    /**
     * Caches a connection in this processor object, optionally closing any
     * previously cached connection.
     *
     * <p>NOTE: {@code createConnection(ConnectionContext)} does <b>not</b> set
     * this processor's connection — you must explicitly cache it via this
     * method. See also {@code createOrReuseConnection(ConnectionContext, boolean)}.
     *
     * @param connection the connection to cache
     * @param closeExistingConnection if true and a connection was already
     *        cached, that connection is closed; otherwise it is left alone
     *        but the new connection takes its place
     */
    protected void cacheConnection(Connection connection, boolean closeExistingConnection) {
        if (this.connection != null && closeExistingConnection) {
            try {
                // make sure it is closed to free up any resources it was using
                this.connection.close();
            } catch (JMSException e) {
                // Best-effort close: log and continue; the reference is
                // replaced below either way.
                msglog.errorCannotCloseConnectionMemoryMightLeak(e);
            }
        }
        this.connection = connection;
    }
}
public class BplusTree {
    /**
     * Removes {@code key} from the subtree rooted at the node with
     * {@code nodeid} (recursive).
     *
     * @param key the key to delete
     * @param nodeid id of the subtree root to remove the key from
     * @return true if the key was removed, false otherwise
     */
    protected boolean removeRecursive(final K key, final int nodeid) {
        if (nodeid == Node.NULL_ID) {
            return false; // NOT FOUND
        }
        Node<K, V> nodeDelete = getNode(nodeid);
        if (log.isDebugEnabled()) {
            log.debug("trying removeRecursive nodeDelete=" + nodeDelete + " key=" + key);
        }
        int slot = nodeDelete.findSlotByKey(key);
        if (nodeDelete.isLeaf()) {
            // Leaf level: a negative slot means the key is absent.
            if (slot < 0) {
                if (log.isDebugEnabled()) {
                    log.debug("NOT FOUND nodeDelete=" + nodeDelete + " key=" + key);
                }
                return false; // NOT FOUND
            }
            nodeDelete.remove(slot);
            putNode(nodeDelete);
            return true;
        }
        // Internal node: convert the search result into a child index.
        // Negative slot encodes an insertion point ((-slot) - 1); an exact hit
        // descends into the right child (slot + 1).
        slot = ((slot < 0) ? (-slot) - 1 : slot + 1);
        final InternalNode<K, V> nodeDeleteInternal = (InternalNode<K, V>) nodeDelete;
        if (removeRecursive(key, nodeDeleteInternal.childs[slot])) {
            // Rebalance the child if the removal left it underfull.
            nodeDeleteInternal.checkUnderflow(slot);
            return true;
        }
        return false;
    }
}
public class Utils {
    /**
     * Gets a formatted NLS message from this component's resource bundle.
     *
     * @param key the message key
     * @param params the message parameters
     * @return the formatted message; the key itself is used as the fallback
     *         when no message is found (last argument)
     */
    @Trivial
    public static final String getMessage(String key, Object... params) {
        return TraceNLS.getFormattedMessage(Utils.class, tc.getResourceBundleName(), key, params, key);
    }
}
public class Dialog {
    /**
     * Executes the pattern search end-to-end: resolves the selected miner,
     * obtains/downloads the model file, validates the output file is
     * writable, loads the BioPAX model, runs the pattern search, and writes
     * the results — updating the progress bar/label throughout.
     */
    private void mine() {
        Miner miner = (Miner) patternCombo.getSelectedItem();
        if (miner instanceof MinerAdapter)
            ((MinerAdapter) miner).setIDFetcher(new CommonIDFetcher());
        // Constructing the pattern before loading any model for a debug friendly code.
        // Otherwise if loading model takes time and an exception occurs in pattern
        // construction, it is just too much wait for nothing.
        ((Miner) patternCombo.getSelectedItem()).getPattern();
        // Prepare progress bar
        ProgressWatcher prg = new ProgressWatcher() {
            @Override
            public synchronized void setTotalTicks(int total) {
                prgBar.setMaximum(total);
            }

            @Override
            public synchronized void tick(int times) {
                prgBar.setValue(prgBar.getValue() + times);
            }
        };
        prgBar.setVisible(true);
        // Get the model file — source depends on which radio button is selected.
        File modFile;
        if (pcRadio.isSelected()) {
            if (getMaxMemory() < 4000) {
                showMessageDialog(this, "Maximum memory not large enough for handling\n"
                    + "Pathway Commons data. But will try anyway.\n"
                    + "Please consider running this application with the\n"
                    + "virtual machine parameter \"-Xmx5G\".");
            }
            modFile = new File(getPCFilename());
            if (!modFile.exists()) {
                prgLabel.setText("Downloading model");
                if (!downloadPC(prg)) {
                    eraseProgressBar();
                    showMessageDialog(this, "Cannot download Pathway Commons data for some reason. Sorry.");
                    return;
                }
                assert modFile.exists();
            }
        } else if (customFileRadio.isSelected()) {
            modFile = new File(modelField.getText());
        } else if (customURLRadio.isSelected()) {
            String url = urlField.getText().trim();
            prgLabel.setText("Downloading model");
            // Choose the download strategy from the URL's file extension.
            if (url.endsWith(".gz")) downloadCompressed(prg, url, "temp.owl", true);
            else if (url.endsWith(".zip")) downloadCompressed(prg, url, "temp.owl", false);
            else downloadPlain(url, "temp.owl");
            modFile = new File("temp.owl");
            if (!modFile.exists()) {
                showMessageDialog(this, "Cannot download the model at the given URL.");
                eraseProgressBar();
                return;
            }
        } else {
            throw new RuntimeException("Code should not be able to reach here!");
        }
        // Get the output file and verify it is writable by touching and deleting it.
        File outFile = new File(outputField.getText());
        try {
            BufferedWriter writer = new BufferedWriter(new FileWriter(outFile));
            writer.write("x");
            writer.close();
            outFile.delete();
        } catch (IOException e) {
            e.printStackTrace();
            eraseProgressBar();
            showMessageDialog(this, "Cannot write to file: " + outFile.getPath());
            return;
        }
        // Load model (indeterminate progress — load time is unknown up front).
        prgLabel.setText("Loading the model");
        prgBar.setIndeterminate(true);
        prgBar.setStringPainted(false);
        SimpleIOHandler io = new SimpleIOHandler();
        Model model;
        try {
            model = io.convertFromOWL(new FileInputStream(modFile));
            prgBar.setIndeterminate(false);
            prgBar.setStringPainted(true);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            eraseProgressBar();
            showMessageDialog(this, "File not found: " + modFile.getPath());
            return;
        }
        // Search
        Miner min = (Miner) patternCombo.getSelectedItem();
        Pattern p = min.getPattern();
        prgLabel.setText("Searching the pattern");
        prgBar.setValue(0);
        Map<BioPAXElement, List<Match>> matches = Searcher.search(model, p, prg);
        if (matches.isEmpty()) {
            prgLabel.setText("No results found!");
        } else {
            try {
                prgLabel.setText("Writing result");
                prgBar.setValue(0);
                prgBar.setStringPainted(false);
                prgBar.setIndeterminate(true);
                FileOutputStream os = new FileOutputStream(outFile);
                min.writeResult(matches, os);
                os.close();
                prgBar.setIndeterminate(false);
            } catch (IOException e) {
                e.printStackTrace();
                eraseProgressBar();
                showMessageDialog(this, "Error occurred while writing the results");
                return;
            }
            prgLabel.setText("Success! ");
            System.out.println("Success!");
            // Close the dialog only on a successful run with results.
            this.dispose();
        }
    }
}
public class MappingOntologyComplianceValidatorImpl {
    /**
     * Produces a multimap from datatype-property IRI to the corresponding
     * datatype(s) according to the ontology (the datatype may be inferred).
     *
     * <p>This is a rewriting of
     * it.unibz.inf.ontop.owlrefplatform.core.mappingprocessing
     * .MappingDataTypeRepair#getDataTypeFromOntology from Ontop v1.18.1.
     *
     * @param reasoner the classified TBox whose data-range DAG is traversed
     * @return an immutable multimap of IRI to datatype
     */
    private ImmutableMultimap<IRI, Datatype> computeDataTypeMap(ClassifiedTBox reasoner) {
        // TODO: switch to guava > 2.1, and replace by Streams.stream(iterable)
        return StreamSupport.stream(reasoner.dataRangesDAG().spliterator(), false)
            .flatMap(n -> getPartialPredicateToDatatypeMap(n, reasoner).entrySet().stream())
            .collect(ImmutableCollectors.toMultimap(e -> e.getKey(), Map.Entry::getValue));
    }
}
public class RamlControllerVisitor { /** * Check if the given resource or its ancestor have the uri param of given name . * @ param resource The resource on which to check . * @ param uriParamName Name of the uri Param we are looking for . * @ return < code > true < / code > if this or its ancestor resource have the param of given name already define . */ private static Boolean ancestorOrIHasParam ( final Resource resource , String uriParamName ) { } }
Resource ancestor = resource ; while ( ancestor != null ) { if ( ancestor . getUriParameters ( ) . containsKey ( uriParamName ) ) { return true ; } ancestor = ancestor . getParentResource ( ) ; } return false ;
public class WSRdbManagedConnectionImpl {
    /**
     * Adds a handle to this ManagedConnection's list of handles and, if
     * enabled, signals the JDBC 4.3+ driver that a request is starting.
     *
     * @param handle the handle to add
     * @throws ResourceException if a JDBC 4.3+ driver rejects the
     *         beginRequest operation
     */
    private final void addHandle(WSJdbcConnection handle) throws ResourceException {
        // Store the handle, resizing the backing array first when it is
        // (nearly) full; numHandlesInUse is incremented as part of the store.
        (numHandlesInUse < handlesInUse.length - 1 ? handlesInUse : resizeHandleList())[numHandlesInUse++] = handle;
        // First handle of a new request: notify the driver (JDBC 4.3+).
        if (!inRequest && dsConfig.get().enableBeginEndRequest)
            try {
                inRequest = true;
                mcf.jdbcRuntime.beginRequest(sqlConn);
            } catch (SQLException x) {
                // Record the failure for serviceability, then surface it.
                FFDCFilter.processException(x, getClass().getName(), "548", this);
                throw new DataStoreAdapterException("DSA_ERROR", x, getClass());
            }
    }
}
public class XsdEmitter {
    /**
     * Maps a COBOL data item to an XML schema type.
     * <ul>
     * <li>COBOL elementary data items are mapped to XML Schema simple types.</li>
     * <li>COBOL structures are mapped to XML schema complex types.</li>
     * </ul>
     *
     * @param xsdDataItem COBOL data item decorated with XSD attributes
     * @return a corresponding XML schema type, or null when the item has no
     *         XSD type or an unrecognized one
     */
    public XmlSchemaType createXmlSchemaType(final XsdDataItem xsdDataItem) {
        if (xsdDataItem.getXsdType() == null) {
            return null;
        }
        // Dispatch on the XSD type: COMPLEX builds a complex type, character
        // data builds "alpha" simple types, everything else numeric ones.
        switch (xsdDataItem.getXsdType()) {
        case COMPLEX:
            return createXmlSchemaComplexType(xsdDataItem);
        case STRING:
            return createAlphaXmlSchemaSimpleType(xsdDataItem, "string");
        case HEXBINARY:
            return createAlphaXmlSchemaSimpleType(xsdDataItem, "hexBinary");
        case SHORT:
            return createNumericXmlSchemaSimpleType(xsdDataItem, "short");
        case USHORT:
            return createNumericXmlSchemaSimpleType(xsdDataItem, "unsignedShort");
        case INT:
            return createNumericXmlSchemaSimpleType(xsdDataItem, "int");
        case UINT:
            return createNumericXmlSchemaSimpleType(xsdDataItem, "unsignedInt");
        case LONG:
            return createNumericXmlSchemaSimpleType(xsdDataItem, "long");
        case ULONG:
            return createNumericXmlSchemaSimpleType(xsdDataItem, "unsignedLong");
        case INTEGER:
            return createNumericXmlSchemaSimpleType(xsdDataItem, "integer");
        case DECIMAL:
            return createNumericXmlSchemaSimpleType(xsdDataItem, "decimal");
        case FLOAT:
            return createNumericXmlSchemaSimpleType(xsdDataItem, "float");
        case DOUBLE:
            return createNumericXmlSchemaSimpleType(xsdDataItem, "double");
        default:
            return null;
        }
    }
}
public class MtasBasicParser { /** * Compute type from mapping source . * @ param source the source * @ return the string * @ throws MtasParserException the mtas parser exception */ private String computeTypeFromMappingSource ( String source ) throws MtasParserException { } }
if ( source . equals ( MtasParserMapping . SOURCE_OWN ) ) { return null ; } else if ( source . equals ( MtasParserMapping . SOURCE_ANCESTOR_GROUP ) ) { return MAPPING_TYPE_GROUP ; } else if ( source . equals ( MtasParserMapping . SOURCE_ANCESTOR_GROUP_ANNOTATION ) ) { return MAPPING_TYPE_GROUP_ANNOTATION ; } else if ( source . equals ( MtasParserMapping . SOURCE_ANCESTOR_WORD ) ) { return MAPPING_TYPE_WORD ; } else if ( source . equals ( MtasParserMapping . SOURCE_ANCESTOR_WORD_ANNOTATION ) ) { return MAPPING_TYPE_WORD_ANNOTATION ; } else if ( source . equals ( MtasParserMapping . SOURCE_ANCESTOR_RELATION ) ) { return MAPPING_TYPE_RELATION ; } else if ( source . equals ( MtasParserMapping . SOURCE_ANCESTOR_RELATION_ANNOTATION ) ) { return MAPPING_TYPE_RELATION_ANNOTATION ; } else { throw new MtasParserException ( "unknown source " + source ) ; }
public class GVRInputManager {
    /**
     * Selects an input controller based on the list of controller types in
     * gvr.xml (priority order, highest priority last). If no controllers are
     * specified, defaults to "gaze, controller" (Gear controller first, then
     * Gaze). "onCursorControllerSelected" is emitted when a cursor controller
     * is chosen, and again whenever a higher-priority controller connects.
     *
     * @param listener listens for onCursorControllerSelected events
     * @see ICursorControllerSelectListener
     * @see #scanControllers()
     */
    public void selectController(ICursorControllerSelectListener listener) {
        // Fall back to the default priority list when gvr.xml provided none.
        if ((mEnabledControllerTypes == null) || (mEnabledControllerTypes.size() == 0)) {
            mEnabledControllerTypes = new ArrayList<GVRControllerType>(
                Arrays.asList(GVRControllerType.GAZE, GVRControllerType.CONTROLLER));
            scanDevices();
        }
        // The selector listens for controller events and picks the highest
        // priority controller available.
        GVRInputManager.SingleControllerSelector selector =
            new GVRInputManager.SingleControllerSelector(context, mEnabledControllerTypes);
        getEventReceiver().addListener(selector);
        getEventReceiver().addListener(listener);
        scanControllers();
    }
}
public class MethodAttribUtils {
    /**
     * Compares a method on the bean's home interface with a method described
     * by bean-method properties, for the purpose of deciding whether the
     * control descriptor associated with the bean method applies to the
     * remote interface method.
     *
     * <p>The methods are considered equal when they have the same name and
     * the same parameter types ("Name" and "ArgumentTypes" entries of
     * {@code beanMethodProps}). Java does not allow two methods with the same
     * name and parameter types that differ only in return type and/or
     * exception signature, so this is sufficient.
     *
     * @param homeMethod the home-interface method; may be null
     * @param beanMethodProps properties holding "Name" (String) and
     *        "ArgumentTypes" (String[]); may be null
     * @return true iff both arguments are non-null and name and parameter
     *         types match
     */
    public static final boolean homeMethodEquals(Method homeMethod, Properties beanMethodProps) {
        if ((homeMethod == null) || (beanMethodProps == null)) {
            return false;
        }
        // Names must match (a null "Name" entry simply fails the comparison).
        if (!homeMethod.getName().equals((String) beanMethodProps.get("Name"))) {
            return false;
        }
        // Parameter lists must match element-for-element by class name.
        Class<?>[] actualTypes = homeMethod.getParameterTypes();
        String[] expectedTypes = (String[]) beanMethodProps.get("ArgumentTypes");
        if (actualTypes.length != expectedTypes.length) {
            return false;
        }
        for (int i = 0; i < actualTypes.length; i++) {
            if (!actualTypes[i].getName().equals(expectedTypes[i])) {
                return false;
            }
        }
        return true;
    }
}
public class AbstractRestAgent {
    /**
     * Sends a request to the NFVO API to find an instance of type T by its ID.
     *
     * @param id the ID of the object to retrieve
     * @return the found object
     * @throws SDKException if the request fails
     */
    @Help(help = "Find the object of type {#} through the id")
    public T findById(final String id) throws SDKException {
        // Delegates to the generic GET request using this agent's class token.
        return (T) requestGet(id, clazz);
    }
}
public class GetRelationalDatabaseLogStreamsResult {
    /**
     * Adds log streams to the result.
     *
     * <p><b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setLogStreams(java.util.Collection)} or
     * {@link #withLogStreams(java.util.Collection)} to override existing values.
     *
     * @param logStreams objects describing the result of the get relational
     *        database log streams request
     * @return this object, so that method calls can be chained together
     */
    public GetRelationalDatabaseLogStreamsResult withLogStreams(String... logStreams) {
        // Lazily create the backing list, presized to the varargs count.
        if (this.logStreams == null) {
            setLogStreams(new java.util.ArrayList<String>(logStreams.length));
        }
        for (String ele : logStreams) {
            this.logStreams.add(ele);
        }
        return this;
    }
}
public class HtmlGraphics {
    /**
     * Sizes or resizes the root element that contains the game view. This is
     * specified in pixels as understood by page elements. If the page is
     * actually being displayed on a HiDPI (Retina) device, the actual
     * framebuffer may be 2x (or larger) the specified size.
     *
     * @param width logical width in CSS pixels
     * @param height logical height in CSS pixels
     */
    public void setSize(int width, int height) {
        rootElement.getStyle().setWidth(width, Unit.PX);
        rootElement.getStyle().setHeight(height, Unit.PX);
        // the frame buffer may be larger (or smaller) than the logical size,
        // depending on whether we're on a HiDPI display, or how the game has
        // configured things (maybe they're scaling down from native
        // resolution to improve performance)
        Scale fbScale = new Scale(frameBufferPixelRatio);
        canvas.setWidth(fbScale.scaledCeil(width));
        canvas.setHeight(fbScale.scaledCeil(height));
        // set the canvas's CSS size to the logical size; the browser works in
        // logical pixels
        canvas.getStyle().setWidth(width, Style.Unit.PX);
        canvas.getStyle().setHeight(height, Style.Unit.PX);
        viewportChanged(canvas.getWidth(), canvas.getHeight());
    }
}
public class _ComponentAttributesMap {
    /**
     * Removes the attribute with the specified name.
     *
     * <p>A handful of well-known marker keys are stored directly on the
     * component rather than in the state helper and are short-circuited here
     * (length is compared first as a cheap pre-filter before equals).
     * An attempt to remove an entry whose name is that of a <i>property</i>
     * on the underlying UIComponent causes an IllegalArgumentException.
     * Value-bindings for the underlying component are ignored.
     *
     * @param key must be a String; any other type causes ClassCastException
     * @return the previous value for the key, if any
     */
    public Object remove(Object key) {
        checkKey(key);
        int keyLength = ((String) key).length();
        if (keyLength >= MIN_LENGHT_CHECK) {
            if (MARK_CREATED.length() == keyLength && MARK_CREATED.equals(key)) {
                Object oldValue = _component.getOamVfMarkCreated();
                _component.setOamVfMarkCreated(null);
                return oldValue;
            } else if (FACET_NAME_KEY.length() == keyLength && FACET_NAME_KEY.equals(key)) {
                Object oldValue = _component.getOamVfFacetName();
                _component.setOamVfFacetName(null);
                return oldValue;
            } else if (COMPONENT_ADDED_BY_HANDLER_MARKER.length() == keyLength
                    && COMPONENT_ADDED_BY_HANDLER_MARKER.equals(key)) {
                Object oldValue = _component.isOamVfAddedByHandler();
                _component.setOamVfAddedByHandler(false);
                return oldValue;
            } else if (FACET_CREATED_UIPANEL_MARKER.length() == keyLength
                    && FACET_CREATED_UIPANEL_MARKER.equals(key)) {
                Object oldValue = _component.isOamVfFacetCreatedUIPanel();
                _component.setOamVfFacetCreatedUIPanel(false);
                return oldValue;
            } else if (UIComponent.BEANINFO_KEY.length() == keyLength
                    && UIComponent.BEANINFO_KEY.equals(key)) {
                // Clear the cached BeanInfo, then fall through to the state
                // helper removal below.
                _ccBeanInfo = null;
            }
        }
        // Component properties may not be removed through the attribute map.
        _PropertyDescriptorHolder propertyDescriptor = getPropertyDescriptor((String) key);
        if (propertyDescriptor != null) {
            throw new IllegalArgumentException("Cannot remove component property attribute");
        }
        return _component.getStateHelper().remove(UIComponentBase.PropertyKeys.attributesMap, key);
    }
}
public class Notifier { /** * Notifies specific component . * To be notiied components must implement INotifiable interface . If they don ' t * the call to this method has no effect . * @ param correlationId ( optional ) transaction id to trace execution through * call chain . * @ param component the component that is to be notified . * @ param args notifiation arguments . * @ throws ApplicationException when errors occured . * @ see INotifiable */ public static void notifyOne ( String correlationId , Object component , Parameters args ) throws ApplicationException { } }
if ( component instanceof INotifiable ) ( ( INotifiable ) component ) . notify ( correlationId , args ) ;
public class Elements {
    /**
     * Returns the sublist of all elements of the specified kinds that are
     * also assignable to the given class.
     *
     * @param elements the elements to filter; must not be null or contain null
     * @param clazz the model class elements must be assignable to
     * @param kinds the element kinds to keep
     * @return an unmodifiable filtered list
     */
    @Requires({
        "elements != null",
        "!elements.contains(null)",
        "clazz != null",
        "kinds != null" })
    @SuppressWarnings("unchecked")
    public static <T extends ElementModel> List<? extends T> filter(
            List<? extends ElementModel> elements, Class<T> clazz, ElementKind... kinds) {
        ArrayList<T> result = new ArrayList<T>();
        List<ElementKind> list = Arrays.asList(kinds);
        for (ElementModel element : elements) {
            // Keep an element only if both its kind and its class match.
            if (list.contains(element.getKind()) && clazz.isAssignableFrom(element.getClass())) {
                result.add((T) element);
            }
        }
        return Collections.unmodifiableList(result);
    }
}
public class StringUtil {
    /**
     * Converts the string to a date, using the given format.
     *
     * <p>A new {@link SimpleDateFormat} is created on every call; see the
     * todo about caching formats (SimpleDateFormat is not thread-safe, so a
     * cache would need per-thread or synchronized access).
     *
     * @param pString the string to convert
     * @param pFormat the date format pattern
     * @return the date
     * @see java.text.SimpleDateFormat#SimpleDateFormat(String)
     */
    public static Date toDate(String pString, String pFormat) {
        // Get the format from cache, or create new and insert
        // Return new date
        return toDate(pString, new SimpleDateFormat(pFormat));
    }
}
public class ESRIFileUtil { /** * Replies if the given value is assumed to be NaN according to the * ESRI specifications . * @ param value the value . * @ return < code > true < / code > if the value corresponds to NaN , otherwise < code > false < / code > . */ @ Pure public static boolean isESRINaN ( double value ) { } }
return Double . isInfinite ( value ) || Double . isNaN ( value ) || value <= ESRI_NAN ;
public class ContextFactoryImpl {
    /**
     * Looks up a bean by class, deriving the bean name from the class's
     * simple name with its first letter lower-cased (e.g. {@code FooService}
     * -> {@code fooService}).
     *
     * @param c the bean type
     * @return the bean instance, cast to the requested type
     */
    @SuppressWarnings("unchecked")
    @Override
    public <E> E getBean(Class<E> c) {
        String name = c.getSimpleName();
        // Derive the conventional bean name: decapitalize the first letter.
        String beanName = name.substring(0, 1).toLowerCase() + name.substring(1);
        String className = c.getName();
        logger.info("beanName:" + beanName + " className:" + className);
        // Delegate to the name-based lookup; the cast is unchecked by design.
        return (E) getBean(beanName);
    }
}
public class CountryCodes { /** * Returns whether the given country code is in SEPA . * @ param countryCode a non - null , uppercase , two - character country code . * @ return true if SEPA , false if not . * @ throws NullPointerException if the input is null . */ public static boolean isSEPACountry ( String countryCode ) { } }
int index = indexOf ( countryCode ) ; if ( index > - 1 ) { return ( CountryCodes . COUNTRY_IBAN_LENGTHS [ index ] & SEPA ) == SEPA ; } return false ;
public class PlainTextRendererImpl {
    /**
     * {@inheritDoc}
     *
     * <p>Loads the plain text template from the classpath (path treated as
     * absolute), optionally XML-encodes it, caches the rendered output when
     * caching is enabled, and writes it to the supplied writer. The tagged
     * components and context are not used by this plain-text renderer.
     */
    @Override
    public void renderTemplate(final String templateName, final Map<String, Object> context,
            final Map<String, WComponent> taggedComponents, final Writer writer,
            final Map<String, Object> options) {
        LOG.debug("Rendering plain text template " + templateName);
        // Expects path to be absolute.
        String name = templateName.startsWith("/") ? templateName : "/" + templateName;
        boolean xmlEncode = options.containsKey(XML_ENCODE);
        // Encoding changes the output, so it is part of the cache key.
        String cacheKey = templateName + "-" + xmlEncode;
        InputStream stream = null;
        // Caching: a "true" USE_CACHE option forces caching; otherwise the
        // global caching flag decides (only when no option is provided).
        Object value = options.get(USE_CACHE);
        boolean cache = (isCaching() && value == null)
            || (value != null && "true".equalsIgnoreCase(value.toString()));
        try {
            String output = null;
            if (cache) {
                output = getCache().get(cacheKey);
            }
            if (output == null) {
                stream = getClass().getResourceAsStream(name);
                if (stream == null) {
                    throw new SystemException("Could not find plain text template [" + templateName + "].");
                }
                output = new String(StreamUtil.getBytes(stream));
                if (xmlEncode) {
                    output = WebUtilities.encode(output);
                }
                if (cache) {
                    getCache().put(cacheKey, output);
                }
            }
            writer.write(output);
        } catch (SystemException e) {
            // Re-throw our own exception type untouched.
            throw e;
        } catch (Exception e) {
            throw new SystemException("Problems with plain text template [" + templateName + "]. "
                + e.getMessage(), e);
        } finally {
            StreamUtil.safeClose(stream);
        }
    }
}
public class ResolutionPreference {
    /**
     * Obtains the hint of the edit text widget, which allows to enter the
     * width of the resolution, from a specific typed array.
     *
     * @param typedArray the typed array the hint should be obtained from, as
     *        an instance of the class {@link TypedArray}; may not be null
     */
    private void obtainWidthHint(@NonNull final TypedArray typedArray) {
        CharSequence obtainedHint = typedArray.getText(R.styleable.ResolutionPreference_widthHint);
        // Fall back to the default hint resource when the attribute is unset.
        if (obtainedHint == null) {
            obtainedHint = getContext().getText(R.string.resolution_preference_width_hint);
        }
        setWidthHint(obtainedHint);
    }
}
public class DataChecksum { /** * Writes the current checksum to the stream . * If < i > reset < / i > is true , then resets the checksum . * @ return number of bytes written . Will be equal to getChecksumSize ( ) ; */ public int writeValue ( DataOutputStream out , boolean reset ) throws IOException { } }
if ( size <= 0 ) { return 0 ; } if ( size == 4 ) { out . writeInt ( ( int ) summer . getValue ( ) ) ; } else { throw new IOException ( "Unknown Checksum " + type ) ; } if ( reset ) { reset ( ) ; } return size ;
public class PGPooledConnection {
    /**
     * Gets a handle for a client to use. This is a wrapper around the
     * physical connection, so the client can call close and it will just
     * return the connection to the pool without really closing the physical
     * connection.
     *
     * <p>According to the JDBC 2.0 Optional Package spec (6.2.3), only one
     * client may have an active handle to the connection at a time, so if
     * there is a previous handle active when this is called, the previous one
     * is forcibly closed and its work rolled back.
     *
     * @return a proxy handle over the physical connection
     * @throws SQLException if this pooled connection is closed or the handle
     *         cannot be prepared; fatal errors are reported to listeners
     */
    @Override
    public Connection getConnection() throws SQLException {
        if (con == null) {
            // Before throwing the exception, let's notify the registered
            // listeners about the error
            PSQLException sqlException = new PSQLException(
                GT.tr("This PooledConnection has already been closed."),
                PSQLState.CONNECTION_DOES_NOT_EXIST);
            fireConnectionFatalError(sqlException);
            throw sqlException;
        }
        // If any error occurs while opening a new connection, the listeners
        // have to be notified. This gives a chance to connection pools to
        // eliminate bad pooled connections.
        try {
            // Only one connection can be open at a time from this
            // PooledConnection. See JDBC 2.0 Optional Package spec section 6.2.3
            if (last != null) {
                last.close();
                // Roll back any in-flight work from the previous handle.
                if (!con.getAutoCommit()) {
                    try {
                        con.rollback();
                    } catch (SQLException ignored) {
                    }
                }
                con.clearWarnings();
            }
            /*
             * In XA-mode, autocommit is handled in PGXAConnection, because it
             * depends on whether an XA-transaction is open or not
             */
            if (!isXA) {
                con.setAutoCommit(autoCommit);
            }
        } catch (SQLException sqlException) {
            fireConnectionFatalError(sqlException);
            throw (SQLException) sqlException.fillInStackTrace();
        }
        // Hand out a dynamic proxy so close() returns the connection to the
        // pool instead of closing the physical connection.
        ConnectionHandler handler = new ConnectionHandler(con);
        last = handler;
        Connection proxyCon = (Connection) Proxy.newProxyInstance(getClass().getClassLoader(),
            new Class[] { Connection.class, PGConnection.class }, handler);
        last.setProxy(proxyCon);
        return proxyCon;
    }
}
public class MapOnlyMapper {
  /**
   * Per-task initialization hook for subclasses.
   *
   * Override this method, not the inherited {@code setup(Context)}: the default
   * {@code setup(Context)} initializes {@code MultipleOutputs} first and then calls this
   * method automatically, passing the ready-to-use collector. The default implementation
   * is a no-op.
   *
   * @param context the Hadoop mapper context for this task
   * @param mOuts   collector wrapping the already-initialized MultipleOutputs
   * @throws IOException          if subclass initialization performs I/O and it fails
   * @throws InterruptedException if subclass initialization is interrupted
   */
  protected void setup(Mapper<I1, I2, O1, O2>.Context context, MultipleOutputsCollector mOuts)
      throws IOException, InterruptedException { } }
public class ODatabaseRecordAbstract { /** * Callback the registeted hooks if any . * @ param iType * @ param id * Record received in the callback * @ return True if the input record is changed , otherwise false */ public boolean callbackHooks ( final TYPE iType , final OIdentifiable id ) { } }
if ( ! OHookThreadLocal . INSTANCE . push ( id ) ) return false ; try { final ORecord < ? > rec = id . getRecord ( ) ; if ( rec == null ) return false ; boolean recordChanged = false ; for ( ORecordHook hook : hooks ) if ( hook . onTrigger ( iType , rec ) ) recordChanged = true ; return recordChanged ; } finally { OHookThreadLocal . INSTANCE . pop ( id ) ; }
public class WebhookAuthConfigurationMarshaller { /** * Marshall the given parameter object . */ public void marshall ( WebhookAuthConfiguration webhookAuthConfiguration , ProtocolMarshaller protocolMarshaller ) { } }
if ( webhookAuthConfiguration == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( webhookAuthConfiguration . getAllowedIPRange ( ) , ALLOWEDIPRANGE_BINDING ) ; protocolMarshaller . marshall ( webhookAuthConfiguration . getSecretToken ( ) , SECRETTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DefaultServiceRegistry {
  /**
   * Registers the actions of a REMOTE service (running on another node) in this registry.
   *
   * Clones each action's config, tags it with the owner {@code nodeID}, wraps it in a
   * {@code RemoteActionEndpoint}, attaches it to a per-action-name {@code Strategy}
   * (creating the strategy on first sight of that name), and applies all installed
   * middlewares. Registry mutation happens under the write lock; summary logging and the
   * listener broadcast happen after the lock is released.
   *
   * @param nodeID id of the remote node that owns the service
   * @param config service descriptor tree; reads "name" and the optional "actions" map
   */
  @Override
  public void addActions(String nodeID, Tree config) { } }
Tree actions = config.get("actions");
String serviceName = config.get("name", "");
int actionCounter = 0;
// StampedLock write section: all registry structures are mutated under this lock.
final long stamp = lock.writeLock();
try {
  if (actions != null && actions.isMap()) {
    for (Tree actionConfig : actions) {
      // Clone before tagging so the caller's config tree is never mutated.
      actionConfig = actionConfig.clone();
      actionConfig.putObject("nodeID", nodeID, true);
      String actionName = actionConfig.get("name", "");
      // Register remote action
      RemoteActionEndpoint endpoint =
          new RemoteActionEndpoint(this, transporter, nodeID, actionConfig, actionName);
      // One strategy per action name; create lazily on first registration.
      Strategy<ActionEndpoint> actionStrategy = strategies.get(actionName);
      if (actionStrategy == null) {
        actionStrategy = strategyFactory.create();
        strategies.put(actionName, actionStrategy);
      }
      actionStrategy.addEndpoint(endpoint);
      // Apply middlewares
      for (Middleware middleware : middlewares) {
        endpoint.use(middleware);
      }
      // Write log about this action
      logger.info("Action \"" + actionName + "\" on node \"" + nodeID + "\" registered.");
      actionCounter++;
    }
  }
  names.add(serviceName);
} finally {
  lock.unlockWrite(stamp);
}
// Write log about this service (outside the lock: logging must not extend the critical section)
StringBuilder msg = new StringBuilder(64);
msg.append("Remote service \"");
msg.append(serviceName);
msg.append("\" registered ");
if (actionCounter == 0) {
  msg.append("without any actions");
} else if (actionCounter == 1) {
  msg.append("with 1 action");
} else {
  msg.append("with ");
  msg.append(actionCounter);
  msg.append(" actions");
}
msg.append(" on node \"");
msg.append(nodeID);
msg.append("\".");
logger.info(msg.toString());
// Notify local listeners about the new REMOTE service
broadcastServicesChanged(false);
public class JsonParser { /** * 解析json * @ param content json字符串 * @ param type json解析后对应的实体类型 * @ param < T > 实体类型的实际类型 * @ return 解析失败将返回null */ @ SuppressWarnings ( "unchecked" ) public < T > T readAsObject ( String content , Class < T > type ) { } }
Assert . notNull ( type ) ; try { if ( StringUtils . isEmpty ( content ) ) { log . debug ( "content为空,返回null" , content , type ) ; return null ; } else if ( type . equals ( String . class ) ) { return ( T ) content ; } return MAPPER . readValue ( content , type ) ; } catch ( Exception e ) { log . error ( "json解析失败,失败原因:" , e ) ; return null ; }
public class PreauthorizationService {
  /**
   * This function deletes a preauthorization.
   *
   * Thin delegation: issues the HTTP DELETE via {@code RestfulUtils} against this service's
   * resource path using the client's shared HTTP client.
   *
   * @param preauthorization the {@link Preauthorization} object to be deleted
   */
  public void delete(final Preauthorization preauthorization) { } }
RestfulUtils.delete(PreauthorizationService.PATH, preauthorization, Preauthorization.class,
    super.httpClient);
public class DateUtils { /** * Extract a date from a verbatim date , returning ranges specified to day . * @ param verbatimEventDate a string containing a verbatim event date . * @ param yearsBeforeSuspect the value for a year before which parsed years should be considered suspect . * @ return a map with result and resultState as keys * @ deprecated * @ see DateUtils # extractDateToDayFromVerbatimER ( String , int ) replacement method */ public static Map < String , String > extractDateToDayFromVerbatim ( String verbatimEventDate , int yearsBeforeSuspect ) { } }
Map < String , String > result = extractDateFromVerbatim ( verbatimEventDate , yearsBeforeSuspect ) ; if ( result . size ( ) > 0 && result . get ( "resultState" ) . equals ( "range" ) ) { String dateRange = result . get ( "result" ) ; try { Interval parseDate = extractDateInterval ( dateRange ) ; logger . debug ( parseDate ) ; String resultDate = parseDate . getStart ( ) . toString ( "yyyy-MM-dd" ) + "/" + parseDate . getEnd ( ) . toString ( "yyyy-MM-dd" ) ; result . put ( "result" , resultDate ) ; } catch ( Exception e ) { logger . debug ( e . getMessage ( ) ) ; } } return result ;