signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LoadingCacheProxy {
    /**
     * Retrieves the value from the cache, loading it if necessary.
     *
     * Expired entries are removed atomically via computeIfPresent before the
     * lookup proceeds, so a concurrent writer's fresh entry is never evicted.
     *
     * @param key the cache key to look up
     * @return a copy of the cached value, or {@code null} if absent
     */
    @SuppressWarnings("PMD.AvoidDeeplyNestedIfStmts")
    private @Nullable V getOrLoad(K key) {
        boolean statsEnabled = statistics.isEnabled();
        // Only sample the ticker when statistics are on; 0L marks "not sampled".
        long start = statsEnabled ? ticker.read() : 0L;
        long millis = 0L;
        Expirable<V> expirable = cache.getIfPresent(key);
        if ((expirable != null) && !expirable.isEternal()) {
            // Reuse the stats timestamp when available to avoid a second ticker read.
            millis = nanosToMillis((start == 0L) ? ticker.read() : start);
            if (expirable.hasExpired(millis)) {
                Expirable<V> expired = expirable;
                // Remove only if the mapping is still the exact expired instance;
                // a racing writer's replacement entry is left untouched.
                cache.asMap().computeIfPresent(key, (k, e) -> {
                    if (e == expired) {
                        dispatcher.publishExpired(this, key, expired.get());
                        statistics.recordEvictions(1);
                        return null;
                    }
                    return e;
                });
                expirable = null;
            }
        }
        if (expirable == null) {
            // cache.get() triggers the loader; count the miss either way.
            expirable = cache.get(key);
            statistics.recordMisses(1L);
        } else {
            statistics.recordHits(1L);
        }
        V value = null;
        if (expirable != null) {
            setAccessExpirationTime(expirable, millis);
            value = copyValue(expirable);
        }
        if (statsEnabled) {
            statistics.recordGetTime(ticker.read() - start);
        }
        return value;
    }
}
public class AbstractCommonService { /** * 保存所有数据 * @ param pData * @ throws APPErrorException */ @ Override public void update ( Object pData ) throws APPErrorException { } }
valid ( pData ) ; mDao . updateByPrimaryKey ( pData ) ;
public class RootBeer { /** * Using the PackageManager , check for a list of well known apps that require root . @ link { Const . knownRootAppsPackages } * @ param additionalDangerousApps - array of additional packagenames to search for * @ return true if one of the apps it ' s installed */ public boolean detectPotentiallyDangerousApps ( String [ ] additionalDangerousApps ) { } }
// Create a list of package names to iterate over from constants any others provided ArrayList < String > packages = new ArrayList < > ( ) ; packages . addAll ( Arrays . asList ( Const . knownDangerousAppsPackages ) ) ; if ( additionalDangerousApps != null && additionalDangerousApps . length > 0 ) { packages . addAll ( Arrays . asList ( additionalDangerousApps ) ) ; } return isAnyPackageFromListInstalled ( packages ) ;
public class XAbstractAttributeMapBufferedImpl { /** * Retrieves a quick - access representation of this attribute map * for actual usage . De - buffers the attribute map and creates an * in - memory representation which should be discarded after use * to free memory . * @ return In - memory copy of this attribute map . */ protected synchronized XAttributeMap deserialize ( ) throws IOException { } }
if ( this . size == 0 ) { return new XAttributeMapLazyImpl < XAttributeMapImpl > ( XAttributeMapImpl . class ) ; } else { if ( cacheMap != null && cacheMap . get ( ) != null ) { return cacheMap . get ( ) ; } else { storage . seek ( 0 ) ; XAttributeMap deserialized = this . serializer . deserialize ( storage ) ; cacheMap = new WeakReference < XAttributeMap > ( deserialized ) ; return deserialized ; } }
public class TopoGraph { /** * Deletes user index */ void deleteUserIndexForChains ( int userIndex ) { } }
assert ( m_chainIndices . get ( userIndex ) != null ) ; m_chainIndices . set ( userIndex , null ) ;
public class AbstractMemberExpansionTransformer {
    /**
     * If this method is being called, it means we're expanding a one-dimensional
     * array or collection, with a right hand side that evaluates to a property
     * that's not an array or collection. In that case, we build up an array and
     * simply store values directly into it. We also null-short-circuit in the
     * event that the root is null. The member expansion portion ends up as a
     * composite that looks like:
     *   temp_array = new Foo[temp_root.length]
     *   for (a in temp_root index i) { temp_array[i] = a.Bar }
     *   temp_array
     * And the overall expression looks like:
     *   temp_root = root
     *   (temp_root == null ? (Bar[]) null : (Bar[]) member_expansion)
     */
    protected IRExpression compileExpansionDirectlyToArray(IType rootType, IType rootComponentType, IType resultType, IType resultCompType) {
        // Evaluate the root and assign it to a temp variable
        IRSymbol tempRoot = _cc().makeAndIndexTempSymbol(getDescriptor(rootType));
        IRStatement tempRootAssignment = buildAssignment(tempRoot, ExpressionTransformer.compile(_expr().getRootExpression(), _cc()));
        // Create the result array and assign it to a temp variable
        IRSymbol resultArray = _cc().makeAndIndexTempSymbol(getDescriptor(resultType));
        IRStatement arrayCreation = buildAssignment(resultArray, makeArray(resultCompType, createArrayLengthExpression(rootType, tempRoot)));
        // Create the loop that populates the array
        IRForEachStatement forLoop = createArrayStoreLoop(rootType, rootComponentType, resultCompType, tempRoot, resultArray);
        // Build the expansion out of the array creation, for loop, and identifier
        IRExpression expansion = buildComposite(arrayCreation, forLoop, identifier(resultArray));
        // Short-circuit if we're not dealing with primitive types: emit the
        // null-check ternary (null root -> empty typed array, else expansion).
        if (!rootComponentType.isPrimitive() && !resultCompType.isPrimitive()) {
            return buildComposite(tempRootAssignment, buildNullCheckTernary(identifier(tempRoot), checkCast(_expr().getType(), makeArray(resultCompType, numericLiteral(0))), checkCast(_expr().getType(), expansion)));
        } else {
            // Primitives can never be null, so no null-check is emitted.
            return buildComposite(tempRootAssignment, checkCast(_expr().getType(), expansion));
        }
    }
}
public class NumberMatrix {
    /**
     * Print matrices, the operator, and their operation result side by side,
     * as {@code m1 op m2 = m3}. The operator and '=' are printed only on the
     * middle row so they line up visually.
     *
     * @param m1 first operand matrix
     * @param m2 second operand matrix (same row count as m1)
     * @param m3 result matrix (same row count as m1)
     * @param op operator character to display
     * @param out stream to print to
     */
    public static void printResult(Number[][] m1, Number[][] m2, Number[][] m3, char op, PrintStream out) {
        for (int i = 0; i < m1.length; i++) {
            for (int j = 0; j < m1[0].length; j++) {
                out.print(" " + m1[i][j]);
            }
            if (i == m1.length / 2) {
                out.print(" " + op + " ");
            } else {
                out.print(" ");
            }
            // BUG FIX: iterate over m2's COLUMN count (m2[0].length), not its row
            // count (m2.length); the original only worked for square matrices and
            // could truncate or overrun rows otherwise.
            for (int j = 0; j < m2[0].length; j++) {
                out.print(" " + m2[i][j]);
            }
            if (i == m1.length / 2) {
                out.print(" = ");
            } else {
                out.print(" ");
            }
            // BUG FIX: same column-vs-row fix for the result matrix.
            for (int j = 0; j < m3[0].length; j++) {
                out.print(m3[i][j] + " ");
            }
            out.println();
        }
    }
}
public class BEValue { /** * Returns this BEValue as a List of BEValues . * @ throws InvalidBEncodingException If the value is not an * { @ link ArrayList } . */ @ SuppressWarnings ( "unchecked" ) public List < BEValue > getList ( ) throws InvalidBEncodingException { } }
if ( this . value instanceof ArrayList ) { return ( ArrayList < BEValue > ) this . value ; } else { throw new InvalidBEncodingException ( "Excepted List<BEvalue> !" ) ; }
public class CmsJSONSearchConfigurationParser { /** * Returns the configured request parameter for the last query , or the default parameter if no core is configured . * @ return The configured request parameter for the last query , or the default parameter if no core is configured . */ protected String getFirstCallParam ( ) { } }
String param = parseOptionalStringValue ( m_configObject , JSON_KEY_RELOADED_PARAM ) ; if ( param == null ) { return null != m_baseConfig ? m_baseConfig . getGeneralConfig ( ) . getReloadedParam ( ) : DEFAULT_RELOADED_PARAM ; } else { return param ; }
public class MultimapWithProtoValuesSubject {
    /**
     * Specifies that the 'has' bit of these explicitly specified top-level field
     * numbers should be ignored when comparing for equality. Sub-fields must be
     * specified explicitly (via {@link FieldDescriptor}) if they are to be
     * ignored as well.
     *
     * <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the
     * 'has' bit for all fields.
     *
     * @see #ignoringFieldAbsenceForValues() for details
     */
    public MultimapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldsForValues(int firstFieldNumber, int... rest) {
        // asList flattens the mandatory first field number plus the varargs into
        // one list; the returned assertion wraps a new immutable config.
        return usingConfig(config.ignoringFieldAbsenceOfFields(asList(firstFieldNumber, rest)));
    }
}
public class PairTable {
    /**
     * While extracting the items from a linear probing hashtable, this will
     * usually undo the wrap-around provided that the table isn't too full.
     * Experiments suggest that for sufficiently large tables the load factor
     * would have to be over 90 percent before this would fail frequently, and
     * even then the subsequent sort would fix things up.
     *
     * @param table the given table to unwrap
     * @param numPairs the number of valid pairs in the table
     * @return the unwrapped table, or null when numPairs < 1
     */
    static int[] unwrappingGetItems(final PairTable table, final int numPairs) {
        if (numPairs < 1) {
            return null;
        }
        final int[] slotsArr = table.slotsArr;
        final int tableSize = 1 << table.lgSizeInts;
        final int[] result = new int[numPairs];
        int i = 0;
        int l = 0;            // next write position from the front
        int r = numPairs - 1; // next write position from the back
        // Special rules for the region before the first empty slot.
        // hiBit distinguishes items that probably wrapped around from the end.
        final int hiBit = 1 << (table.validBits - 1);
        while ((i < tableSize) && (slotsArr[i] != -1)) {
            final int item = slotsArr[i++];
            if ((item & hiBit) != 0) {
                result[r--] = item;
            } // This item was probably wrapped, so move to end.
            else {
                result[l++] = item;
            }
        }
        // The rest of the table is processed normally: every non-empty slot
        // goes to the front region in probe order.
        while (i < tableSize) {
            final int look = slotsArr[i++];
            if (look != -1) {
                result[l++] = look;
            }
        }
        // Front and back regions must meet exactly when all pairs are placed.
        assert l == (r + 1);
        return result;
    }
}
public class Nodes {
    /**
     * Actually persists a node on disk.
     *
     * @param node the node to be persisted.
     * @throws IOException if the node could not be persisted.
     */
    private void persistNode(final @Nonnull Node node) throws IOException {
        // No need for a full save() so we just do the minimum.
        final File nodeDir = new File(getNodesDir(), node.getNodeName());
        if (node instanceof EphemeralNode) {
            // Ephemeral nodes are never written out; drop any leftover directory.
            Util.deleteRecursive(nodeDir);
        } else {
            final XmlFile xmlFile = new XmlFile(Jenkins.XSTREAM, new File(nodeDir, "config.xml"));
            xmlFile.write(node);
            SaveableListener.fireOnChange(this, xmlFile);
        }
        jenkins.getQueue().scheduleMaintenance();
    }
}
public class WebAdminSecurityCollaboratorImpl { /** * Get the effective SecurityMetadata for this application . * First , try to defer to the application collaborator to see if the application * provides data . If so , use it . Otherwise , fallback to the default SecurityMetadata . * { @ inheritDoc } */ @ Override public SecurityMetadata getSecurityMetadata ( ) { } }
SecurityMetadata sm = super . getSecurityMetadata ( ) ; if ( sm == null ) { return getDefaultAdminSecurityMetadata ( ) ; } else { return sm ; }
public class JAXWSBundle { /** * Factory method for creating JAX - WS clients . * @ param serviceClass Service interface class . * @ param address Endpoint URL address . * @ param handlers Client side JAX - WS handlers . Optional . * @ param < T > Service interface type . * @ return JAX - WS client proxy . * @ deprecated Use the { @ link # getClient ( ClientBuilder ) } getClient } method instead . */ @ Deprecated public < T > T getClient ( Class < T > serviceClass , String address , Handler ... handlers ) { } }
checkArgument ( serviceClass != null , "ServiceClass is null" ) ; checkArgument ( address != null , "Address is null" ) ; checkArgument ( ( address ) . trim ( ) . length ( ) > 0 , "Address is empty" ) ; return jaxwsEnvironment . getClient ( new ClientBuilder < > ( serviceClass , address ) . handlers ( handlers ) ) ;
public class Space { /** * Link this space to another by creating a link and finding the point * at which the spaces link up * @ param other The other space to link to */ public void link ( Space other ) { } }
// aligned vertical edges if ( inTolerance ( x , other . x + other . width ) || inTolerance ( x + width , other . x ) ) { float linkx = x ; if ( x + width == other . x ) { linkx = x + width ; } float top = Math . max ( y , other . y ) ; float bottom = Math . min ( y + height , other . y + other . height ) ; float linky = top + ( ( bottom - top ) / 2 ) ; Link link = new Link ( linkx , linky , other ) ; links . put ( other , link ) ; linksList . add ( link ) ; } // aligned horizontal edges if ( inTolerance ( y , other . y + other . height ) || inTolerance ( y + height , other . y ) ) { float linky = y ; if ( y + height == other . y ) { linky = y + height ; } float left = Math . max ( x , other . x ) ; float right = Math . min ( x + width , other . x + other . width ) ; float linkx = left + ( ( right - left ) / 2 ) ; Link link = new Link ( linkx , linky , other ) ; links . put ( other , link ) ; linksList . add ( link ) ; }
public class RelationalOperator { /** * Gets the RelationalOperator performing less than or greater than comparisons to determine whether all provided * values are less than some upper bound value or possibly greater than some lower bound value . * @ param lowerBound the Comparable upper bounded value . * @ param upperBound the Comparable lower bounded value . * @ param < T > the expected Class type for the object used in the less than or greater than comparison . * @ return a RelationalOperator for the less than or greater than comparison . */ public static < T extends Comparable < T > > RelationalOperator < T > lessThanOrGreaterThan ( T lowerBound , T upperBound ) { } }
return ComposableRelationalOperator . compose ( lessThan ( lowerBound ) , LogicalOperator . OR , greaterThan ( upperBound ) ) ;
public class NameServerImpl { /** * Unregisters an identifier locally . * @ param id an identifier */ @ Override public void unregister ( final Identifier id ) { } }
LOG . log ( Level . FINE , "id: " + id ) ; idToAddrMap . remove ( id ) ;
public class CircuitsConfig { /** * Import all circuits from configuration . * @ param circuitsConfig The circuits configuration ( must not be < code > null < / code > ) . * @ return The circuits imported . * @ throws LionEngineException If unable to read data . */ public static Map < Circuit , Collection < TileRef > > imports ( Media circuitsConfig ) { } }
Check . notNull ( circuitsConfig ) ; final Xml root = new Xml ( circuitsConfig ) ; final Collection < Xml > nodesCircuit = root . getChildren ( NODE_CIRCUIT ) ; final Map < Circuit , Collection < TileRef > > circuits = new HashMap < > ( nodesCircuit . size ( ) ) ; for ( final Xml nodeCircuit : nodesCircuit ) { final String groupIn = nodeCircuit . readString ( ATT_GROUP_IN ) ; final String groupOut = nodeCircuit . readString ( ATT_GROUP_OUT ) ; final String circuitType = nodeCircuit . readString ( ATT_CIRCUIT_TYPE ) ; final CircuitType type = CircuitType . from ( circuitType ) ; final Circuit circuit = new Circuit ( type , groupIn , groupOut ) ; final Collection < Xml > nodesTileRef = nodeCircuit . getChildren ( TileConfig . NODE_TILE ) ; final Collection < TileRef > tilesRef = importTiles ( nodesTileRef ) ; circuits . put ( circuit , tilesRef ) ; } return circuits ;
public class PresentsDObjectMgr { /** * Dumps collected profiling information to the system log . */ public void dumpUnitProfiles ( ) { } }
for ( Map . Entry < String , UnitProfile > entry : _profiles . entrySet ( ) ) { log . info ( "P: " + entry . getKey ( ) + " => " + entry . getValue ( ) ) ; }
public class BaasDocument {
    /**
     * Asynchronously retrieves the list of documents readable to the user that
     * match <code>filter</code> in <code>collection</code>.
     *
     * @param collection the collection to retrieve, not <code>null</code>
     * @param filter a filter to apply to the request
     * @param handler a callback to be invoked with the result of the request
     * @return a {@link com.baasbox.android.RequestToken} to handle the asynchronous request
     */
    public static RequestToken fetchAll(String collection, BaasQuery.Criteria filter, BaasHandler<List<BaasDocument>> handler) {
        // Convenience overload: delegates with the default request options.
        return fetchAll(collection, filter, RequestOptions.DEFAULT, handler);
    }
}
public class ParameterChecker { /** * Walk the tree of children of the statement , process all children of type ReflectionFragment . * @ param statement The parsed statement element . * @ param paramMap The method parameters , keyed by name . * @ param method The method declaration which was annotated . */ private static void doCheck ( SqlFragmentContainer statement , HashMap < String , ParameterDeclaration > paramMap , final MethodDeclaration method ) { } }
SqlFragment [ ] fragments = statement . getChildren ( ) ; for ( SqlFragment fragment : fragments ) { // if the fragment is a container check all of its children . if ( fragment instanceof SqlFragmentContainer ) { doCheck ( ( SqlFragmentContainer ) fragment , paramMap , method ) ; // reflection fragment - make sure it can be mapped using the method ' s param values . } else if ( fragment instanceof ReflectionFragment ) { checkReflectionFragment ( ( ReflectionFragment ) fragment , paramMap , method ) ; } }
public class Update { /** * A key - value map that contains the parameters associated with the update . * @ param params * A key - value map that contains the parameters associated with the update . */ public void setParams ( java . util . Collection < UpdateParam > params ) { } }
if ( params == null ) { this . params = null ; return ; } this . params = new java . util . ArrayList < UpdateParam > ( params ) ;
public class CQueue { /** * 将QueueElement根据eWeight由小到大的顺序插入队列 * @ param newElement */ public void enQueue ( QueueElement newElement ) { } }
QueueElement pCur = pHead , pPre = null ; while ( pCur != null && pCur . weight < newElement . weight ) { pPre = pCur ; pCur = pCur . next ; } newElement . next = pCur ; if ( pPre == null ) pHead = newElement ; else pPre . next = newElement ;
public class SqlBuilder {
    /**
     * Builds an INSERT SQL statement for the given ActiveRecord.
     * Non-null, non-@Transient fields become named parameter placeholders
     * (":name") in the VALUES clause.
     *
     * @param activeRecord the ActiveRecord object to build the statement for
     * @return a QueryMeta carrying the generated insert SQL
     */
    static QueryMeta buildInsertSql(ActiveRecord activeRecord) {
        QueryMeta queryMeta = new QueryMeta();
        String tableName = activeRecord.getTableName();
        StringBuilder sb = new StringBuilder(Const.SQL_INSERT + " ");
        sb.append(tableName);
        sb.append(" (");
        StringBuffer values = new StringBuffer(" VALUES (");
        // Walk declared fields, skipping @Transient ones and null values, so
        // only populated columns appear in the statement.
        Stream.of(activeRecord.getClass().getDeclaredFields()).filter(field -> null == field.getAnnotation(Transient.class)).forEach(field -> {
            try {
                field.setAccessible(true);
                if (field.get(activeRecord) != null) {
                    Pair<String, String> pair = getColumnName(field);
                    sb.append(pair.getLeft()).append(", ");
                    values.append(':').append(pair.getRight()).append(", ");
                }
            } catch (Exception e) {
                // NOTE(review): reflection failures are silently swallowed here,
                // dropping the column from the statement — TODO confirm intended.
            }
        });
        sb.append(')');
        values.append(')');
        // The replace(", )", ")") call strips the trailing comma left by the
        // append loop from both the column list and the VALUES list.
        String sql = sb.append(values).toString().replace(", )", ")");
        queryMeta.setSql(sql);
        return queryMeta;
    }
}
public class Whitebox {
    /**
     * Invoke a private or inner class method without the need to specify the
     * method name. This is thus a more refactor friendly version of the
     * {@link #invokeMethod(Object, String, Object...)} method and is recommended
     * over this method for that reason. This method might be useful to test
     * private methods.
     *
     * @throws Exception if something goes wrong.
     */
    public static synchronized <T> T invokeMethod(Object instance, Object... arguments) throws Exception {
        // Thin facade: the implementation resolves the target method by
        // matching the argument types.
        return WhiteboxImpl.invokeMethod(instance, arguments);
    }
}
public class InProcessLauncher { /** * Visible for testing . */ Method findSparkSubmit ( ) throws IOException { } }
ClassLoader cl = Thread . currentThread ( ) . getContextClassLoader ( ) ; if ( cl == null ) { cl = getClass ( ) . getClassLoader ( ) ; } Class < ? > sparkSubmit ; // SPARK - 22941 : first try the new SparkSubmit interface that has better error handling , // but fall back to the old interface in case someone is mixing & matching launcher and // Spark versions . try { sparkSubmit = cl . loadClass ( "org.apache.spark.deploy.InProcessSparkSubmit" ) ; } catch ( Exception e1 ) { try { sparkSubmit = cl . loadClass ( "org.apache.spark.deploy.SparkSubmit" ) ; } catch ( Exception e2 ) { throw new IOException ( "Cannot find SparkSubmit; make sure necessary jars are available." , e2 ) ; } } Method main ; try { main = sparkSubmit . getMethod ( "main" , String [ ] . class ) ; } catch ( Exception e ) { throw new IOException ( "Cannot find SparkSubmit main method." , e ) ; } CommandBuilderUtils . checkState ( Modifier . isStatic ( main . getModifiers ( ) ) , "main method is not static." ) ; return main ;
public class TimephasedUtility {
    /**
     * Used to locate the first timephased resource assignment block which
     * intersects with the target date range.
     *
     * @param <T> payload type
     * @param range target date range
     * @param assignments timephased resource assignments
     * @param startIndex index at which to start the search
     * @return index of timephased resource assignment which intersects with
     *         the target date range, or -1 when none intersects
     */
    private <T extends TimephasedItem<?>> int getStartIndex(DateRange range, List<T> assignments, int startIndex) {
        int result = -1;
        if (assignments != null) {
            long rangeStart = range.getStart().getTime();
            long rangeEnd = range.getEnd().getTime();
            for (int loop = startIndex; loop < assignments.size(); loop++) {
                T assignment = assignments.get(loop);
                // DOC: DateHelper.compare(start, finish, target) is assumed to
                // return >0 when target is after [start, finish], 0 when inside,
                // <0 when before — TODO confirm against DateHelper.
                int compareResult = DateHelper.compare(assignment.getStart(), assignment.getFinish(), rangeStart);
                // The start of the target range falls after the assignment end -
                // move on to test the next assignment.
                if (compareResult > 0) {
                    continue;
                }
                // The start of the target range falls within the assignment -
                // return the index of this assignment to the caller.
                if (compareResult == 0) {
                    result = loop;
                    break;
                }
                // At this point, we know that the start of the target range is
                // before the assignment start. We need to determine if the end
                // of the target range overlaps the assignment.
                compareResult = DateHelper.compare(assignment.getStart(), assignment.getFinish(), rangeEnd);
                if (compareResult >= 0) {
                    result = loop;
                    break;
                }
            }
        }
        return result;
    }
}
public class ListWidget { /** * Recycle all views in the list . The host views might be reused for other data to * save resources on creating new widgets . */ protected void recycleChildren ( ) { } }
for ( ListItemHostWidget host : getAllHosts ( ) ) { recycle ( host ) ; } mContent . onTransformChanged ( ) ; mContent . requestLayout ( ) ;
public class AbstractWizardModel { /** * Gets the current list of wizard steps , flattening nested ( dependent ) pages based on the * user ' s choices . */ public List < Page > getCurrentPageSequence ( ) { } }
ArrayList < Page > flattened = new ArrayList < Page > ( ) ; mRootPageList . flattenCurrentPageSequence ( flattened ) ; return flattened ;
public class LogRecordContext { /** * Registers new context extension . To avoid memory leaks Extensions are * stored as weak references . It means that caller need to keep strong * reference ( a static field for example ) to keep that extension in the * registration map . * @ param key * String key to associate with the registered extension * @ param extension * { @ link Extension } implementation returning extension runtime * values * @ throws IllegalArgumentException * if parameter < code > key < / code > or < code > extension < / code > are * < code > null < / code > ; or if < code > key < / code > already has * extension associated with it . */ public static void registerExtension ( String key , Extension extension ) { } }
if ( key == null || extension == null ) { throw new IllegalArgumentException ( "Neither 'key' nor 'extension' parameter can be null." ) ; } w . lock ( ) ; try { if ( extensionMap . containsKey ( key ) ) { throw new IllegalArgumentException ( "Extension with the key " + key + " is registered already" ) ; } extensionMap . put ( key , new WeakReference < Extension > ( extension ) ) ; } finally { w . unlock ( ) ; }
public class Padding {
    /**
     * Allows to set a Table's padding with the Padding object, which has to be
     * done externally, as it's not part of the standard libGDX API.
     *
     * @param table will have the padding set according to this object's data.
     * @return the given table for chaining.
     */
    public Table applyPadding(final Table table) {
        // Apply the four stored edge values in libGDX's (top, left, bottom, right) order.
        table.pad(top, left, bottom, right);
        return table;
    }
}
public class LFltUnaryOpDelta {
    /**
     * Null-safe structural equality between two LFltUnaryOpDelta instances:
     * equal when both are null, or both are the same concrete class and their
     * (function, deltaFunction, lastValue) triples are equal.
     */
    public static boolean argEquals(LFltUnaryOpDelta the, Object that) {
        // Null.equals handles the both-null / one-null cases; the lambda runs
        // only when both references are non-null.
        return Null.<LFltUnaryOpDelta>equals(the, that, (one, two) -> {
            // Must be exactly the same class, not merely assignable.
            if (one.getClass() != two.getClass()) {
                return false;
            }
            LFltUnaryOpDelta other = (LFltUnaryOpDelta) two;
            return LBiObjFltTriple.argEquals(one.function, one.deltaFunction, one.lastValue(), other.function, other.deltaFunction, other.lastValue());
        });
    }
}
public class HldEntitiesProcessorNames { /** * < p > Get thing for given class and thing name . < / p > * @ param pClass a Class * @ param pThingName Thing Name * @ return a thing */ @ Override public final String getFor ( final Class < ? > pClass , final String pThingName ) { } }
if ( "entityEdit" . equals ( pThingName ) ) { if ( IPersistableBase . class . isAssignableFrom ( pClass ) ) { return PrcEntityPbEditDelete . class . getSimpleName ( ) ; } else { return PrcEntityRetrieve . class . getSimpleName ( ) ; } } else if ( "entityPrint" . equals ( pThingName ) ) { return PrcEntityRetrieve . class . getSimpleName ( ) ; } else if ( "entityCopy" . equals ( pThingName ) ) { if ( IPersistableBase . class . isAssignableFrom ( pClass ) ) { return PrcEntityPbCopy . class . getSimpleName ( ) ; } else { return PrcEntityCopy . class . getSimpleName ( ) ; } } else if ( "entitySave" . equals ( pThingName ) ) { if ( IPersistableBase . class . isAssignableFrom ( pClass ) ) { return PrcEntityPbSave . class . getSimpleName ( ) ; } else if ( EmailMsg . class == pClass ) { return PrcEmailMsgSave . class . getSimpleName ( ) ; } else { return PrcEntitySave . class . getSimpleName ( ) ; } } else if ( "entityFolDelete" . equals ( pThingName ) ) { if ( Eattachment . class == pClass ) { return PrcEntityFfolDelete . class . getSimpleName ( ) ; } return PrcEntityFolDelete . class . getSimpleName ( ) ; } else if ( "entityFolSave" . equals ( pThingName ) ) { if ( Eattachment . class == pClass ) { return PrcEntityFfolSave . class . getSimpleName ( ) ; } return PrcEntityFolSave . class . getSimpleName ( ) ; } else if ( "entityDelete" . equals ( pThingName ) ) { if ( IPersistableBase . class . isAssignableFrom ( pClass ) ) { return PrcEntityPbDelete . class . getSimpleName ( ) ; } else { return PrcEntityDelete . class . getSimpleName ( ) ; } } else if ( "entityCreate" . equals ( pThingName ) ) { if ( CsvColumn . class == pClass ) { return PrcCsvColumnCreate . class . getSimpleName ( ) ; } else if ( MatchForeignLine . class == pClass ) { return PrcMatchForeignLineCreate . class . getSimpleName ( ) ; } else { return PrcEntityCreate . class . getSimpleName ( ) ; } } else if ( "entityConfirmDelete" . equals ( pThingName ) ) { if ( IPersistableBase . class . 
isAssignableFrom ( pClass ) ) { return PrcEntityPbEditDelete . class . getSimpleName ( ) ; } else { return PrcEntityRetrieve . class . getSimpleName ( ) ; } } return null ;
public class ImageAssetManager { /** * Returns the previously set bitmap or null . */ @ Nullable public Bitmap updateBitmap ( String id , @ Nullable Bitmap bitmap ) { } }
if ( bitmap == null ) { LottieImageAsset asset = imageAssets . get ( id ) ; Bitmap ret = asset . getBitmap ( ) ; asset . setBitmap ( null ) ; return ret ; } Bitmap prevBitmap = imageAssets . get ( id ) . getBitmap ( ) ; putBitmap ( id , bitmap ) ; return prevBitmap ;
public class Email { /** * Add a " to " recipient address . * @ param to * one or several recipient addresses * @ return this instance for fluent chaining */ public Email to ( EmailAddress ... to ) { } }
for ( EmailAddress t : to ) { recipient ( t , RecipientType . TO ) ; } return this ;
public class Handover {
    /**
     * Reports an exception. The consumer will throw the given exception
     * immediately, if it is currently blocked in the {@link #pollNext()}
     * method, or the next time it calls that method.
     *
     * <p>After this method has been called, no call to either
     * {@link #produce(ConsumerRecords)} or {@link #pollNext()} will ever return
     * regularly any more, but will always return exceptionally.
     *
     * <p>If another exception was already reported, this method does nothing.
     *
     * <p>For the producer, the Handover will appear as if it was
     * {@link #close() closed}.
     *
     * @param t The exception to report.
     */
    public void reportError(Throwable t) {
        checkNotNull(t);
        synchronized (lock) {
            // do not override the initial exception
            if (error == null) {
                error = t;
            }
            // Drop any pending element so the consumer observes the error, then
            // wake every thread blocked on the lock.
            next = null;
            lock.notifyAll();
        }
    }
}
public class XmlEscape { /** * Perform an XML 1.1 level 2 ( markup - significant and all non - ASCII chars ) < strong > escape < / strong > operation * on a < tt > String < / tt > input meant to be an XML attribute value . * < em > Level 2 < / em > means this method will escape : * < ul > * < li > The five markup - significant characters : < tt > & lt ; < / tt > , < tt > & gt ; < / tt > , < tt > & amp ; < / tt > , * < tt > & quot ; < / tt > and < tt > & # 39 ; < / tt > < / li > * < li > All non ASCII characters . < / li > * < / ul > * This escape will be performed by replacing those chars by the corresponding XML Character Entity References * ( e . g . < tt > ' & amp ; lt ; ' < / tt > ) when such CER exists for the replaced character , and replacing by a hexadecimal * character reference ( e . g . < tt > ' & amp ; # x2430 ; ' < / tt > ) when there there is no CER for the replaced character . * Besides , being an attribute value also < tt > & # 92 ; t < / tt > , < tt > & # 92 ; n < / tt > and < tt > & # 92 ; r < / tt > will * be escaped to avoid white - space normalization from removing line feeds ( turning them into white * spaces ) during future parsing operations . * This method calls { @ link # escapeXml11 ( String , XmlEscapeType , XmlEscapeLevel ) } with the following * preconfigured values : * < ul > * < li > < tt > type < / tt > : * { @ link org . unbescape . xml . XmlEscapeType # CHARACTER _ ENTITY _ REFERENCES _ DEFAULT _ TO _ HEXA } < / li > * < li > < tt > level < / tt > : * { @ link org . unbescape . xml . XmlEscapeLevel # LEVEL _ 2 _ ALL _ NON _ ASCII _ PLUS _ MARKUP _ SIGNIFICANT } < / li > * < / ul > * This method is < strong > thread - safe < / strong > . * @ param text the < tt > String < / tt > to be escaped . * @ return The escaped result < tt > String < / tt > . 
As a memory - performance improvement , will return the exact * same object as the < tt > text < / tt > input argument if no escaping modifications were required ( and * no additional < tt > String < / tt > objects will be created during processing ) . Will * return < tt > null < / tt > if input is < tt > null < / tt > . * @ since 1.1.5 */ public static String escapeXml11Attribute ( final String text ) { } }
return escapeXml ( text , XmlEscapeSymbols . XML11_ATTRIBUTE_SYMBOLS , XmlEscapeType . CHARACTER_ENTITY_REFERENCES_DEFAULT_TO_HEXA , XmlEscapeLevel . LEVEL_2_ALL_NON_ASCII_PLUS_MARKUP_SIGNIFICANT ) ;
public class ConcentrationNormalizer { /** * deals with the normalization of special cases */ private ValueUnitWrapper normalizeSpecialCases ( double value , String unit ) throws UnknownUnitException { } }
if ( unit . equals ( "mol/dm3" ) ) { return new ValueUnitWrapper ( value , MOLAR_NORMALIZED_UNIT ) ; } else { throw new UnknownUnitException ( unit ) ; }
public class BaseField {
    /**
     * Convert the field's value to an index (for popup); usually overridden.
     * Only the first character matters: '0'-'9' map to 0-9, 'A'-'Z' and
     * 'a'-'z' map to 1-26; the empty string and unrecognized characters
     * yield the default index 1.
     *
     * @param tempString The string to convert to an index.
     * @return The resulting index.
     */
    public int convertStringToIndex(String tempString) {
        int index = 1; // default for empty/unrecognized input
        if (tempString.length() == 0) {
            return index;
        }
        char first = tempString.charAt(0);
        // Use short-circuit && instead of the original non-idiomatic bitwise &
        // on booleans; the three ranges are disjoint, so else-if is equivalent
        // to the original independent ifs.
        if (first >= '0' && first <= '9') {
            index = first - '0';        // '1' = 1, '2' = 2, ...
        } else if (first >= 'A' && first <= 'Z') {
            index = first - 'A' + 1;    // 'A' = 1, 'B' = 2, ...
        } else if (first >= 'a' && first <= 'z') {
            index = first - 'a' + 1;    // 'a' = 1, 'b' = 2, ...
        }
        return index; // Return the position
    }
}
public class AbstractCoalescingBufferQueue {

    /**
     * Compose {@code cumulation} and {@code next} into a new {@link CompositeByteBuf}.
     *
     * <p>Ownership: on success the returned composite owns both input buffers. On failure
     * the partially-built composite is released (which also releases any component already
     * added to it) and {@code next} is released defensively before the cause is rethrown.</p>
     */
    protected final ByteBuf composeIntoComposite(ByteBufAllocator alloc, ByteBuf cumulation, ByteBuf next) {
        // Create a composite buffer to accumulate this pair and potentially all the buffers
        // in the queue. Using +2 as we have already dequeued current and next.
        CompositeByteBuf composite = alloc.compositeBuffer(size() + 2);
        try {
            // addComponent(true, ...) transfers ownership and increases the writer index.
            composite.addComponent(true, cumulation);
            composite.addComponent(true, next);
        } catch (Throwable cause) {
            composite.release();
            // NOTE(review): if the first addComponent threw, next was never added to the
            // composite, so it is released here; if the second threw, this may be a
            // double-release of next -- presumably safeRelease tolerates that. TODO confirm.
            safeRelease(next);
            throwException(cause); // rethrows the Throwable unchecked
        }
        return composite;
    }
}
public class CmsShellCommands { /** * Displays a list of all resources in the current folder . < p > * @ throws Exception if something goes wrong * @ see CmsObject # getResourcesInFolder ( String , CmsResourceFilter ) */ public void ls ( ) throws Exception { } }
String folder = CmsResource . getFolderPath ( m_cms . getRequestContext ( ) . getUri ( ) ) ; List < CmsResource > resources = m_cms . getResourcesInFolder ( folder , CmsResourceFilter . IGNORE_EXPIRATION ) ; m_shell . getOut ( ) . println ( "\n" + getMessages ( ) . key ( Messages . GUI_SHELL_LS_2 , folder , new Integer ( resources . size ( ) ) ) ) ; Iterator < CmsResource > i = resources . iterator ( ) ; while ( i . hasNext ( ) ) { CmsResource r = i . next ( ) ; m_shell . getOut ( ) . println ( m_cms . getSitePath ( r ) ) ; } m_shell . getOut ( ) . println ( ) ;
public class NumberConverter { /** * Creates a Number for the { @ code source } and { @ code destinationType } . */ Number numberFor ( Number source , Class < ? > destinationType ) { } }
if ( destinationType . equals ( source . getClass ( ) ) ) return source ; if ( destinationType . equals ( Byte . class ) ) { long longValue = source . longValue ( ) ; if ( longValue > Byte . MAX_VALUE ) throw new Errors ( ) . errorTooLarge ( source , destinationType ) . toMappingException ( ) ; if ( longValue < Byte . MIN_VALUE ) throw new Errors ( ) . errorTooSmall ( source , destinationType ) . toMappingException ( ) ; return Byte . valueOf ( source . byteValue ( ) ) ; } if ( destinationType . equals ( Short . class ) ) { long longValue = source . longValue ( ) ; if ( longValue > Short . MAX_VALUE ) throw new Errors ( ) . errorTooLarge ( source , destinationType ) . toMappingException ( ) ; if ( longValue < Short . MIN_VALUE ) throw new Errors ( ) . errorTooSmall ( source , destinationType ) . toMappingException ( ) ; return Short . valueOf ( source . shortValue ( ) ) ; } if ( destinationType . equals ( Integer . class ) ) { long longValue = source . longValue ( ) ; if ( longValue > Integer . MAX_VALUE ) throw new Errors ( ) . errorTooLarge ( source , destinationType ) . toMappingException ( ) ; if ( longValue < Integer . MIN_VALUE ) throw new Errors ( ) . errorTooSmall ( source , destinationType ) . toMappingException ( ) ; return Integer . valueOf ( source . intValue ( ) ) ; } if ( destinationType . equals ( Long . class ) ) return Long . valueOf ( source . longValue ( ) ) ; if ( destinationType . equals ( Float . class ) ) { if ( source . doubleValue ( ) > Float . MAX_VALUE ) throw new Errors ( ) . errorTooLarge ( source , destinationType ) . toMappingException ( ) ; return Float . valueOf ( source . floatValue ( ) ) ; } if ( destinationType . equals ( Double . class ) ) return Double . valueOf ( source . doubleValue ( ) ) ; if ( destinationType . equals ( BigDecimal . class ) ) { if ( source instanceof Float || source instanceof Double ) return new BigDecimal ( source . 
toString ( ) ) ; else if ( source instanceof BigInteger ) return new BigDecimal ( ( BigInteger ) source ) ; else return BigDecimal . valueOf ( source . longValue ( ) ) ; } if ( destinationType . equals ( BigInteger . class ) ) { if ( source instanceof BigDecimal ) return ( ( BigDecimal ) source ) . toBigInteger ( ) ; else return BigInteger . valueOf ( source . longValue ( ) ) ; } throw new Errors ( ) . errorMapping ( source , destinationType ) . toMappingException ( ) ;
public class ApolloCallTracker { /** * < p > Removes provided { @ link ApolloQueryWatcher } that finished his execution , if it is found , else throws an * { @ link AssertionError } . < / p > * If the removal operation is successful and no active running calls are found , then the registered * { @ link ApolloCallTracker # idleResourceCallback } is invoked . * < p > < b > Note < / b > : This method needs to be called right after an apolloCall is completed ( whether successful or * failed ) . < / p > */ void unregisterQueryWatcher ( @ NotNull ApolloQueryWatcher queryWatcher ) { } }
checkNotNull ( queryWatcher , "queryWatcher == null" ) ; OperationName operationName = queryWatcher . operation ( ) . name ( ) ; unregisterCall ( activeQueryWatchers , operationName , queryWatcher ) ;
public class QrCodeAlignmentPatternLocator {

    /**
     * If the initial guess is within the inner white circle or black dot this will ensure
     * that it is centered on the black dot.
     *
     * <p>Gradient-descent style search: samples a 3x3 grid of intensities around the guess,
     * computes horizontal/vertical intensity gradients, and steps toward the point where the
     * gradient magnitude is minimal (the center of the dot). The step size shrinks by 0.75
     * whenever a step fails to improve.</p>
     */
    boolean centerOnSquare(QrCode.Alignment pattern, float guessY, float guessX) {
        float step = 1;
        float bestMag = Float.MAX_VALUE; // smallest gradient magnitude seen so far
        float bestX = guessX;
        float bestY = guessY;
        // Fixed budget of 10 refinement iterations.
        for (int i = 0; i < 10; i++) {
            // Sample a 3x3 neighborhood centered on the current guess (grid coordinates).
            for (int row = 0; row < 3; row++) {
                float gridy = guessY - 1f + row;
                for (int col = 0; col < 3; col++) {
                    float gridx = guessX - 1f + col;
                    samples[row * 3 + col] = reader.read(gridy, gridx);
                }
            }
            // Intensity gradient: right column minus left column, bottom row minus top row.
            float dx = (samples[2] + samples[5] + samples[8]) - (samples[0] + samples[3] + samples[6]);
            float dy = (samples[6] + samples[7] + samples[8]) - (samples[0] + samples[1] + samples[2]);
            float r = (float) Math.sqrt(dx * dx + dy * dy);
            if (bestMag > r) {
                // System.out.println("  good step at " + i);
                bestMag = r;
                bestX = guessX;
                bestY = guessY;
            } else {
                // System.out.println("  bad step at " + i);
                step *= 0.75f; // step failed to improve -> shrink the step size
            }
            if (r > 0) {
                // Move from the best point so far along the normalized gradient.
                guessX = bestX + step * dx / r;
                guessY = bestY + step * dy / r;
            } else {
                break; // zero gradient: already perfectly centered
            }
        }
        pattern.moduleFound.x = bestX;
        pattern.moduleFound.y = bestY;
        // Convert the refined grid coordinate back into image pixels.
        reader.gridToImage((float) pattern.moduleFound.y, (float) pattern.moduleFound.x, pattern.pixel);
        return true;
    }
}
public class Spies { /** * Monitors calls to a binary function . * @ param < T1 > the function first parameter type * @ param < T2 > the function second parameter type * @ param < R > the function result type * @ param function the function that will be monitored * @ param calls a value holder accumulating calls * @ return the proxied function */ public static < T1 , T2 , R > BiFunction < T1 , T2 , R > monitor ( BiFunction < T1 , T2 , R > function , AtomicLong calls ) { } }
return new BinaryMonitoringFunction < > ( function , calls ) ;
public class TaskLogDao { /** * TaskManager use only */ public String start ( String taskId ) { } }
long now = System . currentTimeMillis ( ) ; return insert ( "INSERT INTO TaskLogs (taskId, resultType, startDate) VALUES (?, ?, ?)" , Integer . parseInt ( taskId ) , TaskLog . INCOMPLETE , now ) ;
public class ElementLayout { /** * Sets the linked state . A change in this state requires reloading of the associated layout . * @ param linked The linked state . */ public void setLinked ( boolean linked ) { } }
if ( linked != this . linked ) { this . linked = linked ; if ( ! initializing ) { internalDeserialize ( true ) ; getRoot ( ) . activate ( true ) ; } }
public class AbstractPageFactory {

    /**
     * Sets the application name.
     *
     * @param applicationName The application name. This argument must not be {@code null} or empty.
     * @throws java.lang.IllegalArgumentException Thrown if the specified {@code applicationName}
     *         is {@code null} or empty.
     * @since 1.0.0
     */
    protected final void setApplicationName(String applicationName) throws IllegalArgumentException {
        // Fail fast before touching shared state.
        validateNotEmpty(applicationName, "applicationName");
        // Guard the shared properties map against concurrent writers
        // (assumes other accessors also synchronize on this -- TODO confirm).
        synchronized (this) {
            properties.put(APPLICATION_NAME, applicationName);
        }
    }
}
public class View {

    /**
     * Executes the view query with the given parameters, see
     * {@link #executeRaw(Iterable, Configuration)}.
     *
     * @param items  The items in the bucket which should be processed via the view
     * @param config The configuration for the view, acting as a filter on the items processed
     * @return A results object which may be inspected
     * @throws QueryExecutionException If there was an error while processing the options
     */
    public QueryResult execute(Iterable<Item> items, Configuration config) throws QueryExecutionException {
        // Run the raw view, decode its JSON payload into a map, and wrap it for inspection.
        return new QueryResult(JsonUtils.decodeAsMap(executeRaw(items, config)));
    }
}
public class JXMapViewer {

    /**
     * Calculates a zoom level so that all points in the specified set will be visible on screen.
     * This is useful if you have a bunch of points in an area like a city and you want to zoom out
     * so that the entire city and its points are visible without panning.
     *
     * <p>Iteratively re-centers on the bounding rectangle of the positions and zooms out one
     * level at a time until the viewport contains the rectangle, capped at 30 iterations and
     * zoom level 15.</p>
     *
     * @param positions A set of GeoPositions to calculate the new zoom from
     */
    public void calculateZoomFrom(Set<GeoPosition> positions) {
        // u.p("calculating a zoom based on:");
        // u.p(positions);
        // Nothing to fit with fewer than two points.
        if (positions.size() < 2) {
            return;
        }
        int zoom = getZoom();
        Rectangle2D rect = generateBoundingRect(positions, zoom);
        // Rectangle2D viewport = map.getViewportBounds();
        int count = 0; // safety counter to guarantee termination
        while (!getViewportBounds().contains(rect)) {
            // u.p("not contained");
            // Re-center the map on the middle of the bounding rectangle.
            Point2D centr = new Point2D.Double(rect.getX() + rect.getWidth() / 2,
                    rect.getY() + rect.getHeight() / 2);
            GeoPosition px = getTileFactory().pixelToGeo(centr, zoom);
            // u.p("new geo = " + px);
            setCenterPosition(px);
            count++;
            if (count > 30)
                break;
            // Re-centering alone may have been enough; check before zooming out.
            if (getViewportBounds().contains(rect)) {
                // u.p("did it finally");
                break;
            }
            zoom = zoom + 1;
            if (zoom > 15) // TODO: use maxZoom of the tfInfo
            {
                break;
            }
            setZoom(zoom);
            // The bounding rectangle is in pixel space, so it changes with the zoom level.
            rect = generateBoundingRect(positions, zoom);
        }
    }
}
public class ScriptSourceAppender {

    /**
     * Creates the internal SARL script.
     *
     * <p>Pure delegation: initialization is handled entirely by the wrapped builder.</p>
     *
     * @param resource    the resource the script is created in
     * @param packageName the package name of the script
     * @param context     the type provider used for type resolution
     */
    public void eInit(Resource resource, String packageName, IJvmTypeProvider context) {
        this.builder.eInit(resource, packageName, context);
    }
}
public class HttpInboundServiceContextImpl {

    /**
     * Adds {@code addition} bytes to the running incoming-message size and enforces the
     * configured message size limit.
     *
     * @param addition number of bytes just received
     * @throws MessageTooLargeException if the accumulated size exceeds the channel limit and
     *         the factory does not allow it as a "large" message
     * @see com.ibm.ws.http.channel.internal.HttpServiceContextImpl#checkIncomingMessageLimit(int)
     */
    @Override
    protected void checkIncomingMessageLimit(long addition) throws MessageTooLargeException {
        // Always accumulate first, so the total stays accurate even when unlimited.
        super.addToIncomingMsgSize(addition);
        // check if we even need to bother comparing sizes against limits
        if (HttpConfigConstants.UNLIMITED == getHttpConfig().getMessageSizeLimit()) {
            return;
        }
        if (queryIncomingMsgSize() > getHttpConfig().getMessageSizeLimit()) {
            // over channel limit, see if the factory has a system wide "large" limit
            HttpInboundChannelFactory factory = this.myLink.getChannel().getFactory();
            if (factory.getConfig().areMessagesLimited() && factory.allowLargeMessage(queryIncomingMsgSize())) {
                // Permitted as a large message; remember that for downstream handling.
                this.bContainsLargeMessage = true;
            } else {
                throw new MessageTooLargeException("Size=" + queryIncomingMsgSize());
            }
        }
    }
}
public class LocalDateTimeConverter { /** * Gson invokes this call - back method during serialization when it encounters a field of the * specified type . < p > * In the implementation of this call - back method , you should consider invoking * { @ link JsonSerializationContext # serialize ( Object , Type ) } method to create JsonElements for any * non - trivial field of the { @ code src } object . However , you should never invoke it on the * { @ code src } object itself since that will cause an infinite loop ( Gson will call your * call - back method again ) . * @ param src the object that needs to be converted to Json . * @ param typeOfSrc the actual type ( fully genericized version ) of the source object . * @ return a JsonElement corresponding to the specified object . */ @ Override public JsonElement serialize ( LocalDateTime src , Type typeOfSrc , JsonSerializationContext context ) { } }
return new JsonPrimitive ( FORMATTER . format ( src ) ) ;
public class LToFltBiFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < T1 , T2 > LToFltBiFunction < T1 , T2 > toFltBiFunctionFrom ( Consumer < LToFltBiFunctionBuilder < T1 , T2 > > buildingFunction ) { } }
LToFltBiFunctionBuilder builder = new LToFltBiFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class SentryHandler { /** * Returns formatted Event message when provided the message template and * parameters . * @ param message Message template body . * @ param parameters Array of parameters for the message . * @ return Formatted message . */ protected String formatMessage ( String message , Object [ ] parameters ) { } }
String formatted ; if ( printfStyle ) { formatted = String . format ( message , parameters ) ; } else { formatted = MessageFormat . format ( message , parameters ) ; } return formatted ;
public class LifecycleCommitChaincodeDefinitionRequest { /** * The endorsement policy used by this chaincode . Only this or { @ link # setValidationParameter ( byte [ ] ) } maybe used in one request . * @ param chaincodeEndorsementPolicy * @ throws InvalidArgumentException */ public void setChaincodeEndorsementPolicy ( LifecycleChaincodeEndorsementPolicy chaincodeEndorsementPolicy ) throws InvalidArgumentException { } }
if ( null == chaincodeEndorsementPolicy ) { throw new InvalidArgumentException ( " The parameter chaincodeEndorsementPolicy may not be null." ) ; } validationParameter = chaincodeEndorsementPolicy . getByteString ( ) ;
public class LastaShowbaseFilter { protected void initFilterHook ( FilterConfig filterConfig ) throws ServletException { } }
for ( FilterHook hook : assistOutsideHookList ( ) ) { hook . init ( filterConfig ) ; }
public class CasAuthenticationHandler { /** * ( non - Javadoc ) * @ see org . esigate . extension . Extension # init ( org . esigate . Driver , java . util . Properties ) */ @ Override public final void init ( Driver d , Properties properties ) { } }
this . driver = d ; this . driver . getEventManager ( ) . register ( EventManager . EVENT_FRAGMENT_PRE , this ) ; this . driver . getEventManager ( ) . register ( EventManager . EVENT_FRAGMENT_POST , this ) ; loginUrl = CAS_LOGIN_URL . getValue ( properties ) ;
public class ManagedCompletableFuture { /** * Provides the implementation of managedExecutor . completedFuture ( value ) where the target * executor is the default asynchronous execution facility . * @ param value result of the completed future * @ param executor executor to become the default asynchronous execution facility for the completed future * @ return completed completable future */ @ Trivial // traced by caller static < U > CompletableFuture < U > completedFuture ( U value , Executor executor ) { } }
if ( JAVA8 ) { return new ManagedCompletableFuture < U > ( CompletableFuture . completedFuture ( value ) , executor , null ) ; } else { ManagedCompletableFuture < U > future = new ManagedCompletableFuture < U > ( executor , null ) ; future . super_complete ( value ) ; return future ; }
public class SyncImpl { /** * Sets the SyncFrequency on this collection . * @ param syncFrequency the SyncFrequency that contains all the desired options * @ return A Task that completes when the SyncFrequency has been updated */ public Task < Void > updateSyncFrequency ( @ NonNull final SyncFrequency syncFrequency ) { } }
return this . dispatcher . dispatchTask ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { SyncImpl . this . proxy . updateSyncFrequency ( syncFrequency ) ; return null ; } } ) ;
public class NodeUtil { /** * Merge a block with its parent block . * @ return Whether the block was removed . */ public static boolean tryMergeBlock ( Node block , boolean alwaysMerge ) { } }
checkState ( block . isBlock ( ) ) ; Node parent = block . getParent ( ) ; boolean canMerge = alwaysMerge || canMergeBlock ( block ) ; // Try to remove the block if its parent is a block / script or if its // parent is label and it has exactly one child . if ( isStatementBlock ( parent ) && canMerge ) { Node previous = block ; while ( block . hasChildren ( ) ) { Node child = block . removeFirstChild ( ) ; parent . addChildAfter ( child , previous ) ; previous = child ; } parent . removeChild ( block ) ; return true ; } else { return false ; }
public class HarrisFast { /** * 角点判断的依据 * @ param x * @ param y * @ param k * , 一般设为0.06 * @ return */ private float harrisMeasure ( int x , int y , float k ) { } }
float m00 = this . Lx2 [ x ] [ y ] ; float m01 = this . Lxy [ x ] [ y ] ; float m10 = this . Lxy [ x ] [ y ] ; float m11 = this . Ly2 [ x ] [ y ] ; return m00 * m11 - m01 * m10 - k * ( m00 + m11 ) * ( m00 + m11 ) ;
public class CubicBezierCurve {

    /**
     * Experimental! Makes the start of the curve mirror the end.
     *
     * <p>Repositions the first control point P1 so that its angle/distance relative to P0
     * mirrors the angle/distance of P2 relative to P3 (assumption based on the angle
     * arithmetic below -- TODO confirm intended geometry).</p>
     */
    public void curveStart() {
        double d0 = AbstractPoint.angle(P[start], P[start + 3]); // P0 -> P3
        double d1 = AbstractPoint.angle(P[start + 3], P[start]); // P3 -> P0
        double d2 = AbstractPoint.angle(P[start + 3], P[start + 2]); // P3 -> P2
        // Angle of P2 relative to the chord, as seen from P3.
        double a1 = d1 - d2;
        // Mirror that relative angle onto the chord direction at P0.
        double a2 = d0 + a1;
        double di = AbstractPoint.distance(P[start + 3], P[start + 2]);
        // Place P1 at the mirrored angle and the same distance P3->P2.
        P[start + 1] = AbstractPoint.move(P[start], a2, di);
    }
}
public class Word { /** * Retrieves the subword of this word starting at the given index and extending until the end of this word . Calling * this method is equivalent to calling * < pre > w . subWord ( fromIndex , w . length ( ) ) < / pre > * @ param fromIndex * the first index , inclusive * @ return the word representing the specified subrange */ @ Nonnull public final Word < I > subWord ( int fromIndex ) { } }
if ( fromIndex <= 0 ) { if ( fromIndex == 0 ) { return this ; } throw new IndexOutOfBoundsException ( "Invalid subword range [" + fromIndex + ",)" ) ; } return subWordInternal ( fromIndex , length ( ) ) ;
public class Ix {

    /**
     * Collects the elements of this sequence into a List.
     * The result's iterator() doesn't support remove().
     *
     * @return the new Ix instance
     * @since 1.0
     */
    public final Ix<List<T>> collectToList() {
        // Delegates to collect() with a list-producing factory and an element-appending
        // collector; the explicit <T> witnesses are needed for inference on the statics.
        return collect(ToListHelper.<T>initialFactory(), ToListHelper.<T>collector());
    }
}
public class UnconditionalValueDerefAnalysis {

    /**
     * If the analyzed method is annotated {@code @NonNull} on its return value, treat the value
     * on top of the stack at this return location as a potential dereference so that returning
     * a possibly-null value is reported.
     *
     * <p>(Note: the previous Javadoc described parameter checking; this method handles the
     * return-value contract.)</p>
     *
     * @param thisMethod the method whose return-value nullness contract is checked
     * @param location   the Location of the instruction
     * @param vnaFrame   the ValueNumberFrame at the Location of the instruction
     * @param fact       the dataflow value to modify
     * @throws DataflowAnalysisException
     */
    private void checkNonNullReturnValue(XMethod thisMethod, Location location, ValueNumberFrame vnaFrame,
            UnconditionalValueDerefSet fact) throws DataflowAnalysisException {
        INullnessAnnotationDatabase database = AnalysisContext.currentAnalysisContext().getNullnessAnnotationDatabase();
        // Only methods resolved as @NonNull on the return value are of interest.
        if (database.getResolvedAnnotation(thisMethod, true) != NullnessAnnotation.NONNULL) {
            return;
        }
        if (reportPotentialDereference(location, invDataflow.getFactAtLocation(location))) {
            // The returned value sits on top of the stack; record it as a dereference.
            ValueNumber vn = vnaFrame.getTopValue();
            fact.addDeref(vn, location);
        }
    }
}
public class AWSCloudCollectorTask { /** * The collection action . This is the task which will run on a schedule to * gather data from the feature content source system and update the * repository with retrieved . */ public void collect ( AWSCloudCollector collector ) { } }
log ( "Starting AWS collection..." ) ; log ( "Collecting AWS Cloud Data..." ) ; Map < String , List < CloudInstance > > accountToInstanceMap = collectInstances ( ) ; Map < String , String > instanceToAccountMap = new HashMap < > ( ) ; for ( String account : accountToInstanceMap . keySet ( ) ) { Collection < CloudInstance > instanceList = accountToInstanceMap . get ( account ) ; for ( CloudInstance ci : instanceList ) { instanceToAccountMap . put ( ci . getInstanceId ( ) , account ) ; } } collectVolume ( instanceToAccountMap ) ; log ( "Finished Cloud collection." ) ;
public class JsonModelBuilder { /** * Fixiates the current state for coding . * @ return A JsonModelCoder instance for the actual coding . * @ author vvakame */ public JsonModelCoder < T > fix ( ) { } }
Map < String , JsonPropertyCoder < T , ? > > properties = new LinkedHashMap < String , JsonPropertyCoder < T , ? > > ( ) ; for ( String key : map . keySet ( ) ) { JsonPropertyBuilder < T , ? > builder = map . get ( key ) ; JsonPropertyCoder < T , ? > fixed = builder . fix ( ) ; properties . put ( key , fixed ) ; } JsonModelCoder < T > fixed = new JsonModelCoder < T > ( baseClass , treatUnknownKeyAsError , properties ) ; return fixed ;
public class ArrayMapper { /** * Returns a new instance with the given element at the end . * @ param oldArray the old array * @ param newElement the element to contatenate * @ param < E > the element type * @ return a new array */ @ SuppressWarnings ( "unchecked" ) public < E extends T > T [ ] concat ( T [ ] oldArray , E newElement ) { } }
T [ ] arr = ( T [ ] ) Array . newInstance ( type , oldArray . length + 1 ) ; System . arraycopy ( oldArray , 0 , arr , 0 , oldArray . length ) ; arr [ oldArray . length ] = newElement ; return arr ;
public class MemorySegment {

    /**
     * Swaps bytes between two memory segments, using the given auxiliary buffer.
     *
     * @param tempBuffer The auxiliary buffer in which to put data during triangle swap.
     * @param seg2       Segment to swap bytes with
     * @param offset1    Offset of this segment to start swapping
     * @param offset2    Offset of seg2 to start swapping
     * @param len        Length of the swapped memory region
     */
    public final void swapBytes(byte[] tempBuffer, MemorySegment seg2, int offset1, int offset2, int len) {
        // Single branch-free sanity check: all four values must be non-negative
        // (any negative value makes the OR negative).
        if ((offset1 | offset2 | len | (tempBuffer.length - len)) >= 0) {
            final long thisPos = this.address + offset1;
            final long otherPos = seg2.address + offset2;
            // Both regions must lie entirely inside their segments.
            if (thisPos <= this.addressLimit - len && otherPos <= seg2.addressLimit - len) {
                // Triangle swap via the temp buffer:
                // this -> temp buffer
                UNSAFE.copyMemory(this.heapMemory, thisPos, tempBuffer, BYTE_ARRAY_BASE_OFFSET, len);
                // other -> this
                UNSAFE.copyMemory(seg2.heapMemory, otherPos, this.heapMemory, thisPos, len);
                // temp buffer -> other
                UNSAFE.copyMemory(tempBuffer, BYTE_ARRAY_BASE_OFFSET, seg2.heapMemory, otherPos, len);
                return;
            } else if (this.address > this.addressLimit) {
                // address > addressLimit is the freed-segment marker.
                throw new IllegalStateException("this memory segment has been freed.");
            } else if (seg2.address > seg2.addressLimit) {
                throw new IllegalStateException("other memory segment has been freed.");
            }
        }
        // index is in fact invalid
        throw new IndexOutOfBoundsException(
                String.format("offset1=%d, offset2=%d, len=%d, bufferSize=%d, address1=%d, address2=%d",
                        offset1, offset2, len, tempBuffer.length, this.address, seg2.address));
    }
}
public class VarOptItemsSketch {

    /**
     * Decreases the sketch's value of k by 1, updating stored values as needed.
     *
     * <p>Subject to certain pre-conditions, decreasing k causes tau to increase. This fact is
     * used by the unioning algorithm to force "marked" items out of H and into the reservoir
     * region.</p>
     *
     * <p>State regions: h_ = number of exact ("heavy") samples, r_ = reservoir size, with a
     * one-slot gap between them in the backing arrays.</p>
     */
    void decreaseKBy1() {
        if (k_ <= 1) {
            throw new SketchesStateException("Cannot decrease k below 1 in union");
        }
        if ((h_ == 0) && (r_ == 0)) {
            // exact mode, but no data yet; this reduction is somewhat gratuitous
            --k_;
        } else if ((h_ > 0) && (r_ == 0)) {
            // exact mode, but we have some data
            --k_;
            if (h_ > k_) {
                // Too many exact samples for the new k: switch into reservoir mode.
                transitionFromWarmup();
            }
        } else if ((h_ > 0) && (r_ > 0)) {
            // reservoir mode, but we have some exact samples.
            // Our strategy will be to pull an item out of H (which we are allowed to do since it's
            // still just data), reduce k, and then re-insert the item

            // first, slide the R zone to the left by 1, temporarily filling the gap
            final int oldGapIdx = h_;
            final int oldFinalRIdx = (h_ + 1 + r_) - 1;
            assert oldFinalRIdx == k_;
            swapValues(oldFinalRIdx, oldGapIdx);

            // now we pull an item out of H; any item is ok, but if we grab the rightmost and then
            // reduce h_, the heap invariant will be preserved (and the gap will be restored), plus
            // the push() of the item that will probably happen later will be cheap.
            final int pulledIdx = h_ - 1;
            final T pulledItem = data_.get(pulledIdx);
            final double pulledWeight = weights_.get(pulledIdx);
            final boolean pulledMark = marks_.get(pulledIdx);
            if (pulledMark) {
                --numMarksInH_;
            }
            weights_.set(pulledIdx, -1.0); // to make bugs easier to spot
            --h_;
            --k_;
            --n_; // will be re-incremented with the update
            update(pulledItem, pulledWeight, pulledMark);
        } else if ((h_ == 0) && (r_ > 0)) {
            // pure reservoir mode, so can simply eject a randomly chosen sample from the reservoir
            assert r_ >= 2;
            final int rIdxToDelete = 1 + SamplingUtil.rand.nextInt(r_); // 1 for the gap
            final int rightmostRIdx = (1 + r_) - 1;
            // Move the victim into the rightmost slot, then shrink the reservoir by one.
            swapValues(rIdxToDelete, rightmostRIdx);
            weights_.set(rightmostRIdx, -1.0);
            --k_;
            --r_;
        }
    }
}
public class Element { /** * Inserts the given child nodes into this element at the specified index . Current nodes will be shifted to the * right . The inserted nodes will be moved from their current parent . To prevent moving , copy the nodes first . * @ param index 0 - based index to insert children at . Specify { @ code 0 } to insert at the start , { @ code - 1 } at the * end * @ param children child nodes to insert * @ return this element , for chaining . */ public Element insertChildren ( int index , Node ... children ) { } }
Validate . notNull ( children , "Children collection to be inserted must not be null." ) ; int currentSize = childNodeSize ( ) ; if ( index < 0 ) index += currentSize + 1 ; // roll around Validate . isTrue ( index >= 0 && index <= currentSize , "Insert position out of bounds." ) ; addChildren ( index , children ) ; return this ;
public class ProxyQueueConversationGroupImpl { /** * Invoked to notify the group that the conversation that backs it has gone away . * Iterate over the queues and notify them . */ public void conversationDroppedNotification ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "conversationDroppedNotification" ) ; LinkedList < ProxyQueue > notifyList = null ; synchronized ( this ) { // Make a copy of the map ' s values to avoid a concurrent modification // exception later . notifyList = new LinkedList < ProxyQueue > ( ) ; notifyList . addAll ( idToProxyQueueMap . values ( ) ) ; } Iterator iterator = notifyList . iterator ( ) ; while ( iterator . hasNext ( ) ) { ProxyQueue queue = ( ProxyQueue ) iterator . next ( ) ; queue . conversationDroppedNotification ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "conversationDroppedNotification" ) ;
public class FunctionGenerator {

    /**
     * Checks to see if the *caller* of this expression should become part of a function.
     * If so, either a new function is created, or the expression becomes part of the last
     * function we created.
     *
     * @param parentExpr the calling expression being visited
     */
    private void visitParentExpression(AbstractFunctionExpression parentExpr) {
        // First caller seen since the last reset becomes the start of the next function body.
        if (nextFunctionBodyStart == null) {
            nextFunctionBodyStart = parentExpr;
        }
        if (currentFunctionName != null) {
            // A function is in progress: extend it with this expression.
            updateCurrentFunction(parentExpr);
        } else {
            createFunctionIfNeeded(parentExpr);
        }
        if (GremlinQueryOptimizer.isOrExpression(parentExpr)) {
            // reset
            currentFunctionName = null;
            // don't include 'or' in generated functions
            nextFunctionBodyStart = null;
        }
    }
}
public class AbstractLockedMessageEnumeration {

    /**
     * Add a new message to the end of the LME.
     *
     * <p>Pops a pooled {@code LMEMessage} wrapper when one is available (otherwise allocates),
     * appends it to the doubly-linked message list, and — under the instance lock — registers
     * lock-expiry and message-reference-expiry alarms as needed.</p>
     *
     * @param message       the message to append
     * @param isStored      whether the message is stored
     * @param isRecoverable whether the message is recoverable
     * @throws SIResourceException (declared by the original signature; resource failures while
     *                             reading report options are caught and logged internally)
     */
    void addNewMessage(JsMessageWrapper message, boolean isStored, boolean isRecoverable) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "addNewMessage",
                    new Object[] { new Integer(hashCode()), message, new Boolean(isStored) });

        JsMessage jsMsg = message.getMessage();
        long id = jsMsg.getSystemMessageValue();
        SIBUuid8 uuid = jsMsg.getSystemMessageSourceUuid();

        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(tc, "addNewMessage",
                    new Object[] { new Integer(hashCode()), new Long(jsMsg.getSystemMessageValue()),
                            jsMsg.getSystemMessageSourceUuid(), message, new Boolean(isStored) });
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(localConsumerPoint, tc, "verboseMsg OUT : " + message.getMessage().toVerboseString());

        synchronized (this) {
            LMEMessage newElement = null;
            long messageReferenceExpiry =
                    messageProcessor.getCustomProperties().get_message_reference_expiry_value();
            // We have to create an object if the pool is empty
            if (pooledMsg == null) {
                newElement = new LMEMessage(id, uuid, message, isStored, isRecoverable,
                        messageLockExpiry, messageReferenceExpiry);
            }
            // Otherwise pop the first pooled object and use that
            else {
                newElement = pooledMsg;
                pooledMsg = pooledMsg.next;
                newElement.next = null;
                pooledCount--;
                newElement.reuseMessage(id, uuid, message, isStored, isRecoverable,
                        messageLockExpiry, messageReferenceExpiry);
            }

            // Add the message to the end of the list (doubly-linked append).
            newElement.previous = lastMsg;
            if (lastMsg != null)
                lastMsg.next = newElement;
            else
                firstMsg = newElement;
            lastMsg = newElement;

            // If the locks are set to expire and this is the first message we need to
            // register an alarm
            if (messageLockExpiry != 0) {
                // If there are no other messages expiring, this will be the first one
                if (nextMsgToExpire == null)
                    nextMsgToExpire = newElement;
                // If we don't have an alarm registered, register one now
                if (!alarmRegistered) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(tc, "addNewMessage",
                                "Registering MessageLock Expiry alarm for: " + nextMsgToExpire);
                    alarmManager.create(messageLockExpiry, this);
                    alarmRegistered = true;
                }
            }

            if (messageReferenceExpiry != 0) {
                if (messageLockExpiry != 0 && messageReferenceExpiry > messageLockExpiry) {
                    // The messageReferenceExpiry is greater than the messageLockExpiry. There is no
                    // point in creating the alarm for messageReferenceExpiry as the message lock expiry
                    // will always go first.
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        SibTr.debug(tc, "MessageReferenceExpiry: " + messageReferenceExpiry
                                + " is greater than messageLockExpiry: " + messageLockExpiry);
                        SibTr.debug(tc, "MessageReferneceExpiry Alarm not registered");
                    }
                } else {
                    try {
                        if (message.getReportCOD() == null) {
                            // Now register the message reference expiry alarm
                            // If there are no other message references expiring, this will be the first one
                            if (nextMsgReferenceToExpire == null)
                                nextMsgReferenceToExpire = newElement;
                            // If we don't have an alarm registered, register one now
                            if (!msgReferenceAlarmRegistered) {
                                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                    SibTr.debug(tc, "addNewMessage",
                                            "Registering MessageReference Expiry alarm for: "
                                                    + nextMsgReferenceToExpire);
                                alarmManager.create(messageReferenceExpiry, new MessageReferenceExpiryAlarm());
                                msgReferenceAlarmRegistered = true;
                            }
                        }
                    } catch (SIResourceException e) {
                        // No FFDC code needed
                        // There was a problem getting hold of the ReportCOD of the message
                        // assume it is set and don't create the expiry alarm.
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(tc,
                                    "Thrown exception when trying to create msg ref expiry alarm: " + e);
                    }
                }
            }

            // By adding a message we must be about to call consumeMessages and therefore
            // we're in a valid state for calls
            validState = true;
        } // synchronized

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "addNewMessage", this);
    }
}
public class NetworkInterfacesInner { /** * Gets information about all network interfaces in a virtual machine in a virtual machine scale set . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; NetworkInterfaceInner & gt ; object */ public Observable < ServiceResponse < Page < NetworkInterfaceInner > > > listVirtualMachineScaleSetVMNetworkInterfacesNextWithServiceResponseAsync ( final String nextPageLink ) { } }
return listVirtualMachineScaleSetVMNetworkInterfacesNextSinglePageAsync ( nextPageLink ) . concatMap ( new Func1 < ServiceResponse < Page < NetworkInterfaceInner > > , Observable < ServiceResponse < Page < NetworkInterfaceInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < NetworkInterfaceInner > > > call ( ServiceResponse < Page < NetworkInterfaceInner > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( listVirtualMachineScaleSetVMNetworkInterfacesNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ;
public class CMMLHelper {
    /**
     * Get the root apply node from the content MathML.
     * Searches, in order: the MathML-Content annotation, any semantics/apply
     * node, and finally an apply node directly beneath the math element.
     *
     * @param cmmlInfo CMMLInfo document
     * @return first apply node of the MathML-Content within the document, or null if none is found
     * @throws XPathExpressionException parser exception
     */
    public static Node getFirstApplyNode(CMMLInfo cmmlInfo) throws XPathExpressionException {
        final XPath mathmlXpath = XMLHelper.namespaceAwareXpath("m", CMMLInfo.NS_MATHML);
        // 1. search for a separate cmml semantic
        Node node = getElement(cmmlInfo, "m:math/m:semantics/m:annotation-xml[@encoding='MathML-Content']/m:apply", mathmlXpath);
        if (node != null) {
            return node;
        }
        // 2. search for a main cmml semantic
        node = getElement(cmmlInfo, "*//m:semantics/m:apply", mathmlXpath);
        if (node != null) {
            return node;
        }
        // 3. try to take the apply right beneath the math element
        return getElement(cmmlInfo, "m:math/m:apply", mathmlXpath);
    }
}
public class HtmlWriter { /** * Generates HTML Output for a { @ link Link } . */ private static String linkToHtml ( Link l ) { } }
if ( l == null ) { return "null" ; } StringBuilder result = new StringBuilder ( ) ; result . append ( "<div class=\"Link\"><b class=\"Link\">Link:</b>" + l . getType ( ) + ": \"" + convertTags ( l . getText ( ) ) + "\" -> \"" + convertTags ( l . getTarget ( ) ) + "\"" ) ; if ( l . getParameters ( ) . size ( ) != 0 ) { for ( String parameter : l . getParameters ( ) ) { result . append ( "<br>\nPARAMETER: \"" + convertTags ( parameter ) + "\"" ) ; } } result . append ( "</div>\n" ) ; return result . toString ( ) ;
public class App { /** * Removes a constraint from the map . * @ param type the type * @ param field the field * @ param constraintName the constraint name * @ return true if successful */ public boolean removeValidationConstraint ( String type , String field , String constraintName ) { } }
if ( ! StringUtils . isBlank ( type ) && ! StringUtils . isBlank ( field ) && constraintName != null ) { Map < String , Map < String , Map < String , ? > > > fieldsMap = getValidationConstraints ( ) . get ( type ) ; if ( fieldsMap != null && fieldsMap . containsKey ( field ) ) { if ( fieldsMap . get ( field ) . containsKey ( constraintName ) ) { fieldsMap . get ( field ) . remove ( constraintName ) ; } if ( fieldsMap . get ( field ) . isEmpty ( ) ) { getValidationConstraints ( ) . get ( type ) . remove ( field ) ; } if ( getValidationConstraints ( ) . get ( type ) . isEmpty ( ) ) { getValidationConstraints ( ) . remove ( type ) ; } return true ; } } return false ;
public class AssociateRoleToGroupRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( AssociateRoleToGroupRequest associateRoleToGroupRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( associateRoleToGroupRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( associateRoleToGroupRequest . getGroupId ( ) , GROUPID_BINDING ) ; protocolMarshaller . marshall ( associateRoleToGroupRequest . getRoleArn ( ) , ROLEARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class PRStream {
    /**
     * Sets the data associated with the stream, either compressed or
     * uncompressed. Note that the data will never be compressed if
     * Document.compress is set to false.
     *
     * @param data raw data, decrypted and uncompressed.
     * @param compress true if you want the stream to be compressed.
     * @param compressionLevel a value between -1 and 9 (ignored if compress == false)
     * @since iText 2.1.3
     */
    public void setData ( byte [ ] data , boolean compress , int compressionLevel ) {
        // Drop any existing filter entry; FLATEDECODE is re-added below only
        // when the payload is actually deflated.
        remove ( PdfName . FILTER ) ;
        // Invalidate the original file offset: the stream content no longer
        // comes from the source PDF.
        this . offset = - 1 ;
        if ( Document . compress && compress ) {
            try {
                // Deflate the payload at the requested compression level into
                // an in-memory buffer.
                ByteArrayOutputStream stream = new ByteArrayOutputStream ( ) ;
                Deflater deflater = new Deflater ( compressionLevel ) ;
                DeflaterOutputStream zip = new DeflaterOutputStream ( stream , deflater ) ;
                zip . write ( data ) ;
                zip . close ( ) ;
                // Release the native zlib resources held by the Deflater.
                deflater . end ( ) ;
                bytes = stream . toByteArray ( ) ;
                this . compressionLevel = compressionLevel ;
            } catch ( IOException ioe ) {
                // Writes to an in-memory stream should not fail; rewrap as unchecked.
                throw new ExceptionConverter ( ioe ) ;
            }
            put ( PdfName . FILTER , PdfName . FLATEDECODE ) ;
        } else
            // Compression disabled: store the raw bytes as-is.
            bytes = data ;
        setLength ( bytes . length ) ;
    }
}
public class IconicsDrawable { /** * Set background contour width from dp for the icon * @ return The current IconicsDrawable for chaining . */ @ NonNull public IconicsDrawable backgroundContourWidthDp ( @ Dimension ( unit = DP ) int sizeDp ) { } }
return backgroundContourWidthPx ( Utils . convertDpToPx ( mContext , sizeDp ) ) ;
public class Storage { /** * Truncate a storage . This deletes all relevant data . All running sessions * must be closed beforehand . * @ param pConf * the storage at this path should be deleted . * @ throws TTException * any kind of false Treetank behaviour */ public static synchronized void truncateStorage ( final StorageConfiguration pConf ) throws TTException { } }
// check that database must be closed beforehand if ( ! STORAGEMAP . containsKey ( pConf . mFile ) ) { if ( existsStorage ( pConf . mFile ) ) { final IStorage storage = new Storage ( pConf ) ; final File [ ] resources = new File ( pConf . mFile , StorageConfiguration . Paths . Data . getFile ( ) . getName ( ) ) . listFiles ( ) ; for ( final File resource : resources ) { storage . truncateResource ( new SessionConfiguration ( resource . getName ( ) , null ) ) ; } storage . close ( ) ; // instantiate the database for deletion IOUtils . recursiveDelete ( pConf . mFile ) ; } }
public class QuickDiagnosingMatcherBase { /** * Uses the { @ code matcher } to validate { @ code item } . * If validation fails , an error message is appended to { @ code mismatch } . * The code is equivalent to * < pre > { @ code * if ( matcher . matches ( item ) ) { * return true ; * } else { * matcher . describeMismatch ( item , mismatch ) ; * return false ; * } < / pre > * but uses optimizations for diagnosing matchers . * @ param matcher * @ param item * @ param mismatch * @ return { @ code true } iif { @ code item } was matched * @ see DiagnosingMatcher * @ see QuickDiagnosingMatcher */ protected static boolean quickMatch ( Matcher < ? > matcher , Object item , Description mismatch ) { } }
return QuickDiagnose . matches ( matcher , item , mismatch ) ;
public class ByteBuddyCrossClassLoaderSerializationSupport {
    /**
     * Custom implementation of the <code>writeReplace</code> method for serialization.
     *
     * <p>When {@link ObjectOutputStream} serializes the mock it invokes this
     * replacement hook, which wraps the mock in a
     * {@link CrossClassLoaderSerializationProxy}. Building that proxy serializes
     * the mock again through a custom stream, which would re-trigger
     * <code>writeReplace</code> and recurse to a {@link StackOverflowError}.
     * A per-thread "currently being replaced" flag breaks the recursion; the
     * flag is shared state of this class, hence the reentrant lock around the
     * critical section.</p>
     *
     * @param mockitoMock The Mockito mock to be serialized.
     * @return A wrapper ({@link CrossClassLoaderSerializationProxy}) to be serialized by the calling ObjectOutputStream.
     * @throws java.io.ObjectStreamException
     */
    public Object writeReplace ( Object mockitoMock ) throws ObjectStreamException {
        // reentrant lock for critical section. could it be improved?
        mutex . lock ( ) ;
        try {
            // mark started flag (per thread, not per instance)
            // temporary loosy hack to avoid stackoverflow: if we are already
            // inside a replacement on this thread, return the mock unchanged
            // so the nested serialization does not recurse.
            if ( mockIsCurrentlyBeingReplaced ( ) ) {
                return mockitoMock ;
            }
            mockReplacementStarted ( ) ;
            // Wrap the mock; the proxy constructor serializes it via the
            // custom Mockito object stream.
            return new CrossClassLoaderSerializationProxy ( mockitoMock ) ;
        } catch ( IOException ioe ) {
            // Enrich the failure with the mock's name and mocked type for a
            // more actionable error message.
            MockName mockName = MockUtil . getMockName ( mockitoMock ) ;
            String mockedType = MockUtil . getMockSettings ( mockitoMock ) . getTypeToMock ( ) . getCanonicalName ( ) ;
            throw new MockitoSerializationIssue ( join ( "The mock '" + mockName + "' of type '" + mockedType + "'" , "The Java Standard Serialization reported an '" + ioe . getClass ( ) . getSimpleName ( ) + "' saying :" , " " + ioe . getMessage ( ) ) , ioe ) ;
        } finally {
            // unmark the flag and release the lock whether or not replacement succeeded
            mockReplacementCompleted ( ) ;
            mutex . unlock ( ) ;
        }
    }
}
public class RequestedGlobalProperties {
    /**
     * Filters these properties by what can be preserved by the given node when
     * propagated down to the given input.
     *
     * @param node The node representing the contract.
     * @param input The index of the input.
     * @return These properties if they survive the node, null otherwise.
     */
    public RequestedGlobalProperties filterByNodesConstantSet(OptimizerNode node, int input) {
        // An ordering requirement dominates: every field it involves must be
        // constant through the node, otherwise the properties do not survive.
        if (this.ordering != null) {
            for (int orderField : this.ordering.getInvolvedIndexes()) {
                if (!node.isFieldConstant(input, orderField)) {
                    return null;
                }
            }
        }
        // Otherwise a plain partitioning requirement survives only when all of
        // its fields are constant.
        else if (this.partitioningFields != null) {
            for (int partitionField : this.partitioningFields) {
                if (!node.isFieldConstant(input, partitionField)) {
                    return null;
                }
            }
        }
        // Full replication is never propagated down.
        return this.partitioning == PartitioningProperty.FULL_REPLICATION ? null : this;
    }
}
public class GroupsInterface {
    /**
     * Browse groups for the given category ID. If a null value is passed for the category then the root category is used.
     *
     * @param catId The optional category id. Null value will be ignored.
     * @return The Category, populated with its subcategories and groups
     * @throws FlickrException
     * @deprecated Flickr returns just empty results
     */
    @Deprecated
    public Category browse ( String catId ) throws FlickrException {
        List < Subcategory > subcategories = new ArrayList < Subcategory > ( ) ;
        List < Group > groups = new ArrayList < Group > ( ) ;
        // Build the REST call parameters; cat_id is optional.
        Map < String , Object > parameters = new HashMap < String , Object > ( ) ;
        parameters . put ( "method" , METHOD_BROWSE ) ;
        if ( catId != null ) {
            parameters . put ( "cat_id" , catId ) ;
        }
        Response response = transportAPI . get ( transportAPI . getPath ( ) , parameters , apiKey , sharedSecret ) ;
        if ( response . isError ( ) ) {
            throw new FlickrException ( response . getErrorCode ( ) , response . getErrorMessage ( ) ) ;
        }
        // The payload root element describes the requested category itself.
        Element categoryElement = response . getPayload ( ) ;
        Category category = new Category ( ) ;
        category . setName ( categoryElement . getAttribute ( "name" ) ) ;
        category . setPath ( categoryElement . getAttribute ( "path" ) ) ;
        category . setPathIds ( categoryElement . getAttribute ( "pathids" ) ) ;
        // Collect nested <subcat> elements into Subcategory objects.
        NodeList subcatNodes = categoryElement . getElementsByTagName ( "subcat" ) ;
        for ( int i = 0 ; i < subcatNodes . getLength ( ) ; i ++ ) {
            Element node = ( Element ) subcatNodes . item ( i ) ;
            Subcategory subcategory = new Subcategory ( ) ;
            subcategory . setId ( Integer . parseInt ( node . getAttribute ( "id" ) ) ) ;
            subcategory . setName ( node . getAttribute ( "name" ) ) ;
            subcategory . setCount ( Integer . parseInt ( node . getAttribute ( "count" ) ) ) ;
            subcategories . add ( subcategory ) ;
        }
        // Collect nested <group> elements into Group objects.
        NodeList groupNodes = categoryElement . getElementsByTagName ( "group" ) ;
        for ( int i = 0 ; i < groupNodes . getLength ( ) ; i ++ ) {
            Element node = ( Element ) groupNodes . item ( i ) ;
            Group group = new Group ( ) ;
            group . setId ( node . getAttribute ( "nsid" ) ) ;
            group . setName ( node . getAttribute ( "name" ) ) ;
            group . setMembers ( node . getAttribute ( "members" ) ) ;
            groups . add ( group ) ;
        }
        category . setGroups ( groups ) ;
        category . setSubcategories ( subcategories ) ;
        return category ;
    }
}
public class JSON {
    /**
     * Internal helper: applies pretty-printing configuration to the generator.
     *
     * @param g the generator to configure
     * @return the same generator, for chaining
     */
    protected JsonGenerator _config(JsonGenerator g) {
        final PrettyPrinter configured = _prettyPrinter;
        if (configured == null) {
            // No explicit printer set: honor the feature flag instead.
            if (isEnabled(Feature.PRETTY_PRINT_OUTPUT)) {
                g.useDefaultPrettyPrinter();
            }
        } else {
            // Stateful printers must not be shared between generators;
            // create a fresh instance when the printer supports it.
            PrettyPrinter toUse = configured;
            if (toUse instanceof Instantiatable<?>) {
                toUse = (PrettyPrinter) ((Instantiatable<?>) toUse).createInstance();
            }
            g.setPrettyPrinter(toUse);
        }
        return g;
    }
}
public class BreakpointConditionEditor { /** * Handle that the value changed */ protected void valueChanged ( ) { } }
String newValue = fViewer . getDocument ( ) . get ( ) ; if ( ! newValue . equals ( fOldValue ) ) { fOldValue = newValue ; } refreshValidState ( ) ;
public class IsVpcPeeredRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param isVpcPeeredRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller (unused: the request has no modeled members)
     */
    public void marshall ( IsVpcPeeredRequest isVpcPeeredRequest , ProtocolMarshaller protocolMarshaller ) {
        if ( isVpcPeeredRequest == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            // Intentionally empty: IsVpcPeeredRequest carries no modeled members
            // to marshall. The try/catch shell is kept for generated-code uniformity.
        } catch ( Exception e ) {
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class FindDeserializers { /** * { @ inheritDoc } */ @ Override public JsonDeserializer < ? > findBeanDeserializer ( JavaType type , DeserializationConfig config , BeanDescription beanDesc ) throws JsonMappingException { } }
if ( jsonContext . isSupportedType ( type . getRawClass ( ) ) ) { return new BeanTypeDeserializer ( jsonContext , type . getRawClass ( ) ) ; } return null ;
public class BlockPlacementPolicy {
    /**
     * choose <i>numOfReplicas</i> data nodes for <i>writer</i>
     * to re-replicate a block with size <i>blocksize</i>.
     * If not, return as many as we can.
     * The base implementation extracts the pathname of the file from the
     * specified srcInode, but this could be a costly operation depending on the
     * file system implementation. Concrete implementations of this class should
     * override this method to avoid this overhead.
     *
     * @param srcInode The inode of the file for which chooseTarget is being invoked.
     * @param numOfReplicas additional number of replicas wanted.
     * @param writer the writer's machine, null if not in the cluster.
     * @param chosenNodes datanodes that have been chosen as targets.
     * @param excludesNodes nodes to exclude from placement
     * @param blocksize size of the data to be written.
     * @return array of DatanodeDescriptor instances chosen as target
     *         and sorted as a pipeline.
     */
    DatanodeDescriptor [ ] chooseTarget ( FSInodeInfo srcInode , int numOfReplicas , DatanodeDescriptor writer , List < DatanodeDescriptor > chosenNodes , List < Node > excludesNodes , long blocksize ) {
        // Resolve the inode to its full path and delegate to the path-based overload.
        // NOTE(review): excludesNodes is NOT forwarded to the delegate call below,
        // so the exclusion list appears to be silently dropped here — confirm
        // whether an overload accepting it should be used instead.
        return chooseTarget ( FSNamesystem . getFullPathName ( srcInode ) , numOfReplicas , writer , chosenNodes , blocksize ) ;
    }
}
public class RedisClientFactory { /** * On pool config event . */ private JedisPoolConfig onPoolConfig ( final byte WHEN_EXHAUSTED_FAIL , String maxActivePerNode , String maxIdlePerNode , String minIdlePerNode , String maxTotal ) { } }
if ( ! StringUtils . isBlank ( maxActivePerNode ) && StringUtils . isNumeric ( maxActivePerNode ) ) { logger . info ( "configuring connection pool" ) ; JedisPoolConfig poolConfig = new JedisPoolConfig ( ) ; if ( maxTotal != null && StringUtils . isNumeric ( maxTotal ) ) { poolConfig . setMaxTotal ( Integer . valueOf ( maxTotal ) ) ; } if ( maxIdlePerNode != null && StringUtils . isNumeric ( maxIdlePerNode ) ) { poolConfig . setMaxIdle ( Integer . valueOf ( maxIdlePerNode ) ) ; } if ( minIdlePerNode != null && StringUtils . isNumeric ( minIdlePerNode ) ) { poolConfig . setMinIdle ( Integer . parseInt ( minIdlePerNode ) ) ; } if ( maxActivePerNode != null && StringUtils . isNumeric ( maxActivePerNode ) ) { // poolConfig . setWhenExhaustedAction ( WHEN _ EXHAUSTED _ FAIL ) ; poolConfig . setBlockWhenExhausted ( true ) ; } return poolConfig ; } return null ;
public class Latent { /** * { @ inheritDoc } */ public TypeDescription . Generic getType ( ) { } }
return parameterType . accept ( TypeDescription . Generic . Visitor . Substitutor . ForAttachment . of ( this ) ) ;
public class ListVirtualNodesResult { /** * The list of existing virtual nodes for the specified service mesh . * @ param virtualNodes * The list of existing virtual nodes for the specified service mesh . */ public void setVirtualNodes ( java . util . Collection < VirtualNodeRef > virtualNodes ) { } }
if ( virtualNodes == null ) { this . virtualNodes = null ; return ; } this . virtualNodes = new java . util . ArrayList < VirtualNodeRef > ( virtualNodes ) ;
public class SearchExpressionFacade { /** * Validates the given search expressions . We only validate it , for performance reasons , if the current { @ link ProjectStage } is * { @ link ProjectStage # Development } . * @ param context The { @ link FacesContext } . * @ param source The source component . E . g . a button . * @ param expressions The search expression . * @ param splittedExpressions The already splitted expressions . */ protected static void validateExpressions ( FacesContext context , UIComponent source , String expressions , String [ ] splittedExpressions ) { } }
if ( context . isProjectStage ( ProjectStage . Development ) ) { if ( splittedExpressions . length > 1 ) { if ( expressions . contains ( SearchExpressionConstants . NONE_KEYWORD ) || expressions . contains ( SearchExpressionConstants . ALL_KEYWORD ) ) { throw new FacesException ( "It's not possible to use @none or @all combined with other expressions." + " Expressions: \"" + expressions + "\" referenced from \"" + source . getClientId ( context ) + "\"" ) ; } } }
public class JavaZipFileSystem {
    /**
     * {@inheritDoc}
     *
     * Extracts the zip entry backing {@code target} into a cached file on
     * first access; subsequent calls return the cached copy. Uses a
     * lock-free fast path with a double-checked re-read under the node lock.
     */
    public File getFile ( VirtualFile mountPoint , VirtualFile target ) throws IOException {
        final ZipNode zipNode = getExistingZipNode ( mountPoint , target ) ;
        // check if we have cached one already (fast path, no locking)
        File cachedFile = zipNode . cachedFile ;
        if ( cachedFile != null ) {
            return cachedFile ;
        }
        synchronized ( zipNode ) {
            // double-check under the node lock: another thread may have
            // populated the cache while we waited for the monitor
            cachedFile = zipNode . cachedFile ;
            if ( cachedFile != null ) {
                return cachedFile ;
            }
            // nope, create a cached temp file for this entry
            final JarEntry zipEntry = zipNode . entry ;
            String name = target . getPathNameRelativeTo ( mountPoint ) ;
            cachedFile = buildFile ( contentsDir , name ) ;
            if ( zipEntry == null ) {
                // a null entry denotes a directory node; materialize it as a directory
                cachedFile . mkdir ( ) ;
            } else {
                // copy the entry contents; copyStreamAndClose closes both streams
                VFSUtils . copyStreamAndClose ( zipFile . getInputStream ( zipEntry ) , new BufferedOutputStream ( new FileOutputStream ( cachedFile ) ) ) ;
            }
            // publish the cached file only after it is fully written
            zipNode . cachedFile = cachedFile ;
            return cachedFile ;
        }
    }
}
public class X11InputDeviceRegistry { /** * Convenience method to convert X11 button codes to glass button codes * @ param button */ private static int buttonToGlassButton ( int button ) { } }
switch ( button ) { case X . Button1 : return MouseEvent . BUTTON_LEFT ; case X . Button2 : return MouseEvent . BUTTON_OTHER ; case X . Button3 : return MouseEvent . BUTTON_RIGHT ; default : return MouseEvent . BUTTON_NONE ; }
public class KvStateRegistry {
    /**
     * Resolves the registry listener for the given job.
     * Legacy code registers a single listener under
     * HighAvailabilityServices.DEFAULT_JOB_ID; that one takes precedence,
     * falling back to the job-specific listener.
     *
     * @param jobId the job whose listener is requested
     * @return the resolved listener, or null if none is registered
     */
    private KvStateRegistryListener getKvStateRegistryListener(JobID jobId) {
        final KvStateRegistryListener legacyListener = listeners.get(HighAvailabilityServices.DEFAULT_JOB_ID);
        return (legacyListener != null) ? legacyListener : listeners.get(jobId);
    }
}
public class SettingsParser {
    /**
     * Reads the settings document and populates the cursor manager with the
     * parsed themes and cursors.
     * TODO: use a utility to read XML and convert to Object and use Objects to write XML
     *
     * @param parser pull parser positioned at the settings start tag
     * @param context the GVR context used to load themes and cursors
     * @param cursorManager receives the parsed themes and cursors
     * @throws XmlPullParserException on malformed settings or when no cursor is defined
     * @throws IOException on underlying read errors
     */
    private static void readSettings ( XmlPullParser parser , GVRContext context , CursorManager cursorManager ) throws XmlPullParserException , IOException {
        Map < String , CursorTheme > themes = cursorManager . getThemeMap ( ) ;
        parser . require ( XmlPullParser . START_TAG , null , SETTINGS ) ;
        // Walk the children of the settings element until its end tag.
        while ( parser . next ( ) != XmlPullParser . END_TAG ) {
            if ( parser . getEventType ( ) == XmlPullParser . TEXT ) {
                // skip whitespace / character data between elements
                continue ;
            } else if ( parser . getEventType ( ) != XmlPullParser . START_TAG ) {
                throw new XmlPullParserException ( "Cannot find start tag" ) ;
            }
            String name = parser . getName ( ) ;
            Log . d ( TAG , "Reading tag:" + name ) ;
            // Starts by looking for the entry tag
            if ( name . equals ( GLOBAL ) ) {
                GlobalSettingsFactory . readGlobalSettings ( parser ) ;
            } else if ( name . equals ( THEME ) ) {
                Log . d ( TAG , "Reading the theme tag" ) ;
                // themes are registered by id so cursors can reference them
                CursorTheme theme = CursorThemeFactory . readTheme ( parser , context ) ;
                themes . put ( theme . getId ( ) , theme ) ;
            } else if ( name . equals ( CURSOR ) ) {
                Log . d ( TAG , "Reading the cursor tag" ) ;
                cursorManager . addCursor ( CursorFactory . readCursor ( parser , context , cursorManager ) ) ;
            }
        }
        // At least one cursor must be configured for the settings to be valid.
        if ( 0 == cursorManager . getCursorCount ( ) ) {
            throw new XmlPullParserException ( "No cursors specified in settings.xml" ) ;
        }
    }
}