signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Parser { /** * Match against the passed string . * Case is ignored if the ignoreCase flag is set . * @ param match * @ param textProvider * @ return true if matched */ public boolean exactOrError ( String match , TextProvider textProvider ) { } }
if ( ! exact ( match , textProvider , false ) ) { throw new ParserException ( "Expected " + match , textProvider ) ; } return true ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertFontResolutionRPuBaseToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class Unpooled {
    /**
     * Creates a read-only buffer which disallows any modification operations
     * on the specified {@code buffer}. The new buffer has the same
     * {@code readerIndex} and {@code writerIndex} as the specified {@code buffer}.
     *
     * @deprecated Use {@link ByteBuf#asReadOnly()}.
     */
    @Deprecated
    public static ByteBuf unmodifiableBuffer(ByteBuf buffer) {
        ByteOrder endianness = buffer.order();
        if (endianness == BIG_ENDIAN) {
            // Big-endian buffers can be wrapped directly.
            return new ReadOnlyByteBuf(buffer);
        }
        // For a little-endian buffer, wrap the big-endian view of it and then
        // expose the read-only result as little-endian again, so the caller
        // observes the original byte order.
        return new ReadOnlyByteBuf(buffer.order(BIG_ENDIAN)).order(LITTLE_ENDIAN);
    }
}
public class DefaultClassSource { /** * 检查是否发生变化 , 如果发生变化则更新 */ public boolean checkAndScan ( ) { } }
String changeLogs = null ; try { synchronized ( lock ) { checkFileChange ( ) ; } if ( changes . isEmpty ( ) ) { return false ; } for ( ; ; ) { String log = changes . poll ( ) ; if ( log == null ) { break ; } if ( changeLogs == null ) { changeLogs = "" ; } changeLogs += log + "\n" ; } } catch ( Throwable e ) { log . error ( e . getMessage ( ) , e ) ; } if ( changeLogs != null ) { log . debug ( "startScanClassSources from events:\n" + changeLogs ) ; scanClassSources ( ) ; return true ; } return false ;
public class DeprecatedListWriter { /** * Add the index link . * @ param builder the deprecated list builder * @ param type the type of list being documented * @ param contentTree the content tree to which the index link will be added */ private void addIndexLink ( DeprecatedAPIListBuilder builder , DeprElementKind kind , Content contentTree ) { } }
if ( builder . hasDocumentation ( kind ) ) { Content li = HtmlTree . LI ( getHyperLink ( getAnchorName ( kind ) , contents . getContent ( getHeadingKey ( kind ) ) ) ) ; contentTree . addContent ( li ) ; }
public class DatasetUtils {
    /**
     * Given an {@link Iterable} of dataset identifiers (e.g., name, URN, etc.), returns a {@link Map} that links each
     * dataset with the extra configuration information specified in the state via {@code DATASET_SPECIFIC_PROPS}
     * (falling back to {@code KAFKA_TOPIC_SPECIFIC_STATE} when the former is absent).
     *
     * @param datasets the dataset identifiers to match against the configured entries
     * @param state    the state holding the JSON-array configuration
     * @return a map from dataset identifier to its merged extra {@link State};
     *         empty when neither property is set
     */
    public static Map<String, State> getDatasetSpecificProps(Iterable<String> datasets, State state) {
        if (!Strings.isNullOrEmpty(state.getProp(DATASET_SPECIFIC_PROPS))
                || !Strings.isNullOrEmpty(state.getProp(KAFKA_TOPIC_SPECIFIC_STATE))) {
            Map<String, State> datasetSpecificConfigMap = Maps.newHashMap();
            // DATASET_SPECIFIC_PROPS wins when both properties are present.
            JsonArray array = !Strings.isNullOrEmpty(state.getProp(DATASET_SPECIFIC_PROPS))
                    ? state.getPropAsJsonArray(DATASET_SPECIFIC_PROPS)
                    : state.getPropAsJsonArray(KAFKA_TOPIC_SPECIFIC_STATE);
            // Iterate over the entire JsonArray specified by the config key
            for (JsonElement datasetElement : array) {
                // Check that each entry in the JsonArray is a JsonObject
                Preconditions.checkArgument(datasetElement.isJsonObject(),
                        "The value for property " + DATASET_SPECIFIC_PROPS + " is malformed");
                JsonObject object = datasetElement.getAsJsonObject();
                // Only process JsonObjects that have a dataset identifier
                if (object.has(DATASET)) {
                    JsonElement datasetNameElement = object.get(DATASET);
                    Preconditions.checkArgument(datasetNameElement.isJsonPrimitive(),
                            "The value for property " + DATASET_SPECIFIC_PROPS + " is malformed, the " + DATASET
                                    + " field must be a string");
                    // Iterate through each dataset that matches the value of the JsonObjects DATASET field
                    for (String dataset : Iterables.filter(datasets,
                            new DatasetPredicate(datasetNameElement.getAsString()))) {
                        // If an entry already exists for a dataset, add it to the current state,
                        // else create a new state
                        if (datasetSpecificConfigMap.containsKey(dataset)) {
                            datasetSpecificConfigMap.get(dataset).addAll(StateUtils.jsonObjectToState(object, DATASET));
                        } else {
                            datasetSpecificConfigMap.put(dataset, StateUtils.jsonObjectToState(object, DATASET));
                        }
                    }
                } else {
                    LOG.warn("Skipping JsonElement " + datasetElement + " as it is does not contain a field with key "
                            + DATASET);
                }
            }
            return datasetSpecificConfigMap;
        }
        return Maps.newHashMap();
    }
}
public class DataNode {
    /**
     * Parses and verifies command line arguments and sets configuration parameters.
     * NOTE: calls {@code System.exit(-1)} (terminating the JVM) on several
     * invalid-argument conditions rather than returning false.
     *
     * @param args the command line arguments (may be null)
     * @param conf the configuration to populate with -D key=value pairs
     * @return false if passed arguments are incorrect (unrecognized option)
     */
    private static boolean parseArguments(String args[], Configuration conf) {
        int argsLen = (args == null) ? 0 : args.length;
        StartupOption startOpt = StartupOption.REGULAR;
        for (int i = 0; i < argsLen; i++) {
            String cmd = args[i];
            if ("-r".equalsIgnoreCase(cmd) || "--rack".equalsIgnoreCase(cmd)) {
                // Rack options were removed; fail hard so scripts get fixed.
                LOG.error("-r, --rack arguments are not supported anymore. RackID "
                        + "resolution is handled by the NameNode.");
                System.exit(-1);
            } else if ("-rollback".equalsIgnoreCase(cmd)) {
                startOpt = StartupOption.ROLLBACK;
            } else if ("-regular".equalsIgnoreCase(cmd)) {
                startOpt = StartupOption.REGULAR;
            } else if ("-d".equalsIgnoreCase(cmd)) {
                // -D key=value: set an ad-hoc configuration property.
                // (equalsIgnoreCase makes "-d" match both "-d" and "-D".)
                ++i;
                if (i >= argsLen) {
                    LOG.error("-D option requires following argument.");
                    System.exit(-1);
                }
                // Split only on the first '=' so values may contain '='.
                String[] keyval = args[i].split("=", 2);
                if (keyval.length == 2) {
                    conf.set(keyval[0], keyval[1]);
                } else {
                    LOG.error("-D option invalid (expected =): " + args[i]);
                    System.exit(-1);
                }
            } else
                return false;
        }
        setStartupOption(conf, startOpt);
        return true;
    }
}
public class HttpUtils { /** * Extract the client IP address from an x - forwarded - for header . Returns null if there is no x - forwarded - for header * @ param xForwardedFor a < code > String < / code > value * @ return a < code > String < / code > value */ public static String extractClientIpFromXForwardedFor ( String xForwardedFor ) { } }
if ( xForwardedFor == null ) { return null ; } xForwardedFor = xForwardedFor . trim ( ) ; String tokenized [ ] = xForwardedFor . split ( "," ) ; if ( tokenized . length == 0 ) { return null ; } else { return tokenized [ 0 ] . trim ( ) ; }
public class MoreCollectors {
    /**
     * Adapts a {@code Collector} accepting elements of type {@code U} to one
     * accepting elements of type {@code T} by applying a flat mapping function
     * to each input element before accumulation. Each mapped stream is
     * {@link java.util.stream.BaseStream#close() closed} after its contents
     * have been placed downstream; a {@code null} mapped stream is treated as empty.
     *
     * Unlike JDK 9's {@code Collectors.flatMapping}, when the downstream
     * collector is <a href="package-summary.html#ShortCircuitReduction">
     * short-circuiting</a>, the returned collector is also short-circuiting.
     *
     * @param <T> the type of the input elements
     * @param <U> type of elements accepted by downstream collector
     * @param <A> intermediate accumulation type of the downstream collector
     * @param <R> result type of collector
     * @param mapper a function to be applied to the input elements, which
     *        returns a stream of results
     * @param downstream a collector which will receive the elements of the
     *        stream returned by mapper
     * @return a collector which applies the mapping function to the input
     *         elements and provides the flat mapped results to the downstream collector
     * @since 0.4.1
     */
    public static <T, U, A, R> Collector<T, ?, R> flatMapping(Function<? super T, ? extends Stream<? extends U>> mapper,
            Collector<? super U, A, R> downstream) {
        BiConsumer<A, ? super U> downstreamAccumulator = downstream.accumulator();
        // finished(...) yields a predicate only for short-circuiting downstream
        // collectors; null selects the plain (non-cancellable) path below.
        Predicate<A> finished = finished(downstream);
        if (finished != null) {
            // Short-circuiting path: skip inputs once finished, and abort
            // draining a mapped stream mid-way via CancelException.
            return new CancellableCollectorImpl<>(downstream.supplier(), (acc, t) -> {
                if (finished.test(acc))
                    return;
                try (Stream<? extends U> stream = mapper.apply(t)) {
                    if (stream != null) {
                        stream.spliterator().forEachRemaining(u -> {
                            downstreamAccumulator.accept(acc, u);
                            if (finished.test(acc))
                                throw new CancelException();
                        });
                    }
                } catch (CancelException ex) {
                    // ignore - thrown solely to break out of forEachRemaining
                }
            }, downstream.combiner(), downstream.finisher(), finished, downstream.characteristics());
        }
        // Non-short-circuiting path: every mapped stream is drained fully.
        return Collector.of(downstream.supplier(), (acc, t) -> {
            try (Stream<? extends U> stream = mapper.apply(t)) {
                if (stream != null) {
                    stream.spliterator().forEachRemaining(u -> downstreamAccumulator.accept(acc, u));
                }
            }
        }, downstream.combiner(), downstream.finisher(), downstream.characteristics().toArray(new Characteristics[0]));
    }
}
public class Anchor { /** * We override this because the < a > < / a > tag doesn ' t support the disabled property . So on clicks and focus , if disabled then ignore * @ param event dom event */ @ Override public void onBrowserEvent ( final Event event ) { } }
switch ( DOM . eventGetType ( event ) ) { case Event . ONDBLCLICK : case Event . ONFOCUS : case Event . ONCLICK : if ( ! isEnabled ( ) ) { return ; } break ; } super . onBrowserEvent ( event ) ;
public class DataUtil { /** * big - endian or motorola format . */ public static void writeLongBigEndian ( ByteBuffer io , long value ) { } }
io . put ( ( byte ) ( ( value >> 56 ) & 0xFF ) ) ; io . put ( ( byte ) ( ( value >> 48 ) & 0xFF ) ) ; io . put ( ( byte ) ( ( value >> 40 ) & 0xFF ) ) ; io . put ( ( byte ) ( ( value >> 32 ) & 0xFF ) ) ; io . put ( ( byte ) ( ( value >> 24 ) & 0xFF ) ) ; io . put ( ( byte ) ( ( value >> 16 ) & 0xFF ) ) ; io . put ( ( byte ) ( ( value >> 8 ) & 0xFF ) ) ; io . put ( ( byte ) ( value & 0xFF ) ) ;
public class WPartialDateField { /** * Returns the month value . * @ return the month , or null if unspecified . */ public Integer getMonth ( ) { } }
String dateValue = getValue ( ) ; if ( dateValue != null && dateValue . length ( ) >= MONTH_END ) { return parseDateComponent ( dateValue . substring ( MONTH_START , MONTH_END ) , getPaddingChar ( ) ) ; } else { return null ; }
public class DefaultUnifiedDiffDisplayer { /** * Ends the last { @ link UnifiedDiffBlock } by adding a number of unmodified elements . * @ param state the state of the displayer * @ param contextSize the number of unmodified elements to display at the end of a block * @ param < E > the type of composite elements that are compared to produce the first level diff * @ param < F > the type of sub - elements that are compared to produce the second - level diff when a composite element * is modified */ private < E , F > void maybeEndBlock ( State < E , F > state , int contextSize ) { } }
if ( ! state . getBlocks ( ) . isEmpty ( ) ) { int start = state . getLastDelta ( ) . getPrevious ( ) . getLastIndex ( ) + 1 ; int end = Math . min ( start + contextSize , state . getPrevious ( ) . size ( ) ) ; state . getBlocks ( ) . peek ( ) . addAll ( this . < E , F > getUnmodifiedElements ( state . getPrevious ( ) , start , end ) ) ; }
public class UIInput { /** * Executes validation logic . */ private void executeValidate ( FacesContext context ) { } }
try { validate ( context ) ; } catch ( RuntimeException e ) { context . renderResponse ( ) ; throw e ; } if ( ! isValid ( ) ) { context . validationFailed ( ) ; context . renderResponse ( ) ; }
public class XsdAsmInterfaces { /** * Creates the inner classes that are used to support the sequence behaviour . * @ param typeName The name of the next type to return . * @ param className The name of the class which contains the sequence . * @ param apiName The name of the generated fluent interface . * @ return The { @ link ClassWriter } object which represents the inner class created to support sequence behaviour . */ private ClassWriter generateInnerSequenceClass ( String typeName , String className , String apiName ) { } }
ClassWriter classWriter = generateClass ( typeName , JAVA_OBJECT , new String [ ] { CUSTOM_ATTRIBUTE_GROUP } , getClassSignature ( new String [ ] { CUSTOM_ATTRIBUTE_GROUP } , typeName , apiName ) , ACC_PUBLIC + ACC_SUPER , apiName ) ; generateClassMethods ( classWriter , typeName , className , apiName , false ) ; return classWriter ;
public class DefaultNameProvider { /** * Turns a SQL table by its name into a java class name ( upper - case camel - case ) . * Example : WEB _ USERS - & gt ; WebUsers * @ param table The database table * @ return The java class name */ @ Override public String getClassNameForTable ( final Table table ) { } }
final StringBuilder classNameBuilder = new StringBuilder ( ) ; final List < String > words = new ArrayList < > ( Arrays . asList ( table . getName ( ) . toUpperCase ( ) . replaceAll ( "(^[0-9]+|[^A-Z0-9_-])" , "" ) // Delete every not - alphanumeric or _ / - character and numbers at beginning . split ( "_" ) ) ) ; words . removeAll ( Arrays . asList ( "" , null ) ) ; for ( String word : words ) { classNameBuilder . append ( word . substring ( 0 , 1 ) ) ; // First letter as uppercase classNameBuilder . append ( word . substring ( 1 ) . toLowerCase ( ) ) ; // Remaining string as lowercase } return classNameBuilder . toString ( ) ;
public class PubSubRealization { /** * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . impl . interfaces . DestinationHandler # deletePubSubOutputHandler ( com . ibm . ws . sib . utils . SIBUuid8) */ public synchronized void deletePubSubOutputHandler ( SIBUuid8 neighbourUUID ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "deletePubSubOutputHandler" , neighbourUUID ) ; // If we ' re being deleted then the output handlers are removed // through a different path , otherwise we may make a stream // unflushable by removing the only link to the downstream node // capable of flushing the stream . if ( _baseDestinationHandler . isToBeDeleted ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "deletePubSubOutputHandler" ) ; return ; } _pubsubOutputHandlerLockManager . lockExclusive ( ) ; // Remove the PubSubOutputHandler from the list of output handlers available . _pubsubOutputHandlers . remove ( neighbourUUID ) ; _pubsubOutputHandlerLockManager . unlockExclusive ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "deletePubSubOutputHandler" ) ;
public class ConditionalFunctions { /** * Returned expression results in NegInf if expression1 = expression2 , otherwise returns expression1. * Returns MISSING or NULL if either input is MISSING or NULL . */ public static Expression negInfIf ( Expression expression1 , Expression expression2 ) { } }
return x ( "NEGINFIF(" + expression1 . toString ( ) + ", " + expression2 . toString ( ) + ")" ) ;
public class SelectorDisplayer {
    /**
     * Reacts to a selection change in the selector widget: clears any existing
     * filter on the first data column, then either applies a new filter for the
     * selected value or shows the "select" hint when nothing is selected.
     */
    public void onItemSelected() {
        // Reset the current filter (if any)
        DataColumn firstColumn = dataSet.getColumnByIndex(0);
        String firstColumnId = firstColumn.getId();
        List<Integer> currentFilter = filterIndexes(firstColumnId);
        if (currentFilter != null && !currentFilter.isEmpty()) {
            filterReset();
        }
        // Column display name used by the hint messages below.
        ColumnSettings columnSettings = displayerSettings.getColumnSettings(firstColumn);
        String firstColumnName = columnSettings.getColumnName();
        String selected = view.getSelectedId();
        if (selected != null) {
            // Filter by the selected value (if any); the view supplies the
            // selected row index as a string.
            filterUpdate(firstColumnId, Integer.parseInt(selected));
            view.showResetHint(firstColumnName);
        } else {
            view.showSelectHint(firstColumnName);
        }
    }
}
public class DialogImpl {
    /**
     * Sends a TC-UNI (unstructured/unidirectional) message for this dialog.
     * Only valid for unstructured dialogs; the dialog is released after the
     * send attempt (success or failure).
     *
     * @see org.restcomm.protocols.ss7.tcap.api.tc.dialog.Dialog#sendUni()
     * @param event the TC-UNI request carrying context name, user info and components
     * @throws TCAPSendException if the dialog is structured or encoding/sending fails
     */
    public void send(TCUniRequest event) throws TCAPSendException {
        // Preview mode: messages are consumed locally, nothing goes on the wire.
        if (this.previewMode)
            return;

        if (this.isStructured()) {
            throw new TCAPSendException("Structured dialogs do not use Uni");
        }
        try {
            this.dialogLock.lock();
            TCUniMessageImpl msg = (TCUniMessageImpl) TcapFactory.createTCUniMessage();
            // Attach a dialog portion only when an application context is supplied.
            if (event.getApplicationContextName() != null) {
                DialogPortion dp = TcapFactory.createDialogPortion();
                DialogUniAPDU apdu = TcapFactory.createDialogAPDUUni();
                apdu.setDoNotSendProtocolVersion(doNotSendProtocolVersion());
                apdu.setApplicationContextName(event.getApplicationContextName());
                if (event.getUserInformation() != null) {
                    apdu.setUserInformation(event.getUserInformation());
                }
                dp.setUnidirectional(true);
                dp.setDialogAPDU(apdu);
                msg.setDialogPortion(dp);
            }
            // Bundle any components scheduled on this dialog into the message.
            if (this.scheduledComponentList.size() > 0) {
                Component[] componentsToSend = new Component[this.scheduledComponentList.size()];
                this.prepareComponents(componentsToSend);
                msg.setComponent(componentsToSend);
            }
            AsnOutputStream aos = new AsnOutputStream();
            try {
                msg.encode(aos);
                if (this.provider.getStack().getStatisticsEnabled()) {
                    this.provider.getStack().getCounterProviderImpl().updateTcUniSentCount(this);
                }
                this.provider.send(aos.toByteArray(), event.getReturnMessageOnError(), this.remoteAddress,
                        this.localAddress, this.seqControl, this.networkId, this.localSsn, this.remotePc);
                // Components are cleared only after a successful send.
                this.scheduledComponentList.clear();
            } catch (Exception e) {
                if (logger.isEnabledFor(Level.ERROR)) {
                    logger.error("Failed to send message: ", e);
                }
                throw new TCAPSendException("Failed to send TC-Uni message: " + e.getMessage(), e);
            } finally {
                // A UNI dialog is one-shot: release it whether or not the send worked.
                release();
            }
        } finally {
            this.dialogLock.unlock();
        }
    }
}
public class RemoveUnusedCode {
    /**
     * Marks any remaining unused parameters as unused so the information can be
     * used elsewhere.
     *
     * @param paramList   list of function's parameters
     * @param fparamScope the scope in which the parameters are declared
     */
    private void markUnusedParameters(Node paramList, Scope fparamScope) {
        for (Node param = paramList.getFirstChild(); param != null; param = param.getNext()) {
            if (param.isUnusedParameter()) {
                // Already marked; nothing to do.
                continue;
            }
            Node lValue = nameOfParam(param);
            if (lValue == null) {
                // Parameter form with no simple name (can't analyze); skip.
                continue;
            }
            VarInfo varInfo = traverseNameNode(lValue, fparamScope);
            if (varInfo.isRemovable()) {
                param.setUnusedParameter(true);
                compiler.reportChangeToEnclosingScope(paramList);
            }
        }
    }
}
public class MigrationOperation {
    /**
     * Notifies services that migration started, invokes all sent migration
     * tasks and updates the replica versions. Sets {@code success} /
     * {@code failureReason} as side effects.
     */
    private void doRun() {
        if (migrationInfo.startProcessing()) {
            try {
                // Before-migration hooks run only for the first fragment.
                if (firstFragment) {
                    executeBeforeMigrations();
                }
                for (Operation migrationOperation : fragmentMigrationState.getMigrationOperations()) {
                    runMigrationOperation(migrationOperation);
                }
                success = true;
            } catch (Throwable e) {
                // Record the failure; afterMigrate() below inspects success/failureReason.
                failureReason = e;
                getLogger().severe("Error while executing replication operations " + migrationInfo, e);
            } finally {
                afterMigrate();
            }
        } else {
            // Migration was concurrently cancelled before we could start.
            logMigrationCancelled();
        }
    }
}
public class KeyPath { /** * For a given key and depth , returns how much the depth should be incremented by when * resolving a keypath to children . * This can be 0 or 2 when there is a globstar and the next key either matches or doesn ' t match * the current key . */ @ RestrictTo ( RestrictTo . Scope . LIBRARY ) public int incrementDepthBy ( String key , int depth ) { } }
if ( isContainer ( key ) ) { // If it ' s a container then we added programatically and it isn ' t a part of the keypath . return 0 ; } if ( ! keys . get ( depth ) . equals ( "**" ) ) { // If it ' s not a globstar then it is part of the keypath . return 1 ; } if ( depth == keys . size ( ) - 1 ) { // The last key is a globstar . return 0 ; } if ( keys . get ( depth + 1 ) . equals ( key ) ) { // We are a globstar and the next key is our current key so consume both . return 2 ; } return 0 ;
public class Utils { /** * Convert an AWT color to a hex color string , such as # 00000 */ public static String colorToHex ( Color color ) { } }
return String . format ( "#%02x%02x%02x" , color . getRed ( ) , color . getGreen ( ) , color . getBlue ( ) ) ;
public class JsHdrsImpl {
    /**
     * Gets the contents of the ReverseRoutingPath field from the message header.
     * The List returned is a copy of the header field, so no updates to it
     * affect the Message header itself.
     * Javadoc description supplied by SIBusMessage interface.
     */
    public final List<SIDestinationAddress> getReverseRoutingPath() {
        // Pull the four parallel header fields that together encode the path.
        // The casts are unchecked: the JMF header schema guarantees the types.
        List<String> fNames = (List<String>) getHdr2().getField(JsHdr2Access.REVERSEROUTINGPATH_DESTINATIONNAME);
        List<byte[]> fMEs = (List<byte[]>) getHdr2().getField(JsHdr2Access.REVERSEROUTINGPATH_MEID);
        byte[] fLos = (byte[]) getHdr2().getField(JsHdr2Access.REVERSEROUTINGPATHLOCALONLY);
        List<String> fBuses = (List<String>) getHdr2().getField(JsHdr2Access.REVERSEROUTINGPATH_BUSNAME);
        // NOTE(review): RoutingPathList takes (names, localOnly, meIds, buses) —
        // fLos and fMEs are deliberately in a different order than retrieved;
        // presumably this matches the constructor signature — confirm.
        return new RoutingPathList(fNames, fLos, fMEs, fBuses);
    }
}
public class Messenger { /** * Can be called for forcing conversation loading in background * @ param peer conversation ' s peer */ @ ObjectiveCName ( "onConversationPreLoadWithPeer:" ) public void onConversationPreLoad ( @ NotNull Peer peer ) { } }
modules . getEvents ( ) . post ( new PeerChatPreload ( peer ) ) ;
public class ZooSession { /** * Opens the database at the given location . * The database is created if it does not exist . * By default databases are created in % USER _ HOME % / zoodb . * @ param dbName The database name or path * @ return ZooRollingSession object */ public static final ZooSession open ( String dbName ) { } }
if ( ! ZooHelper . dbExists ( dbName ) ) { // create database // By default , all database files will be created in % USER _ HOME % / zoodb ZooHelper . createDb ( dbName ) ; } return new ZooSession ( dbName ) ;
public class PreferencesFxUtils { /** * Returns a list of all the settings which are contained in a list of { @ code groups } * recursively . */ public static List < Setting > groupsToSettings ( List < Group > groups ) { } }
return groups . stream ( ) . map ( Group :: getSettings ) . flatMap ( Collection :: stream ) . collect ( Collectors . toList ( ) ) ;
public class StaticJAASConfiguration { /** * { @ inheritDoc } * @ see javax . security . auth . login . Configuration # getAppConfigurationEntry ( java . lang . String ) */ @ Override public AppConfigurationEntry [ ] getAppConfigurationEntry ( String name ) { } }
return new AppConfigurationEntry [ ] { new AppConfigurationEntry ( "com.sun.security.auth.module.Krb5LoginModule" , LoginModuleControlFlag . REQUIRED , this . options ) } ;
public class ISUPMessageFactoryImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . isup . ISUPMessageFactory # createCGBA ( int ) */ @ Override public CircuitGroupBlockingAckMessage createCGBA ( int cic ) { } }
CircuitGroupBlockingAckMessage msg = createCGBA ( ) ; CircuitIdentificationCode code = this . parameterFactory . createCircuitIdentificationCode ( ) ; code . setCIC ( cic ) ; msg . setCircuitIdentificationCode ( code ) ; return msg ;
public class KubernetesAssistant { /** * Awaits at most 5 minutes until all pods meets the given predicate . * @ param filter used to wait to detect that a pod is up and running . */ public void awaitPodReadinessOrFail ( Predicate < Pod > filter ) { } }
await ( ) . atMost ( 5 , TimeUnit . MINUTES ) . until ( ( ) -> { List < Pod > list = client . pods ( ) . inNamespace ( namespace ) . list ( ) . getItems ( ) ; return list . stream ( ) . filter ( filter ) . filter ( Readiness :: isPodReady ) . collect ( Collectors . toList ( ) ) . size ( ) >= 1 ; } ) ;
public class DeprecationUtil { /** * Returns { @ code true } if the given member is contained in a deprecated member . * @ param member the member to be checked */ public static boolean isTransitivelyDeprecatedMember ( JvmMember member ) { } }
EObject container = member ; while ( container instanceof JvmMember ) { if ( isDeprecatedMember ( ( JvmMember ) container ) ) { return true ; } container = container . eContainer ( ) ; } return false ;
public class StyleUtilities { /** * Collect all { @ link ExternalGraphic } s from the given { @ link Rule } . * @ param rule the rule to check . * @ return the extracted { @ link ExternalGraphic } s . */ public static List < ExternalGraphic > externalGraphicsFromRule ( Rule rule ) { } }
List < ExternalGraphic > gList = new ArrayList < ExternalGraphic > ( ) ; List < Symbolizer > symbolizers = rule . symbolizers ( ) ; if ( symbolizers . size ( ) != 0 ) { for ( Symbolizer symbolizer : symbolizers ) { Graphic [ ] graphics = new Graphic [ 2 ] ; if ( symbolizer instanceof PointSymbolizer ) { PointSymbolizer pointSymbolizer = ( PointSymbolizer ) symbolizer ; graphics [ 0 ] = pointSymbolizer . getGraphic ( ) ; } else if ( symbolizer instanceof LineSymbolizer ) { LineSymbolizer lineSymbolizer = ( LineSymbolizer ) symbolizer ; Stroke stroke = lineSymbolizer . getStroke ( ) ; graphics [ 0 ] = stroke . getGraphicStroke ( ) ; } else if ( symbolizer instanceof PolygonSymbolizer ) { PolygonSymbolizer polygonSymbolizer = ( PolygonSymbolizer ) symbolizer ; Stroke stroke = polygonSymbolizer . getStroke ( ) ; if ( stroke != null ) graphics [ 0 ] = stroke . getGraphicStroke ( ) ; Fill fill = polygonSymbolizer . getFill ( ) ; if ( fill != null ) graphics [ 1 ] = fill . getGraphicFill ( ) ; } for ( int i = 0 ; i < graphics . length ; i ++ ) { if ( graphics [ i ] != null ) { for ( GraphicalSymbol gs : graphics [ i ] . graphicalSymbols ( ) ) { if ( ( gs != null ) && ( gs instanceof ExternalGraphic ) ) { ExternalGraphic externalGraphic = ( ExternalGraphic ) gs ; gList . add ( externalGraphic ) ; } } } } } return gList ; } return Collections . emptyList ( ) ;
public class BaseEnvelopeSchemaConverter { /** * Get the schema of a field * @ param record the input record which has the schema id * @ param schemaIdLocation a dot separated location string the schema id * @ return a schema referenced by the schema id */ protected Schema getFieldSchema ( GenericRecord record , String schemaIdLocation ) throws Exception { } }
Optional < Object > schemaIdValue = AvroUtils . getFieldValue ( record , schemaIdLocation ) ; if ( ! schemaIdValue . isPresent ( ) ) { throw new Exception ( "Schema id with key " + schemaIdLocation + " not found in the record" ) ; } String schemaKey = String . valueOf ( schemaIdValue . get ( ) ) ; return ( Schema ) registry . getSchemaByKey ( schemaKey ) ;
public class BoardPanel { /** * Adds an item to draw in a particular position * @ param coordinates the position of the item * @ param item the drawable element */ public void addItem ( Point coordinates , Drawable item ) { } }
assertEDT ( ) ; if ( coordinates == null || item == null ) { throw new IllegalArgumentException ( "Coordinates and added item cannot be null" ) ; } log . trace ( "[addItem] New item added @ {}" , coordinates ) ; getPanelAt ( coordinates ) . addModel ( item ) ; getPanelAt ( coordinates ) . repaint ( ) ;
public class DockerImage {
    /**
     * Prepares the AQL query used to fetch all the manifest layers from
     * Artifactory. Needed for build-info sha1/md5 checksums for each artifact
     * and dependency.
     *
     * @param includeVirtualRepos whether the result projection should also
     *        include the "virtual_repos" field
     * @return the AQL query string
     * @throws IOException if the layer digests cannot be read from the manifest
     */
    private String getAqlQuery(boolean includeVirtualRepos) throws IOException {
        List<String> layersDigest = DockerUtils.getLayersDigests(manifest);
        // items.find({"path":"<imagePath>","$or":[ <one clause per layer> ]})
        StringBuilder aqlRequestForDockerSha = new StringBuilder("items.find({")
                .append("\"path\":\"").append(imagePath).append("\",\"$or\":[");
        List<String> layersQuery = new ArrayList<String>();
        for (String digest : layersDigest) {
            String shaVersion = DockerUtils.getShaVersion(digest);
            String shaValue = DockerUtils.getShaValue(digest);
            // Default: match by layer file name derived from the digest.
            String singleFileQuery = String.format("{\"name\": \"%s\"}", DockerUtils.digestToFileName(digest));
            if (StringUtils.equalsIgnoreCase(shaVersion, "sha1")) {
                // sha1 digests can be matched directly on the checksum field.
                singleFileQuery = String.format("{\"actual_sha1\": \"%s\"}", shaValue);
            }
            layersQuery.add(singleFileQuery);
        }
        aqlRequestForDockerSha.append(StringUtils.join(layersQuery, ","));
        if (includeVirtualRepos) {
            aqlRequestForDockerSha.append("]}).include(\"name\",\"repo\",\"path\",\"actual_sha1\",\"virtual_repos\")");
        } else {
            aqlRequestForDockerSha.append("]}).include(\"name\",\"repo\",\"path\",\"actual_sha1\")");
        }
        return aqlRequestForDockerSha.toString();
    }
}
public class ScalingPolicy { /** * The CloudWatch alarms related to the policy . * @ param alarms * The CloudWatch alarms related to the policy . */ public void setAlarms ( java . util . Collection < Alarm > alarms ) { } }
if ( alarms == null ) { this . alarms = null ; return ; } this . alarms = new com . amazonaws . internal . SdkInternalList < Alarm > ( alarms ) ;
public class OagBuilder {
    /**
     * Computes the "induced dependency" relation IDP (definition 2).
     * The relation contains pairs of AttributeOccurrences.
     *
     * @param dp dependency relation, one graph per production
     * @return the induced dependency graphs, one per production
     */
    public Graph<AttributeOccurrence>[] createIDP(Graph<AttributeOccurrence>[] dp) {
        Graph<AttributeOccurrence>[] idp;
        Graph<AttributeOccurrence>[] idpClosure; // transitive closure of idp, maintained incrementally
        boolean[] touched;                       // productions whose closure changed since the last pass
        int p;
        int q;
        AttributeOccurrence left;
        AttributeOccurrence right;
        int symbol;
        int ofs;
        boolean modified;
        AttributeOccurrence newLeft;
        AttributeOccurrence newRight;
        EdgeIterator<AttributeOccurrence> iter;
        // Initialize idp = dp and seed the per-production closures.
        idp = new Graph[dp.length];
        idpClosure = new Graph[dp.length];
        touched = new boolean[dp.length];
        for (p = 0; p < idp.length; p++) {
            idp[p] = new Graph<AttributeOccurrence>();
            idp[p].addGraph(dp[p]);
            idpClosure[p] = new Graph<AttributeOccurrence>();
            idpClosure[p].addGraph(dp[p]);
            idpClosure[p].closureHere();
            touched[p] = true;
        }
        // Fixed-point iteration: whenever a closure contains an edge between
        // two occurrences of the same symbol, project that edge into every
        // production that uses the symbol, until nothing changes.
        do {
            modified = false;
            for (q = 0; q < idp.length; q++) {
                if (touched[q]) {
                    touched[q] = false;
                    iter = idpClosure[q].edges();
                    while (iter.step()) {
                        left = iter.left();
                        right = iter.right();
                        if (left.sameSymbolOccurrence(right)) {
                            for (p = 0; p < idp.length; p++) {
                                for (ofs = 0; ofs <= grammar.getLength(p); ofs++) {
                                    symbol = semantics.getGrammar().getSymbol(p, ofs);
                                    if (symbol == left.attr.symbol) {
                                        // ofs - 1 converts the production offset into
                                        // the occurrence index used by AttributeOccurrence.
                                        newLeft = new AttributeOccurrence(left.attr, ofs - 1);
                                        newRight = new AttributeOccurrence(right.attr, ofs - 1);
                                        if (idp[p].addEdge(newLeft, newRight)) {
                                            idpClosure[p].addEdge(newLeft, newRight);
                                            idpClosure[p].closureHere();
                                            touched[p] = true;
                                            modified = true;
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        } while (modified);
        return idp;
    }
}
public class Node {
    /**
     * Sets whether this is a synthetic block that should not be considered
     * a real source block.
     *
     * @param val true to mark this block as synthetic
     */
    public final void setIsSyntheticBlock(boolean val) {
        // Only BLOCK nodes may carry the synthetic-block property.
        checkState(token == Token.BLOCK);
        putBooleanProp(Prop.SYNTHETIC, val);
    }
}
public class CompactionQueue { /** * Compacts a row into a single { @ link KeyValue } . * @ param row The row containing all the KVs to compact . * Must contain at least one element . * @ return A compacted version of this row . */ KeyValue compact ( final ArrayList < KeyValue > row , List < Annotation > annotations , List < HistogramDataPoint > histograms ) { } }
final KeyValue [ ] compacted = { null } ; compact ( row , compacted , annotations , histograms ) ; return compacted [ 0 ] ;
public class StringUtil { /** * Converts the specified byte value to a two digit hex string . * @ param b The < code > byte < / code > value to convert to a string . * @ return The two digit hexadecimal representation of < code > b < / code > . */ public static String toHex ( byte b ) { } }
final char [ ] string = { hexDigits [ ( b >> 4 ) & 0x0f ] , hexDigits [ b & 0x0f ] } ; return new String ( string ) ;
public class Matrix { /** * Creates a new Matrix that stores the result of { @ code A + c } * @ param c the scalar to add to each value in < i > this < / i > * @ param threadPool the source of threads to do computation in parallel * @ return { @ code A + B } */ public Matrix add ( double c , ExecutorService threadPool ) { } }
Matrix toReturn = getThisSideMatrix ( null ) ; toReturn . mutableAdd ( c , threadPool ) ; return toReturn ;
public class StoredPaymentChannelClientStates { /** * < p > Removes the channel with the given id from this set of stored states and notifies the wallet of an update to * this wallet extension . < / p > * < p > Note that the channel will still have its contract and refund transactions broadcast via the connected * { @ link TransactionBroadcaster } as long as this { @ link StoredPaymentChannelClientStates } continues to * exist in memory . < / p > */ void removeChannel ( StoredClientChannel channel ) { } }
lock . lock ( ) ; try { mapChannels . remove ( channel . id , channel ) ; } finally { lock . unlock ( ) ; } updatedChannel ( channel ) ;
public class CmsJspStatusBean { /** * Returns the localized resource string for a given message key . < p > * For a detailed parameter description , see { @ link CmsJspStatusBean # key ( String ) } . < p > * @ param keyName the key for the desired string * @ param defaultKeyName the default key for the desired string , used if the keyName delivered no resource string * @ return the resource string for the given key */ public String key ( String keyName , String defaultKeyName ) { } }
String value = getMessages ( ) . key ( keyName , getLocalizeParameters ( ) ) ; if ( value . startsWith ( CmsMessages . UNKNOWN_KEY_EXTENSION ) && CmsStringUtil . isNotEmpty ( defaultKeyName ) ) { value = getMessages ( ) . key ( defaultKeyName , getLocalizeParameters ( ) ) ; } return CmsStringUtil . escapeHtml ( value ) ;
public class AuthHelper { /** * Find a { @ link Principal } for the given credential * @ param key * @ param value * @ return principal */ public static < T > Principal getPrincipalForCredential ( final PropertyKey < T > key , final T value ) { } }
return getPrincipalForCredential ( key , value , false ) ;
public class CPRuleUserSegmentRelLocalServiceUtil { /** * Deletes the cp rule user segment rel from the database . Also notifies the appropriate model listeners . * @ param cpRuleUserSegmentRel the cp rule user segment rel * @ return the cp rule user segment rel that was removed * @ throws PortalException */ public static com . liferay . commerce . product . model . CPRuleUserSegmentRel deleteCPRuleUserSegmentRel ( com . liferay . commerce . product . model . CPRuleUserSegmentRel cpRuleUserSegmentRel ) throws com . liferay . portal . kernel . exception . PortalException { } }
return getService ( ) . deleteCPRuleUserSegmentRel ( cpRuleUserSegmentRel ) ;
public class KMeansDriver {
    /**
     * Main worker thread.
     *
     * Builds a KMeansModel: initializes centers, runs Lloyd's iterations until
     * convergence or the iteration cap, and — when {@code _estimate_k} is set —
     * grows k from 1 upward, splitting the largest cluster each round and
     * stopping when the relative improvement in total within-cluster sum of
     * squares falls below a cutoff. Scores the training (and optional
     * validation) frame at the end, then unlocks the model.
     */
    @Override
    public void computeImpl() {
        KMeansModel model = null;
        // Scratch DKV key holding the best output seen so far during auto-k search.
        Key bestOutputKey = Key.make();
        try {
            init(true);
            // Do lock even before checking the errors, since this block is finalized by unlock
            // (not the best solution, but the code is more readable)
            // Something goes wrong
            if (error_count() > 0)
                throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(KMeans.this);
            // The model to be built
            // Set fold_column to null and will be added back into model parameter after
            String fold_column = _parms._fold_column;
            _parms._fold_column = null;
            model = new KMeansModel(dest(), _parms, new KMeansModel.KMeansOutput(KMeans.this));
            model.delete_and_lock(_job);
            // Auto-k starts from a single centroid; otherwise start at the requested k.
            int startK = _parms._estimate_k ? 1 : _parms._k;
            final Vec vecs[] = _train.vecs();
            // mults & means for standardization
            final double[] means = _train.means(); // means are used to impute NAs
            final double[] mults = _parms._standardize ? _train.mults() : null;
            // Per-column categorical imputation value, or -1 for numeric columns.
            final int[] impute_cat = new int[vecs.length];
            for (int i = 0; i < vecs.length; i++)
                impute_cat[i] = vecs[i].isCategorical() ? DataInfo.imputeCat(vecs[i], true) : -1;
            model._output._normSub = means;
            model._output._normMul = mults;
            model._output._mode = impute_cat;
            // Initialize cluster centers and standardize if requested
            double[][] centers = initial_centers(model, vecs, means, mults, impute_cat, startK);
            if (centers == null) return; // Stopped/cancelled during center-finding
            boolean work_unit_iter = !_parms._estimate_k;
            // Run the main KMeans Clustering loop
            // Stop after enough iterations or reassigned_count < TOLERANCE * num_rows
            double sum_squares = 0;
            // Convergence threshold for the outer (auto-k) loop; capped at 0.8.
            final double rel_improvement_cutoff =
                Math.min(0.02 + 10. / _train.numRows() + 2.5 / Math.pow(model._output.nfeatures(), 2), 0.8);
            if (_parms._estimate_k)
                Log.info("Cutoff for relative improvement in within_cluster_sum_of_squares: " + rel_improvement_cutoff);
            // Append a work column to the training vecs for cluster assignments.
            Vec[] vecs2 = Arrays.copyOf(vecs, vecs.length + 1);
            vecs2[vecs2.length - 1] = vecs2[0].makeCon(-1);
            for (int k = startK; k <= _parms._k; ++k) {
                Log.info("Running Lloyds iteration for " + k + " centroids.");
                model._output._iterations = 0; // Loop ends only when iterations > max_iterations with strict inequality
                double[][] lo = null, hi = null;
                boolean stop = false;
                do { // Lloyds algorithm
                    assert (centers.length == k);
                    LloydsIterationTask task =
                        new LloydsIterationTask(centers, means, mults, impute_cat, _isCats, k, hasWeightCol())
                            .doAll(vecs2); // 1 PASS OVER THE DATA
                    // Pick the max categorical level for cluster center
                    max_cats(task._cMeans, task._cats, _isCats);
                    // Handle the case where some centers go dry. Rescue only 1 cluster
                    // per iteration ('cause we only tracked the 1 worst row)
                    if (!_parms._estimate_k && cleanupBadClusters(task, vecs, centers, means, mults, impute_cat))
                        continue;
                    // Compute model stats; update standardized cluster centers
                    centers = computeStatsFillModel(task, model, vecs, means, mults, impute_cat, k);
                    if (model._parms._score_each_iteration)
                        Log.info(model._output._model_summary);
                    lo = task._lo;
                    hi = task._hi;
                    if (work_unit_iter) {
                        model.update(_job); // Update model in K/V store
                        _job.update(1); // 1 more Lloyds iteration
                    }
                    // Converge when few rows moved cluster, the iteration cap was hit,
                    // or the job was cancelled.
                    stop = (task._reassigned_count < Math.max(1, train().numRows() * TOLERANCE)
                            || model._output._iterations >= _parms._max_iterations
                            || stop_requested());
                    if (stop) {
                        if (model._output._iterations < _parms._max_iterations)
                            Log.info("Lloyds converged after " + model._output._iterations + " iterations.");
                        else
                            Log.info("Lloyds stopped after " + model._output._iterations + " iterations.");
                    }
                } while (!stop);
                double sum_squares_now = model._output._tot_withinss;
                double rel_improvement;
                if (sum_squares == 0) {
                    rel_improvement = 1;
                } else {
                    rel_improvement = (sum_squares - sum_squares_now) / sum_squares;
                }
                Log.info("Relative improvement in total withinss: " + rel_improvement);
                sum_squares = sum_squares_now;
                if (_parms._estimate_k && k > 1) {
                    boolean outerConverged = rel_improvement < rel_improvement_cutoff;
                    if (outerConverged) {
                        // Improvement stalled: roll back to the best output stored in DKV.
                        KMeansModel.KMeansOutput best = DKV.getGet(bestOutputKey);
                        model._output = best;
                        Log.info("Converged. Retrieving the best model with k="
                                 + model._output._k[model._output._k.length - 1]);
                        break;
                    }
                }
                if (!work_unit_iter) {
                    DKV.put(bestOutputKey, IcedUtils.deepCopy(model._output)); // store a clone to avoid sharing the state between DKV and here
                    model.update(_job); // Update model in K/V store
                    _job.update(1); // 1 more round for auto-clustering
                }
                if (lo != null && hi != null && _parms._estimate_k)
                    centers = splitLargestCluster(centers, lo, hi, means, mults, impute_cat, vecs2, k);
            } // k-finder
            vecs2[vecs2.length - 1].remove();
            // Create metrics by scoring on training set otherwise scores are based on last Lloyd iteration
            model.score(_train).delete();
            model._output._training_metrics = ModelMetrics.getFromDKV(model, _train);
            Log.info(model._output._model_summary);
            Log.info(model._output._scoring_history);
            Log.info(((ModelMetricsClustering) model._output._training_metrics).createCentroidStatsTable().toString());
            // At the end: validation scoring (no need to gather scoring history)
            if (_valid != null) {
                model.score(_parms.valid()).delete(); // this appends a ModelMetrics on the validation set
                model._output._validation_metrics = ModelMetrics.getFromDKV(model, _parms.valid());
            }
            // Restore the fold column that was stashed away before construction.
            model._parms._fold_column = fold_column;
            model.update(_job); // Update model in K/V store
        } finally {
            if (model != null) model.unlock(_job);
            DKV.remove(bestOutputKey);
        }
    }
}
public class ChronoIndex {
    /**
     * Creates the mapping and the reverse mapping between revision counters and
     * their real (chronological) index in the revision history, and appends the
     * generated information to the query buffer. The working {@code list} is
     * cleared afterwards.
     *
     * Only entries whose revision counter differs from their chronological
     * position are recorded; identical positions are omitted to keep the
     * mappings compact.
     */
    private void addToBuffer() {
        if (list != null && !list.isEmpty()) {
            ChronoIndexData info;
            // Real index in revision history mapped to RevisionCounter
            // Sorted by real index (time) in ascending order
            Collections.sort(list);
            StringBuilder reverseMapping = new StringBuilder();
            int size = list.size();
            for (int i = 1; i <= size; i++) {
                info = list.get(i - 1);
                // Record "position -> revisionCounter" only where they differ.
                if (info.getRevisionCounter() != i) {
                    if (reverseMapping.length() > 0) {
                        reverseMapping.append(" ");
                    }
                    reverseMapping.append(i);
                    reverseMapping.append(" ");
                    reverseMapping.append(info.getRevisionCounter());
                }
                // Remember the chronological index and flip the sort key for the
                // second sort below.
                info.setIndex(i);
                info.setSortFlag(false);
            }
            // RevisionCounter mapped to real index in revision history
            // Sorted by revisionCounters in ascending order
            Collections.sort(list);
            StringBuilder mapping = new StringBuilder();
            while (!list.isEmpty()) {
                info = list.remove(0);
                // Record "revisionCounter -> position" only where they differ.
                if (info.getRevisionCounter() != info.getIndex()) {
                    if (mapping.length() > 0) {
                        mapping.append(" ");
                    }
                    mapping.append(info.getRevisionCounter());
                    mapping.append(" ");
                    mapping.append(info.getIndex());
                }
            }
            if (mapping.length() > 0) {
                // sql == true -> emit SQL VALUES tuple syntax; otherwise emit a
                // quoted plain-text row terminated by a newline.
                boolean sql = !insertStatement.isEmpty();
                String val = (sql ? "(" : "") + articleID
                    + (sql ? ",'" : ",\"") + mapping.toString()
                    + (sql ? "','" : "\",\"") + reverseMapping.toString()
                    + (sql ? "')" : "\"");
                // Flush before exceeding the server's maximum packet size.
                if (buffer.length() + val.length() >= MAX_ALLOWED_PACKET) {
                    storeBuffer();
                }
                if (sql && buffer.length() > insertStatement.length()) {
                    buffer.append(",");
                }
                buffer.append(val);
                if (!sql) {
                    buffer.append("\n");
                }
            }
        }
    }
}
public class TasksInner { /** * Lists all the tasks for a specified container registry . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; TaskInner & gt ; object */ public Observable < Page < TaskInner > > listNextAsync ( final String nextPageLink ) { } }
return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < TaskInner > > , Page < TaskInner > > ( ) { @ Override public Page < TaskInner > call ( ServiceResponse < Page < TaskInner > > response ) { return response . body ( ) ; } } ) ;
public class PersistenceApi { /** * Execute Query * Execute query with defined criteria * @ param query Query defined with criteria ( required ) * @ return ApiResponse & lt ; QueryResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < QueryResponse > executeQueryPostWithHttpInfo ( Query query ) throws ApiException { } }
com . squareup . okhttp . Call call = executeQueryPostValidateBeforeCall ( query , null , null ) ; Type localVarReturnType = new TypeToken < QueryResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class RequestedAttributeTemplates { /** * Creates a { @ code RequestedAttribute } object for the given attribute name . * @ param name * the attribute name * @ param friendlyName * the attribute friendly name ( optional ) * @ param nameFormat * the name format ( defaults to { @ code urn : oasis : names : tc : SAML : 2.0 : attrname - format : uri } if the value is not * supplied ) * @ param isRequired * flag to tell whether the attribute is required * @ return a { @ code RequestedAttribute } object */ public static RequestedAttribute create ( String name , String friendlyName , String nameFormat , Boolean isRequired ) { } }
XMLObjectBuilder < ? extends XMLObject > builder = XMLObjectProviderRegistrySupport . getBuilderFactory ( ) . getBuilder ( RequestedAttribute . DEFAULT_ELEMENT_NAME ) ; Object object = builder . buildObject ( RequestedAttribute . DEFAULT_ELEMENT_NAME ) ; RequestedAttribute ra = RequestedAttribute . class . cast ( object ) ; ra . setName ( name ) ; ra . setFriendlyName ( friendlyName ) ; ra . setNameFormat ( nameFormat != null ? nameFormat : Attribute . URI_REFERENCE ) ; ra . setIsRequired ( isRequired ) ; return ra ;
public class ClientBwListConfigHandler { /** * Parses client B / W list filtering configuration in JSON format and applies the configuration . * @ param configJson Configuration object */ public void handleConfig ( JsonObject configJson ) { } }
try { JsonObject bwListConfigJson = getObject ( configJson , "clientBwList" ) ; ClientBwListDTO configDTO = new ClientBwListDTO ( ) ; configDTO . fromJson ( bwListConfigJson ) ; applyConfig ( configDTO ) ; } catch ( Exception e ) { LOGGER . warning ( "Could not apply client B/W list filtering." , e ) ; }
public class CatalystSerializableSerializer { /** * Dynamically created a reference factory for a pooled type . */ private ReferenceFactory < ? > createFactory ( final Constructor < ? > constructor ) { } }
return manager -> { try { return ( ReferenceCounted < ? > ) constructor . newInstance ( manager ) ; } catch ( InstantiationException | IllegalAccessException | InvocationTargetException e ) { throw new SerializationException ( "failed to instantiate reference" , e ) ; } } ;
public class StringGroovyMethods { /** * Iterates through the given CharSequence line by line , splitting each line using * the given separator Pattern . The list of tokens for each line is then passed to * the given closure . * @ param self a CharSequence * @ param pattern the regular expression Pattern for the delimiter * @ param closure a closure * @ return the last value returned by the closure * @ throws java . io . IOException if an error occurs * @ since 1.8.2 */ public static < T > T splitEachLine ( CharSequence self , Pattern pattern , @ ClosureParams ( value = FromString . class , options = "List<String>" ) Closure < T > closure ) throws IOException { } }
final List < String > list = readLines ( self ) ; T result = null ; for ( String line : list ) { List vals = Arrays . asList ( pattern . split ( line ) ) ; result = closure . call ( vals ) ; } return result ;
public class TemporalExtendedParameterDefinition { /** * Returns whether a parameter has the same id and value , and consistent * duration as specified by this extended parameter definition . * @ param parameter a < code > Parameter < / code > * @ return < code > true < / code > if < code > parameter < / code > has the same id and * value , and consistent duration as specified by this extended parameter * definition , or < code > false < / code > if not , or if < code > parameter < / code > * is < code > null < / code > . */ @ Override boolean getMatches ( Proposition proposition , Collection < String > propIds ) throws KnowledgeSourceReadException { } }
if ( ! super . getMatches ( proposition , propIds ) ) { return false ; } if ( ! ( proposition instanceof TemporalParameter ) ) { return false ; } TemporalParameter tp = ( TemporalParameter ) proposition ; if ( this . value != null ) { Value pValue = tp . getValue ( ) ; if ( this . value != pValue && ! this . value . equals ( pValue ) ) { return false ; } } return true ;
public class MailSender { /** * 通过JSON配置 * @ param jsonObject { @ link JSONObject } */ public static void config ( JSONObject jsonObject ) { } }
config ( jsonObject . getString ( "host" ) , jsonObject . getString ( "personal" ) , jsonObject . getString ( "from" ) , jsonObject . getString ( "key" ) , jsonObject . getInteger ( "port" ) ) ; setSslEnable ( jsonObject . getBoolean ( "ssl" ) ) ;
public class SafeCloseSmtpServer {
    /**
     * Handles an SMTP transaction, i.e. all activity between the initial
     * connect and the QUIT command, driving a simple request/response state
     * machine over the given streams.
     *
     * @param out output stream toward the client
     * @param input input stream from the client
     * @return the list of {@link SmtpMessage}s received during the transaction
     * @throws IOException on stream errors
     */
    private List<SmtpMessage> handleTransaction(PrintWriter out, BufferedReader input) throws IOException {
        // Initialize the state machine
        SmtpState smtpState = SmtpState.CONNECT;
        SmtpRequest smtpRequest = new SmtpRequest(SmtpActionType.CONNECT, "", smtpState);
        // Execute the connection request
        SmtpResponse smtpResponse = smtpRequest.execute();
        // Send initial response
        sendResponse(out, smtpResponse);
        smtpState = smtpResponse.getNextState();
        List<SmtpMessage> msgList = new ArrayList<>();
        SmtpMessage msg = new SmtpMessage();
        // Loop until the state machine returns to CONNECT (transaction over)
        // or the client closes the stream (readLine() == null).
        while (smtpState != SmtpState.CONNECT) {
            String line = input.readLine();
            if (line == null) {
                break;
            }
            // Create request from client input and current state
            SmtpRequest request = SmtpRequest.createRequest(line, smtpState);
            // Execute request and create response object
            SmtpResponse response = request.execute();
            // Move to next internal state
            smtpState = response.getNextState();
            // Send response to client
            sendResponse(out, response);
            // Store input in message
            String params = request.getParams();
            msg.store(response, params);
            // If message reception is complete save it and start a fresh one.
            if (smtpState == SmtpState.QUIT) {
                msgList.add(msg);
                msg = new SmtpMessage();
            }
        }
        return msgList;
    }
}
public class StringUtils { /** * < p > Check if a CharSequence ends with any of the provided case - sensitive suffixes . < / p > * < pre > * StringUtils . endsWithAny ( null , null ) = false * StringUtils . endsWithAny ( null , new String [ ] { " abc " } ) = false * StringUtils . endsWithAny ( " abcxyz " , null ) = false * StringUtils . endsWithAny ( " abcxyz " , new String [ ] { " " } ) = true * StringUtils . endsWithAny ( " abcxyz " , new String [ ] { " xyz " } ) = true * StringUtils . endsWithAny ( " abcxyz " , new String [ ] { null , " xyz " , " abc " } ) = true * StringUtils . endsWithAny ( " abcXYZ " , " def " , " XYZ " ) = true * StringUtils . endsWithAny ( " abcXYZ " , " def " , " xyz " ) = false * < / pre > * @ param sequence the CharSequence to check , may be null * @ param searchStrings the case - sensitive CharSequences to find , may be empty or contain { @ code null } * @ see StringUtils # endsWith ( CharSequence , CharSequence ) * @ return { @ code true } if the input { @ code sequence } is { @ code null } AND no { @ code searchStrings } are provided , or * the input { @ code sequence } ends in any of the provided case - sensitive { @ code searchStrings } . * @ since 3.0 */ public static boolean endsWithAny ( final CharSequence sequence , final CharSequence ... searchStrings ) { } }
if ( isEmpty ( sequence ) || ArrayUtils . isEmpty ( searchStrings ) ) { return false ; } for ( final CharSequence searchString : searchStrings ) { if ( endsWith ( sequence , searchString ) ) { return true ; } } return false ;
public class CommonOps_DSCC { /** * Sets every element in the matrix to the specified value . This can require a very large amount of * memory and might exceed the maximum array size < br > * < br > * A < sub > ij < / sub > = value * @ param A A matrix whose elements are about to be set . Modified . * @ param value The value each element will have . */ public static void fill ( DMatrixSparseCSC A , double value ) { } }
int N = A . numCols * A . numRows ; A . growMaxLength ( N , false ) ; A . col_idx [ 0 ] = 0 ; for ( int col = 0 ; col < A . numCols ; col ++ ) { int idx0 = A . col_idx [ col ] ; int idx1 = A . col_idx [ col + 1 ] = idx0 + A . numRows ; for ( int i = idx0 ; i < idx1 ; i ++ ) { A . nz_rows [ i ] = i - idx0 ; A . nz_values [ i ] = value ; } } A . nz_length = N ; A . indicesSorted = true ;
public class CpcSketch { /** * Returns the best estimate of the upper bound of the confidence interval given < i > kappa < / i > , * the number of standard deviations from the mean . * @ param kappa the given number of standard deviations from the mean : 1 , 2 or 3. * @ return the best estimate of the upper bound of the confidence interval given < i > kappa < / i > . */ public double getUpperBound ( final int kappa ) { } }
if ( mergeFlag ) { return CpcConfidence . getIconConfidenceUB ( lgK , numCoupons , kappa ) ; } return CpcConfidence . getHipConfidenceUB ( lgK , numCoupons , hipEstAccum , kappa ) ;
public class UrlBuilder { /** * Add a parameter . * @ param name * name of param * @ param value * value of param * @ return this to allow concatenation */ public UrlBuilder addParameter ( String name , String value ) { } }
if ( value == null ) { value = "" ; } params . put ( name , value ) ; return this ;
public class CellTypeProteinConcentration {
    /**
     * Setter for the {@code concentration} feature.
     *
     * Note: this method is UIMA-generated boilerplate (@generated); it performs
     * the standard feature-existence check and then writes the FS reference
     * through the low-level CAS API.
     *
     * @param v value to set into the feature
     * @generated
     */
    public void setConcentration(Concentration v) {
        // Guard: fail fast if the type system does not declare the feature.
        if (CellTypeProteinConcentration_Type.featOkTst
                && ((CellTypeProteinConcentration_Type) jcasType).casFeat_concentration == null)
            jcasType.jcas.throwFeatMissing("concentration", "ch.epfl.bbp.uima.types.CellTypeProteinConcentration");
        // Store the feature-structure reference via the low-level CAS interface.
        jcasType.ll_cas.ll_setRefValue(addr,
            ((CellTypeProteinConcentration_Type) jcasType).casFeatCode_concentration,
            jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class ClaimsUtils { /** * Replace the jose4j Map < String , Object > with a JsonObject */ private void replaceMapWithJsonObject ( String claimName , JwtClaims claimsSet ) { } }
try { Map < String , Object > map = claimsSet . getClaimValue ( claimName , Map . class ) ; JsonObjectBuilder builder = Json . createObjectBuilder ( ) ; for ( Map . Entry < String , Object > entry : map . entrySet ( ) ) { builder . add ( entry . getKey ( ) , entry . getValue ( ) . toString ( ) ) ; } JsonObject jsonObject = builder . build ( ) ; claimsSet . setClaim ( claimName , jsonObject ) ; } catch ( MalformedClaimException e ) { if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "The value for the claim [" + claimName + "] could not be convered to a Map: " + e . getLocalizedMessage ( ) ) ; } }
public class NotificationService { /** * Delete a notification . * @ param notification The notification to delete . * @ return True if the operation was successful . */ public boolean deleteNotification ( Notification notification ) { } }
boolean result = notification . canDelete ( ) ; if ( result ) { broker . callRPC ( "RGCWXQ ALRPP" , notification . getAlertId ( ) ) ; } return result ;
public class SchemaManager { /** * drop all schemas with the given authorisation */ void dropSchemas ( Grantee grantee , boolean cascade ) { } }
HsqlArrayList list = getSchemas ( grantee ) ; Iterator it = list . iterator ( ) ; while ( it . hasNext ( ) ) { Schema schema = ( Schema ) it . next ( ) ; dropSchema ( schema . name . name , cascade ) ; }
public class FFprobe {
    /**
     * Probes the given media path with ffprobe and parses the JSON output into
     * an {@link FFmpegProbeResult}.
     *
     * TODO Add Probe Inputstream
     *
     * @param mediaPath path or URL of the media to probe
     * @param userAgent optional User-Agent to pass to ffprobe (may be null)
     * @return the parsed probe result; never null
     * @throws IOException if running ffprobe or reading its output fails
     */
    public FFmpegProbeResult probe(String mediaPath, @Nullable String userAgent) throws IOException {
        checkIfFFprobe();
        ImmutableList.Builder<String> args = new ImmutableList.Builder<String>();
        // TODO Add :
        // .add("--show_packets")
        // .add("--show_frames")
        args.add(path).add("-v", "quiet");
        if (userAgent != null) {
            args.add("-user-agent", userAgent);
        }
        args.add("-print_format", "json")
            .add("-show_error")
            .add("-show_format")
            .add("-show_streams")
            .add(mediaPath);
        Process p = runFunc.run(args.build());
        try {
            Reader reader = wrapInReader(p);
            if (LOG.isDebugEnabled()) {
                // Tee the output through a logging filter when debugging.
                reader = new LoggingFilterReader(reader, LOG);
            }
            FFmpegProbeResult result = gson.fromJson(reader, FFmpegProbeResult.class);
            // Check the process exit status only after draining its output;
            // ffprobe reports errors in the JSON as well as the exit code.
            throwOnError(p);
            if (result == null) {
                throw new IllegalStateException("Gson returned null, which shouldn't happen :(");
            }
            return result;
        } finally {
            // Always reap the child process, even on parse/IO failure.
            p.destroy();
        }
    }
}
public class ChatApplet { /** * The main ( ) method acts as the applet ' s entry point when it is run * as a standalone application . It is ignored if the applet is run from * within an HTML page . */ public static void main ( String args [ ] ) { } }
BaseApplet . main ( args ) ; ChatApplet applet = ( ChatApplet ) ChatApplet . getSharedInstance ( ) ; if ( applet == null ) applet = new ChatApplet ( args ) ; new JBaseFrame ( "Test" , applet ) ;
public class BaseFileManager {
    /**
     * Waits for a period of inactivity before calling close().
     * The length of the period of inactivity is given by
     * {@code deferredCloseTimeout}.
     *
     * Spawns a daemon thread that sleeps (via wait) until no activity has been
     * observed for the timeout window, then disables deferral and closes the
     * manager. Renewed activity (an updated {@code lastUsedTime}) extends the
     * wait.
     */
    protected void deferredClose() {
        Thread t = new Thread(getClass().getName() + " DeferredClose") {
            @Override
            public void run() {
                try {
                    synchronized (BaseFileManager.this) {
                        long now = System.currentTimeMillis();
                        // Keep waiting while the manager has been used within the
                        // timeout window; each use pushes lastUsedTime forward.
                        while (now < lastUsedTime + deferredCloseTimeout) {
                            BaseFileManager.this.wait(lastUsedTime + deferredCloseTimeout - now);
                            now = System.currentTimeMillis();
                        }
                        // Disable further deferral before closing for real.
                        deferredCloseTimeout = 0;
                        close();
                    }
                } catch (InterruptedException e) {
                } catch (IOException e) {
                }
            }
        };
        // Daemon: must not keep the JVM alive just to run a deferred close.
        t.setDaemon(true);
        t.start();
    }
}
public class Expression { /** * Get the string value * @ param formatter * the CCS target * @ return the value */ String stringValue ( CssFormatter formatter ) { } }
String str ; try { formatter . addOutput ( ) ; appendTo ( formatter ) ; } catch ( Exception ex ) { throw createException ( ex ) ; } finally { str = formatter . releaseOutput ( ) ; } return str ;
public class LazyList { public static ListIterator listIterator ( Object list ) { } }
if ( list == null ) return Collections . EMPTY_LIST . listIterator ( ) ; if ( list instanceof List ) return ( ( List ) list ) . listIterator ( ) ; return getList ( list ) . listIterator ( ) ;
public class AVIMClient { /** * get AVIMClient instance by AVUser * @ param user * @ return */ public static AVIMClient getInstance ( AVUser user ) { } }
if ( null == user ) { return null ; } String clientId = user . getObjectId ( ) ; String sessionToken = user . getSessionToken ( ) ; if ( StringUtil . isEmpty ( clientId ) || StringUtil . isEmpty ( sessionToken ) ) { return null ; } AVIMClient client = getInstance ( clientId ) ; client . userSessionToken = sessionToken ; return client ;
public class AutoMlClient { /** * Gets a table spec . * < p > Sample code : * < pre > < code > * try ( AutoMlClient autoMlClient = AutoMlClient . create ( ) ) { * TableSpecName name = TableSpecName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ DATASET ] " , " [ TABLE _ SPEC ] " ) ; * TableSpec response = autoMlClient . getTableSpec ( name . toString ( ) ) ; * < / code > < / pre > * @ param name The resource name of the table spec to retrieve . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final TableSpec getTableSpec ( String name ) { } }
GetTableSpecRequest request = GetTableSpecRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return getTableSpec ( request ) ;
public class PushedNotification { /** * Filters a list of pushed notifications and returns only the ones that were successful . * @ param notifications a list of pushed notifications * @ return a filtered list containing only notifications that were succcessful */ public static List < PushedNotification > findSuccessfulNotifications ( List < PushedNotification > notifications ) { } }
List < PushedNotification > filteredList = new Vector < PushedNotification > ( ) ; for ( PushedNotification notification : notifications ) { if ( notification . isSuccessful ( ) ) filteredList . add ( notification ) ; } return filteredList ;
public class AsyncMutateInBuilder { /** * Insert a fragment , replacing the old value if the path exists . * @ param path the path where to insert ( or replace ) a dictionary value . * @ param fragment the new dictionary value to be applied . * @ param createPath true to create missing intermediary nodes . * @ deprecated Use { @ link # upsert ( String , Object , SubdocOptionsBuilder ) } instead . */ @ Deprecated public < T > AsyncMutateInBuilder upsert ( String path , T fragment , boolean createPath ) { } }
return upsert ( path , fragment , new SubdocOptionsBuilder ( ) . createPath ( createPath ) ) ;
public class IoUtil { /** * 拷贝流 , 使用NIO , 不会关闭流 * @ param in { @ link ReadableByteChannel } * @ param out { @ link WritableByteChannel } * @ param bufferSize 缓冲大小 , 如果小于等于0 , 使用默认 * @ return 拷贝的字节数 * @ throws IORuntimeException IO异常 * @ since 4.5.0 */ public static long copy ( ReadableByteChannel in , WritableByteChannel out , int bufferSize ) throws IORuntimeException { } }
return copy ( in , out , bufferSize , null ) ;
public class NamingTypeAttribute { /** * { @ link NamingType } に変換します 。 * @ return 対応する { @ link NamingType } */ public NamingType convertToEnum ( ) { } }
if ( NamingType . LOWER_CASE . name ( ) . equalsIgnoreCase ( value ) ) { return NamingType . LOWER_CASE ; } if ( NamingType . UPPER_CASE . name ( ) . equalsIgnoreCase ( value ) ) { return NamingType . UPPER_CASE ; } if ( NamingType . SNAKE_LOWER_CASE . name ( ) . equalsIgnoreCase ( value ) ) { return NamingType . SNAKE_LOWER_CASE ; } if ( NamingType . SNAKE_UPPER_CASE . name ( ) . equalsIgnoreCase ( value ) ) { return NamingType . SNAKE_UPPER_CASE ; } if ( NamingType . NONE . name ( ) . equalsIgnoreCase ( value ) ) { return NamingType . NONE ; } throw new AssertionError ( "unreachable." ) ;
public class ImageLoader { /** * Convert an image in to a raveled tensor of * the bgr values of the image * @ param image the image to parse * @ return the raveled tensor of bgr values */ public INDArray toRaveledTensor ( BufferedImage image ) { } }
try { image = scalingIfNeed ( image , false ) ; return toINDArrayBGR ( image ) . ravel ( ) ; } catch ( Exception e ) { throw new RuntimeException ( "Unable to load image" , e ) ; }
public class TiffITProfile {
    /**
     * Validates Binary Lineart (TIFF/IT BL) tag requirements for an IFD.
     *
     * Profile P1 (p == 1) tightens several constraints relative to the default
     * profile (p == 0): NewSubfileType, PhotometricInterpretation and
     * Orientation accept fewer values, and ResolutionUnit/DotRange become
     * mandatory.
     *
     * @param ifd the ifd to validate
     * @param p the profile (default = 0, P1 = 1)
     */
    private void validateIfdBL(IFD ifd, int p) {
        IfdTags metadata = ifd.getMetadata();
        // P1 requires an explicit NewSubfileType of 0.
        if (p == 1) {
            checkRequiredTag(metadata, "NewSubfileType", 1, new long[] { 0 });
        }
        checkRequiredTag(metadata, "ImageLength", 1);
        checkRequiredTag(metadata, "ImageWidth", 1);
        // Binary lineart: exactly 1 bit per sample.
        checkRequiredTag(metadata, "BitsPerSample", 1, new long[] { 1 });
        checkRequiredTag(metadata, "Compression", 1, new long[] { 32898 });
        // Default profile also allows WhiteIsZero/BlackIsZero (0 or 1); P1 only 0.
        if (p == 0) {
            checkRequiredTag(metadata, "PhotometricInterpretation", 1, new long[] { 0, 1 });
        } else {
            checkRequiredTag(metadata, "PhotometricInterpretation", 1, new long[] { 0 });
        }
        checkRequiredTag(metadata, "StripOffsets", 1);
        // Orientation: default profile allows 1, 4, 5, 8; P1 only the standard 1.
        if (p == 0) {
            checkRequiredTag(metadata, "Orientation", 1, new long[] { 1, 4, 5, 8 });
        } else {
            checkRequiredTag(metadata, "Orientation", 1, new long[] { 1 });
        }
        checkRequiredTag(metadata, "SamplesPerPixel", 1, new long[] { 1 });
        checkRequiredTag(metadata, "StripBYTECount", 1);
        checkRequiredTag(metadata, "XResolution", 1);
        checkRequiredTag(metadata, "YResolution", 1);
        // Extra mandatory tags under P1.
        if (p == 1) {
            checkRequiredTag(metadata, "ResolutionUnit", 1, new long[] { 2, 3 });
            checkRequiredTag(metadata, "DotRange", 2, new long[] { 0, 255 });
        }
        checkRequiredTag(metadata, "ImageColorIndicator", 1, new long[] { 0, 1, 2 });
        checkRequiredTag(metadata, "BackgroundColorIndicator", 1, new long[] { 0, 1, 2 });
    }
}
public class FogOfWar { /** * MapTileRenderer */ @ Override public void renderTile ( Graphic g , MapTile map , Tile tile , int x , int y ) { } }
final int tx = tile . getInTileX ( ) ; final int ty = tile . getInTileY ( ) ; final Tile fogTile = mapFogged . getTile ( tx , ty ) ; if ( fogMap && fogTile != null && fogTile . getNumber ( ) != MapTileFog . NO_FOG ) { fogTiles . setLocation ( x , y ) ; fogTiles . setTile ( fogTile . getNumber ( ) ) ; fogTiles . render ( g ) ; } final Tile hideTile = mapHidden . getTile ( tx , ty ) ; if ( hideMap && hideTile != null && hideTile . getNumber ( ) != MapTileFog . NO_FOG ) { hideTiles . setTile ( hideTile . getNumber ( ) ) ; hideTiles . setLocation ( x , y ) ; hideTiles . render ( g ) ; }
public class RuntimeMvcViewFactoryCreator { /** * / * ( non - Javadoc ) * @ see org . springframework . webflow . mvc . builder . MvcViewFactoryCreator # createMvcViewFactory ( org . springframework . binding . expression . Expression , org . springframework . binding . expression . ExpressionParser , org . springframework . binding . convert . ConversionService , org . springframework . webflow . engine . builder . BinderConfiguration ) */ @ Override protected AbstractMvcViewFactory createMvcViewFactory ( Expression viewId , ExpressionParser expressionParser , ConversionService conversionService , BinderConfiguration binderConfiguration ) { } }
return new RuntimeMvcViewFactory ( viewId , flowViewResolver , expressionParser , conversionService , binderConfiguration , messageCodesResolver ) ;
public class WaveformDetailComponent { /** * Clear the playback state stored for a player , such as when it has unloaded the track . * @ param player the player number whose playback state is no longer valid * @ since 0.5.0 */ public synchronized void clearPlaybackState ( int player ) { } }
PlaybackState oldFurthestState = getFurthestPlaybackState ( ) ; PlaybackState oldState = playbackStateMap . remove ( player ) ; repaintDueToPlaybackStateChange ( oldState , null , oldFurthestState ) ;
public class Tanimoto {
    /**
     * Calculates Tanimoto distance for two count fingerprints using method 2 {@cdk.cite Grant06}.
     *
     * Walks the populated bins of both fingerprints in parallel (a merge walk):
     * minSum accumulates min(count1, count2) over hashes present in both, while
     * maxSum accumulates max(count1, count2) over shared hashes plus the counts
     * of hashes unique to either fingerprint.
     *
     * @param fp1 count fingerprint 1
     * @param fp2 count fingerprint 2
     * @return a Tanimoto distance
     */
    public static double method2(ICountFingerprint fp1, ICountFingerprint fp2) {
        long maxSum = 0, minSum = 0;
        int i = 0, j = 0;
        // NOTE(review): presumably getHash(i) is non-decreasing in i for both
        // fingerprints — the merge walk relies on that; confirm against the
        // ICountFingerprint contract.
        while (i < fp1.numOfPopulatedbins() || j < fp2.numOfPopulatedbins()) {
            // null marks an exhausted side.
            Integer hash1 = i < fp1.numOfPopulatedbins() ? fp1.getHash(i) : null;
            Integer hash2 = j < fp2.numOfPopulatedbins() ? fp2.getHash(j) : null;
            Integer count1 = i < fp1.numOfPopulatedbins() ? fp1.getCount(i) : null;
            Integer count2 = j < fp2.numOfPopulatedbins() ? fp2.getCount(j) : null;
            // Hash present only in fp1 (fp2 exhausted, or fp1's hash is smaller).
            if (count2 == null || (hash1 != null && hash1 < hash2)) {
                maxSum += count1;
                i++;
                continue;
            }
            // Hash present only in fp2 (fp1 exhausted, or fp2's hash is smaller).
            if (count1 == null || (hash2 != null && hash1 > hash2)) {
                maxSum += count2;
                j++;
                continue;
            }
            // Hash present in both: advance both sides.
            if (hash1.equals(hash2)) {
                maxSum += Math.max(count1, count2);
                minSum += Math.min(count1, count2);
                i++;
                j++;
            }
        }
        // NOTE(review): when both fingerprints have no populated bins, maxSum is 0
        // and this returns NaN (0/0) — confirm whether callers expect that.
        return ((double) minSum) / maxSum;
    }
}
public class IteratorExtensions { /** * Returns { @ code true } if every element in { @ code iterator } satisfies the predicate . If { @ code iterator } is empty , * { @ code true } is returned . In other words , < code > false < / code > is returned if at least one element fails to fulfill * the predicate . * @ param iterator * the iterator . May not be < code > null < / code > . * @ param predicate * the predicate . May not be < code > null < / code > . * @ return < code > true < / code > if every element in { @ code iterator } satisfies the predicate and also if there is no element . */ public static < T > boolean forall ( Iterator < T > iterator , Function1 < ? super T , Boolean > predicate ) { } }
if ( predicate == null ) throw new NullPointerException ( "predicate" ) ; while ( iterator . hasNext ( ) ) { if ( ! predicate . apply ( iterator . next ( ) ) ) return false ; } return true ;
public class HystrixCommandExecutionHook {
    /**
     * Invoked when {@link HystrixInvokable} fails with an Exception. The returned
     * exception is the one that will propagate, so overrides may wrap or replace it.
     *
     * @param commandInstance The executing HystrixInvokable instance.
     * @param failureType {@link FailureType} enum representing which type of error
     * @param e exception object
     * @return the exception to propagate (the input, unchanged, by default)
     * @since 1.2
     */
    public <T> Exception onError(HystrixInvokable<T> commandInstance, FailureType failureType, Exception e) {
        return e; // by default , just pass through
    }
}
public class PersistHTTP { /** * Tests whether a given URI can be accessed using range - requests . * @ param uri resource identifier * @ return - 1 if range - requests are not supported , otherwise content length of the requested resource * @ throws IOException when communication fails */ long checkRangeSupport ( URI uri ) throws IOException { } }
HttpRequestBase req = createReq ( uri , true ) ; try ( CloseableHttpClient client = HttpClientBuilder . create ( ) . build ( ) ; CloseableHttpResponse response = client . execute ( req ) ) { Header acceptRangesHeader = response . getFirstHeader ( HttpHeaders . ACCEPT_RANGES ) ; Header contentLengthHeader = response . getFirstHeader ( HttpHeaders . CONTENT_LENGTH ) ; boolean acceptByteRange = ( acceptRangesHeader != null ) && "bytes" . equalsIgnoreCase ( acceptRangesHeader . getValue ( ) ) ; if ( ! acceptByteRange || contentLengthHeader == null ) { return - 1L ; } return Long . valueOf ( contentLengthHeader . getValue ( ) ) ; }
public class EigenvalueDecomposition { /** * Back transformation to get eigenvectors of original matrix . */ private void hqr2BackTransformation ( int nn , int low , int high ) { } }
for ( int j = nn - 1 ; j >= low ; j -- ) { final int last = j < high ? j : high ; for ( int i = low ; i <= high ; i ++ ) { final double [ ] Vi = V [ i ] ; double sum = 0. ; for ( int k = low ; k <= last ; k ++ ) { sum += Vi [ k ] * H [ k ] [ j ] ; } Vi [ j ] = sum ; } }
public class FSDataset {
    /**
     * Finalize the block in FSDataset: move the block file into its volume's
     * finalized location and publish the finalized entry in the volume map,
     * all under the dataset write lock.
     *
     * @param dstNamespaceId
     *            the namespace id for dstBlock
     * @param dstBlock
     *            the block that needs to be finalized
     * @param dstBlockFile
     *            the block file for the block that has to be finalized
     * @throws IOException if the block is unknown to the volume map or finalization fails
     */
    private void copyBlockLocalFinalize(int dstNamespaceId, Block dstBlock, File dstBlockFile) throws IOException {
        boolean inlineChecksum = Block.isInlineChecksumBlockFilename(dstBlockFile.getName());
        long blkSize = 0;
        long fileSize = dstBlockFile.length();
        lock.writeLock().lock();
        try {
            DatanodeBlockInfo info = volumeMap.get(dstNamespaceId, dstBlock);
            if (info == null) {
                throw new IOException("Could not find information for " + dstBlock);
            }
            if (inlineChecksum) {
                // Inline-checksum files store checksums alongside data, so the raw
                // file length must be converted back to the logical block size.
                blkSize = BlockInlineChecksumReader.getBlockSizeFromFileLength(fileSize, info.getChecksumType(), info.getBytesPerChecksum());
            } else {
                blkSize = fileSize;
            }
            FSVolume dstVol = info.getBlockDataFile().getVolume();
            // Finalize block on disk .
            File dest = dstVol.addBlock(dstNamespaceId, dstBlock, dstBlockFile, info.isInlineChecksum(), info.getChecksumType(), info.getBytesPerChecksum());
            // Replace the map entry with a finalized one, then drop the
            // ongoing-create record so the block is visible as complete.
            volumeMap.add(dstNamespaceId, dstBlock, new DatanodeBlockInfo(dstVol, dest, blkSize, true, inlineChecksum, info.getChecksumType(), info.getBytesPerChecksum(), false, 0));
            volumeMap.removeOngoingCreates(dstNamespaceId, dstBlock);
        } finally {
            lock.writeLock().unlock();
        }
    }
}
public class GqSessionFactoryImpl { /** * / * ( non - Javadoc ) * @ see org . jdiameter . api . auth . ClientAuthSessionListener # * doAuthAnswerEvent ( org . jdiameter . api . auth . ClientAuthSession , org . jdiameter . api . app . AppRequestEvent , org . jdiameter . api . app . AppAnswerEvent ) */ @ Override public void doAuthAnswerEvent ( ClientAuthSession appSession , AppRequestEvent request , AppAnswerEvent answer ) throws InternalException , IllegalDiameterStateException , RouteException , OverloadException { } }
logger . info ( "Diameter Gq AuthorizationSessionFactory :: doAuthAnswerEvent :: appSession[{}], Request[{}], Answer[{}]" , new Object [ ] { appSession , request , answer } ) ;
public class FileSystemWatcher { /** * Add source folders to monitor . Cannot be called after the watcher has been * { @ link # start ( ) started } . * @ param folders the folders to monitor */ public void addSourceFolders ( Iterable < File > folders ) { } }
Assert . notNull ( folders , "Folders must not be null" ) ; synchronized ( this . monitor ) { for ( File folder : folders ) { addSourceFolder ( folder ) ; } }
public class OracleNoSQLClient {
    /**
     * Read an embeddable attribute: for each column of the embeddable type that
     * is eligible for selection, copy its value from {@code key} into the record value.
     *
     * @param key
     *            the key
     * @param columnsToSelect
     *            the columns to select
     * @param entityMetadata
     *            the entity metadata
     * @param metamodel
     *            the metamodel
     * @param schemaTable
     *            the schema table
     * @param value
     *            the value
     * @param attribute
     *            the attribute
     */
    private void readEmbeddable(Object key, List<String> columnsToSelect, EntityMetadata entityMetadata, MetamodelImpl metamodel, Table schemaTable, RecordValue value, Attribute attribute) {
        // Resolve the embeddable's JPA metamodel from the attribute's bindable type.
        EmbeddableType embeddableId = metamodel.embeddable(((AbstractAttribute) attribute).getBindableJavaType());
        Set<Attribute> embeddedAttributes = embeddableId.getAttributes();
        for (Attribute embeddedAttrib : embeddedAttributes) {
            String columnName = ((AbstractAttribute) embeddedAttrib).getJPAColumnName();
            // Extract the embedded field's value from the key object via reflection.
            Object embeddedColumn = PropertyAccessorHelper.getObject(key, (Field) embeddedAttrib.getJavaMember());
            // either null or empty or contains that column
            if (eligibleToFetch(columnsToSelect, columnName)) {
                NoSqlDBUtils.add(schemaTable.getField(columnName), value, embeddedColumn, columnName);
            }
        }
    }
}
public class CommercePriceListUserSegmentEntryRelLocalServiceUtil {
    /**
     * Returns all the commerce price list user segment entry rels matching the UUID and company.
     *
     * @param uuid the UUID of the commerce price list user segment entry rels
     * @param companyId the primary key of the company
     * @return the matching commerce price list user segment entry rels, or an empty list if no matches were found
     */
    public static java.util.List<com.liferay.commerce.price.list.model.CommercePriceListUserSegmentEntryRel> getCommercePriceListUserSegmentEntryRelsByUuidAndCompanyId(String uuid, long companyId) {
        // Static facade: delegate straight to the registered local service.
        return getService().getCommercePriceListUserSegmentEntryRelsByUuidAndCompanyId(uuid, companyId);
    }
}
public class VoiceApi { /** * Send EventUserEvent to T - Server with the provided attached data . For details about EventUserEvent , refer to the * [ * Genesys Events and Models Reference Manual * ] ( https : / / docs . genesys . com / Documentation / System / Current / GenEM / SpecialEvents ) . * @ param userData The data to send . This is an array of objects with the properties key , type , and value . * @ param callUuid The universally unique identifier associated with the call . ( optional ) * @ param connId The connection ID for the call . This value comes from the Tlib event . ( optional ) */ public void sendUserEvent ( KeyValueCollection userData , String callUuid , String connId ) throws WorkspaceApiException { } }
try { SendUserEventDataData sendUserEventData = new SendUserEventDataData ( ) ; sendUserEventData . setUserData ( Util . toKVList ( userData ) ) ; sendUserEventData . setCallUuid ( callUuid ) ; sendUserEventData . setConnId ( connId ) ; SendUserEventData data = new SendUserEventData ( ) ; data . data ( sendUserEventData ) ; ApiSuccessResponse response = this . voiceApi . sendUserEvent ( data ) ; throwIfNotOk ( "sendUserEvent" , response ) ; } catch ( ApiException e ) { throw new WorkspaceApiException ( "sendUserEvent failed." , e ) ; }
public class Reflecter { /** * Loops the object ' s property and value * @ param decision * @ return */ public < K , V > Reflecter < T > keyValLoop ( final Decision < Triple < K , Field , V > > decision ) { } }
fieldLoop ( new Decisional < Field > ( ) { @ SuppressWarnings ( "unchecked" ) @ Override protected void decision ( Field input ) { decision . apply ( ( Triple < K , Field , V > ) Triple . of ( input . getName ( ) , input , getPropVal ( input , input . getName ( ) ) ) ) ; } } ) ; return this ;
public class ClassFile { /** * Adds a public , no - arg constructor with the code buffer properly defined . */ public MethodInfo addDefaultConstructor ( ) { } }
MethodInfo mi = addConstructor ( Modifiers . PUBLIC , null ) ; CodeBuilder builder = new CodeBuilder ( mi ) ; builder . loadThis ( ) ; builder . invokeSuperConstructor ( null ) ; builder . returnVoid ( ) ; return mi ;
public class InstanceFactory {
    /**
     * Gets an instance of the specified type from the IoC container. Throws an
     * exception if the IoC container is not configured properly or no instance
     * of the type can be found in it.
     *
     * @param <T> the type of the object
     * @param beanClass the class of the object
     * @return an instance of type T
     */
    public static <T> T getInstance(Class<T> beanClass) {
        // Unchecked cast: the provider resolves by the requested class, so the result is a T.
        return (T) getInstanceProvider().getInstance(beanClass);
    }
}
public class SynchronizationContext {
    /**
     * Run all tasks in the queue in the current thread, if no other thread is running this method.
     * Otherwise do nothing.
     *
     * <p>Upon returning, it guarantees that all tasks submitted by {@code #executeLater} before it
     * have been or will eventually be run, while not requiring any more calls to {@code drain()}.
     */
    public final void drain() {
        do {
            // Claim the drainer role via CAS; if another thread holds it, that
            // thread is responsible for the queued tasks and we return at once.
            if (!drainingThread.compareAndSet(null, Thread.currentThread())) {
                return;
            }
            try {
                Runnable runnable;
                while ((runnable = queue.poll()) != null) {
                    try {
                        runnable.run();
                    } catch (Throwable t) {
                        // A failing task must not abort the drain of the remaining tasks.
                        uncaughtExceptionHandler.uncaughtException(Thread.currentThread(), t);
                    }
                }
            } finally {
                drainingThread.set(null);
            }
            // must check queue again here to catch any added prior to clearing drainingThread
        } while (!queue.isEmpty());
    }
}
public class JavacState {
    /**
     * Save the javac_state file, but only if something changed since it was last written.
     *
     * @throws IOException if the state file cannot be written
     */
    public void save() throws IOException {
        if (!needsSaving)
            return;
        try (FileWriter out = new FileWriter(javacStateFilename)) {
            StringBuilder b = new StringBuilder();
            long millisNow = System.currentTimeMillis();
            Date d = new Date(millisNow);
            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss SSS");
            // Header: version, generation timestamp, and a legend for the
            // one-letter line prefixes used in the body below.
            b.append("# javac_state ver 0.3 generated " + millisNow + " " + df.format(d) + "\n");
            b.append("# This format might change at any time. Please do not depend on it.\n");
            b.append("# M module\n");
            b.append("# P package\n");
            b.append("# S C source_tobe_compiled timestamp\n");
            b.append("# S L link_only_source timestamp\n");
            b.append("# G C generated_source timestamp\n");
            b.append("# A artifact timestamp\n");
            b.append("# D dependency\n");
            b.append("# I pubapi\n");
            b.append("# R arguments\n");
            b.append("R ").append(theArgs).append("\n");
            // Copy over the javac_state for the packages that did not need recompilation.
            now.copyPackagesExcept(prev, recompiledPackages, new HashSet<String>());
            // Save the packages, ie package names, dependencies, pubapis and artifacts!
            // I.e. the lot.
            Module.saveModules(now.modules(), b);
            String s = b.toString();
            out.write(s, 0, s.length());
        }
    }
}
public class CreateWSDL20 { /** * AddInterfaceOperationType Method . */ public void addInterfaceOperationType ( String strVersion , InterfaceType interfaceType , MessageProcessInfo recMessageProcessInfo ) { } }
InterfaceOperationType interfaceOperationType = wsdlFactory . createInterfaceOperationType ( ) ; interfaceType . getOperationOrFaultOrFeature ( ) . add ( wsdlFactory . createInterfaceTypeOperation ( interfaceOperationType ) ) ; String name = this . fixName ( recMessageProcessInfo . getField ( MessageProcessInfo . DESCRIPTION ) . toString ( ) ) ; interfaceOperationType . setName ( name ) ; String pattern = this . getURIProperty ( MESSAGE_PATTERN_URI ) ; interfaceOperationType . setPattern ( pattern ) ; boolean safe = SAFE_DEFAULT ; String safeValue = ( ( PropertiesField ) recMessageProcessInfo . getField ( MessageProcessInfo . PROPERTIES ) ) . getProperty ( MessageProcessInfo . SAFE ) ; if ( safeValue != null ) safe = Boolean . parseBoolean ( safeValue ) ; interfaceOperationType . setSafe ( safe ) ; String style = ( ( PropertiesField ) recMessageProcessInfo . getField ( MessageProcessInfo . PROPERTIES ) ) . getProperty ( MESSAGE_STYLE_URI ) ; if ( style == null ) style = this . getURIProperty ( MESSAGE_STYLE_URI ) ; interfaceOperationType . setStyle ( style ) ; MessageInfo recMessageInfo = this . getMessageIn ( recMessageProcessInfo ) ; if ( recMessageInfo != null ) { MessageRefType messageRefType = wsdlFactory . createMessageRefType ( ) ; JAXBElement < MessageRefType > interfaceOperationTypeInput = wsdlFactory . createInterfaceOperationTypeInput ( messageRefType ) ; interfaceOperationType . getInputOrOutputOrInfault ( ) . add ( interfaceOperationTypeInput ) ; name = this . fixName ( recMessageInfo . getField ( MessageInfo . DESCRIPTION ) . toString ( ) ) ; String code = recMessageInfo . getField ( MessageInfo . CODE ) . toString ( ) ; if ( code == null ) code = name ; String element = ( ( PropertiesField ) recMessageInfo . getField ( MessageInfo . MESSAGE_PROPERTIES ) ) . getProperty ( MessageInfo . ELEMENT ) ; if ( element == null ) if ( code != null ) element = code ; String messageLabel = "In" ; messageRefType . setElement ( element ) ; messageRefType . 
setMessageLabel ( messageLabel ) ; messageRefType = wsdlFactory . createMessageRefType ( ) ; } recMessageInfo = this . getMessageOut ( recMessageProcessInfo ) ; if ( recMessageInfo != null ) { MessageRefType messageRefType = wsdlFactory . createMessageRefType ( ) ; JAXBElement < MessageRefType > interfaceOperationTypeOutput = wsdlFactory . createInterfaceOperationTypeOutput ( messageRefType ) ; interfaceOperationType . getInputOrOutputOrInfault ( ) . add ( interfaceOperationTypeOutput ) ; name = this . fixName ( recMessageInfo . getField ( MessageInfo . DESCRIPTION ) . toString ( ) ) ; String code = recMessageInfo . getField ( MessageInfo . CODE ) . toString ( ) ; if ( code == null ) code = name ; String element = ( ( PropertiesField ) recMessageInfo . getField ( MessageInfo . MESSAGE_PROPERTIES ) ) . getProperty ( MessageInfo . ELEMENT ) ; if ( element == null ) if ( code != null ) element = code ; String messageLabel = "Out" ; messageRefType . setElement ( element ) ; messageRefType . setMessageLabel ( messageLabel ) ; }
public class LinkedServersInner { /** * Adds a linked server to the Redis cache ( requires Premium SKU ) . * @ param resourceGroupName The name of the resource group . * @ param name The name of the Redis cache . * @ param linkedServerName The name of the linked server that is being added to the Redis cache . * @ param parameters Parameters supplied to the Create Linked server operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < RedisLinkedServerWithPropertiesInner > createAsync ( String resourceGroupName , String name , String linkedServerName , RedisLinkedServerCreateParameters parameters ) { } }
return createWithServiceResponseAsync ( resourceGroupName , name , linkedServerName , parameters ) . map ( new Func1 < ServiceResponse < RedisLinkedServerWithPropertiesInner > , RedisLinkedServerWithPropertiesInner > ( ) { @ Override public RedisLinkedServerWithPropertiesInner call ( ServiceResponse < RedisLinkedServerWithPropertiesInner > response ) { return response . body ( ) ; } } ) ;
public class SaneEnums { /** * Returns a set of { @ code T } obtained by treating { @ code wireValue } as a bit vector whose bits * represent the wire values of the enum constants of the given { @ code enumType } . */ public static < T extends Enum < T > & SaneEnum > Set < T > enumSet ( Class < T > enumType , int wireValue ) { } }
T [ ] enumConstants = enumType . getEnumConstants ( ) ; List < T > values = Lists . newArrayListWithCapacity ( enumConstants . length ) ; for ( T value : enumConstants ) { if ( ( wireValue & value . getWireValue ( ) ) != 0 ) { values . add ( value ) ; } } return Sets . immutableEnumSet ( values ) ;