signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class systemmemory_stats { /** * < pre > * converts nitro response into object and returns the object array in case of get request . * < / pre > */ protected base_resource [ ] get_nitro_response ( nitro_service service , String response ) throws Exception { } }
systemmemory_stats [ ] resources = new systemmemory_stats [ 1 ] ; systemmemory_response result = ( systemmemory_response ) service . get_payload_formatter ( ) . string_to_resource ( systemmemory_response . class , response ) ; if ( result . errorcode != 0 ) { if ( result . errorcode == 444 ) { service . clear_session ( ) ; } if ( result . severity != null ) { if ( result . severity . equals ( "ERROR" ) ) throw new nitro_exception ( result . message , result . errorcode ) ; } else { throw new nitro_exception ( result . message , result . errorcode ) ; } } resources [ 0 ] = result . systemmemory ; return resources ;
public class ShanksAgentBayesianReasoningCapability { /** * Add information to the Bayesian network to reason with it . * @ param agent * @ param nodeName * @ param status * @ throws ShanksException */ public static void addEvidence ( BayesianReasonerShanksAgent agent , String nodeName , String status ) throws ShanksException { } }
ShanksAgentBayesianReasoningCapability . addEvidence ( agent . getBayesianNetwork ( ) , nodeName , status ) ;
public class VariableEvaluator { /** * Attempt to evaluate a variable expression . * @ param expr the expression string ( for example , " x + 0 " ) * @ param context the context for evaluation * @ return the result , or null if evaluation fails */ @ FFDCIgnore ( { } }
NumberFormatException . class , ArithmeticException . class , ConfigEvaluatorException . class } ) private String tryEvaluateExpression ( String expr , final EvaluationContext context , final boolean ignoreWarnings ) { try { return new ConfigExpressionEvaluator ( ) { @ Override String getProperty ( String argName ) throws ConfigEvaluatorException { return VariableEvaluator . this . getProperty ( argName , context , ignoreWarnings , false ) ; } @ Override Object getPropertyObject ( String argName ) throws ConfigEvaluatorException { return VariableEvaluator . this . getPropertyObject ( argName , context , ignoreWarnings , false ) ; } } . evaluateExpression ( expr ) ; } catch ( NumberFormatException e ) { // $ { 0 + string } , or a number that exceeds MAX _ LONG . } catch ( ArithmeticException e ) { // $ { x / 0} } catch ( ConfigEvaluatorException e ) { // If getPropertyObject fails } return null ;
public class GenericIHEAuditEventMessage { /** * Adds a Participant Object representing a document for XDS Exports * @ param documentUniqueId The Document Entry Unique Id * @ param repositoryUniqueId The Repository Unique Id of the Repository housing the document * @ param homeCommunityId The Home Community Id */ public void addDocumentParticipantObject ( String documentUniqueId , String repositoryUniqueId , String homeCommunityId ) { } }
List < TypeValuePairType > tvp = new LinkedList < > ( ) ; // SEK - 10/19/2011 - added check for empty or null , RE : Issue Tracker artifact artf2295 ( was Issue 135) if ( ! EventUtils . isEmptyOrNull ( repositoryUniqueId ) ) { tvp . add ( getTypeValuePair ( "Repository Unique Id" , repositoryUniqueId . getBytes ( ) ) ) ; } if ( ! EventUtils . isEmptyOrNull ( homeCommunityId ) ) { tvp . add ( getTypeValuePair ( "ihe:homeCommunityID" , homeCommunityId . getBytes ( ) ) ) ; } addParticipantObjectIdentification ( new RFC3881ParticipantObjectCodes . RFC3881ParticipantObjectIDTypeCodes . ReportNumber ( ) , null , null , tvp , documentUniqueId , RFC3881ParticipantObjectTypeCodes . SYSTEM , RFC3881ParticipantObjectTypeRoleCodes . REPORT , null , null ) ;
public class WARCInputFormat { /** * Opens a WARC file ( possibly compressed ) for reading , and returns a RecordReader for accessing it . */ @ Override public RecordReader < LongWritable , WARCWritable > createRecordReader ( InputSplit split , TaskAttemptContext context ) throws IOException , InterruptedException { } }
return new WARCReader ( ) ;
public class DistributionList { /** * A complex type that contains one < code > DistributionSummary < / code > element for each distribution that was created * by the current AWS account . * @ param items * A complex type that contains one < code > DistributionSummary < / code > element for each distribution that was * created by the current AWS account . */ public void setItems ( java . util . Collection < DistributionSummary > items ) { } }
if ( items == null ) { this . items = null ; return ; } this . items = new com . amazonaws . internal . SdkInternalList < DistributionSummary > ( items ) ;
public class XMLContentHandler { /** * Depending on the element , which has to be in the correct namespace , the method adds a property to the node or * removes completed nodes from the node stack . */ @ Override public void endElement ( final String uri , final String localName , final String qName ) throws SAXException { } }
LOG . trace ( "endElement uri={} localName={} qName={}" , uri , localName , qName ) ; if ( this . isNotInkstandNamespace ( uri ) ) { return ; } switch ( localName ) { case "rootNode" : LOG . debug ( "Closing rootNode" ) ; this . nodeStack . pop ( ) ; break ; case "node" : LOG . debug ( "Closing node" ) ; this . nodeStack . pop ( ) ; break ; case "mixin" : LOG . debug ( "Closing mixin" ) ; break ; case "property" : this . endElementProperty ( ) ; break ; default : break ; }
public class Cap { /** * Returns the value of this product under the given model . * @ param evaluationTime Evaluation time . * @ param model The model . * @ return Value of this product und the given model . */ public double getValueAsPrice ( double evaluationTime , AnalyticModel model ) { } }
ForwardCurve forwardCurve = model . getForwardCurve ( forwardCurveName ) ; DiscountCurve discountCurve = model . getDiscountCurve ( discountCurveName ) ; DiscountCurve discountCurveForForward = null ; if ( forwardCurve == null && forwardCurveName != null && forwardCurveName . length ( ) > 0 ) { // User might like to get forward from discount curve . discountCurveForForward = model . getDiscountCurve ( forwardCurveName ) ; if ( discountCurveForForward == null ) { // User specified a name for the forward curve , but no curve was found . throw new IllegalArgumentException ( "No curve of the name " + forwardCurveName + " was found in the model." ) ; } } double value = 0.0 ; for ( int periodIndex = 0 ; periodIndex < schedule . getNumberOfPeriods ( ) ; periodIndex ++ ) { double fixingDate = schedule . getFixing ( periodIndex ) ; double paymentDate = schedule . getPayment ( periodIndex ) ; double periodLength = schedule . getPeriodLength ( periodIndex ) ; /* * We do not count empty periods . * Since empty periods are an indication for a ill - specified product , * it might be reasonable to throw an illegal argument exception instead . */ if ( periodLength == 0 ) { continue ; } double forward = 0.0 ; if ( forwardCurve != null ) { forward += forwardCurve . getForward ( model , fixingDate , paymentDate - fixingDate ) ; } else if ( discountCurveForForward != null ) { /* * Classical single curve case : using a discount curve as a forward curve . * This is only implemented for demonstration purposes ( an exception would also be appropriate : - ) */ if ( fixingDate != paymentDate ) { forward += ( discountCurveForForward . getDiscountFactor ( fixingDate ) / discountCurveForForward . getDiscountFactor ( paymentDate ) - 1.0 ) / ( paymentDate - fixingDate ) ; } } double discountFactor = paymentDate > evaluationTime ? discountCurve . 
getDiscountFactor ( model , paymentDate ) : 0.0 ; double payoffUnit = discountFactor * periodLength ; double effektiveStrike = strike ; if ( isStrikeMoneyness ) { effektiveStrike += getATMForward ( model , true ) ; } VolatilitySurface volatilitySurface = model . getVolatilitySurface ( volatiltiySufaceName ) ; if ( volatilitySurface == null ) { throw new IllegalArgumentException ( "Volatility surface not found in model: " + volatiltiySufaceName ) ; } if ( volatilitySurface . getQuotingConvention ( ) == QuotingConvention . VOLATILITYLOGNORMAL ) { double volatility = volatilitySurface . getValue ( model , fixingDate , effektiveStrike , VolatilitySurface . QuotingConvention . VOLATILITYLOGNORMAL ) ; value += AnalyticFormulas . blackScholesGeneralizedOptionValue ( forward , volatility , fixingDate , effektiveStrike , payoffUnit ) ; } else { // Default to normal volatility as quoting convention double volatility = volatilitySurface . getValue ( model , fixingDate , effektiveStrike , VolatilitySurface . QuotingConvention . VOLATILITYNORMAL ) ; value += AnalyticFormulas . bachelierOptionValue ( forward , volatility , fixingDate , effektiveStrike , payoffUnit ) ; } } return value / discountCurve . getDiscountFactor ( model , evaluationTime ) ;
public class GerritQueryHandler { /** * Runs the query and returns the result as a list of JSON formatted strings . * @ param queryString the query . * @ param getPatchSets if all patch - sets of the projects found should be included in the result . * Meaning if - - patch - sets should be appended to the command call . * @ param getCurrentPatchSet if the current patch - set for the projects found should be included in the result . * Meaning if - - current - patch - set should be appended to the command call . * @ param getFiles if the files of the patch sets should be included in the result . * Meaning if - - files should be appended to the command call . * @ param getCommitMessage if full commit message should be included in the result . * Meaning if - - commit - message should be appended to the command call . * @ return a List of JSON formatted strings . * @ throws SshException if there is an error in the SSH Connection . * @ throws IOException for some other IO problem . */ public List < String > queryJson ( String queryString , boolean getPatchSets , boolean getCurrentPatchSet , boolean getFiles , boolean getCommitMessage ) throws SshException , IOException { } }
final List < String > list = new LinkedList < String > ( ) ; try { runQuery ( queryString , getPatchSets , getCurrentPatchSet , getFiles , getCommitMessage , false , new LineVisitor ( ) { @ Override public void visit ( String line ) { list . add ( line . trim ( ) ) ; } } ) ; } catch ( GerritQueryException gqe ) { logger . error ( "This should not have happened!" , gqe ) ; } return list ;
public class TransactionHelper { /** * Saves the given object in the database . * @ param < T > * type of given object obj * @ param obj * object to save * @ return saved object * @ throws Exception * save objects failed */ public final < T > T saveObject ( final T obj ) throws Exception { } }
return executeInTransaction ( new Runnable < T > ( ) { @ Override public T run ( final EntityManager entityManager ) { return persist ( obj , entityManager ) ; } } ) ;
public class ModelExt { /** * shot cache ' s name . * if current cacheName ! = the old cacheName , will reset old cache , update cache use the current cacheName and open syncToRedis . */ public void shotCacheName ( String cacheName ) { } }
// reset cache if ( StrKit . notBlank ( cacheName ) && ! cacheName . equals ( this . cacheName ) ) { GlobalSyncRedis . removeSyncCache ( this . cacheName ) ; } this . cacheName = cacheName ; // auto open sync to redis this . syncToRedis = true ; // update model redis mapping ModelRedisMapping . me ( ) . put ( this . tableName ( ) , this . cacheName ) ;
public class AddResourcesListener { /** * Make sure jQuery is loaded before jQueryUI , and that every other Javascript * is loaded later . Also make sure that the BootsFaces resource files are loaded * prior to other resource files , giving the developer the opportunity to * overwrite a CSS or JS file . * @ param root The current UIViewRoot * @ param context The current FacesContext */ private void enforceCorrectLoadOrder ( UIViewRoot root , FacesContext context ) { } }
// / / first , handle the CSS files . // / / Put BootsFaces . css or BootsFaces . min . css first , // / / theme . css second // / / and everything else behind them . List < UIComponent > resources = new ArrayList < UIComponent > ( ) ; List < UIComponent > first = new ArrayList < UIComponent > ( ) ; List < UIComponent > middle = new ArrayList < UIComponent > ( ) ; List < UIComponent > last = new ArrayList < UIComponent > ( ) ; List < UIComponent > datatable = new ArrayList < UIComponent > ( ) ; for ( UIComponent resource : root . getComponentResources ( context , "head" ) ) { String name = ( String ) resource . getAttributes ( ) . get ( "name" ) ; String position = ( String ) resource . getAttributes ( ) . get ( "position" ) ; if ( "first" . equals ( position ) ) { first . add ( resource ) ; } else if ( "last" . equals ( position ) ) { last . add ( resource ) ; } else if ( "middle" . equals ( position ) ) { middle . add ( resource ) ; } else { if ( resource instanceof InternalJavaScriptResource ) { datatable . add ( resource ) ; } else if ( name != null && ( name . endsWith ( ".js" ) ) ) { if ( name . contains ( "dataTables" ) ) { datatable . add ( resource ) ; } else { resources . add ( resource ) ; } } } } // add the JavaScript files in correct order . Collections . sort ( resources , new ResourceFileComparator ( ) ) ; for ( UIComponent c : first ) { root . removeComponentResource ( context , c ) ; } for ( UIComponent c : resources ) { root . removeComponentResource ( context , c ) ; } for ( UIComponent c : last ) { root . removeComponentResource ( context , c ) ; } for ( UIComponent c : datatable ) { root . removeComponentResource ( context , c ) ; } // for ( UIComponent resource : root . getComponentResources ( context , " head " ) ) { // System . out . println ( resource . getClass ( ) . getName ( ) ) ; for ( UIComponent c : root . getComponentResources ( context , "head" ) ) { middle . add ( c ) ; } for ( UIComponent c : middle ) { root . 
removeComponentResource ( context , c ) ; } for ( UIComponent c : first ) { root . addComponentResource ( context , c , "head" ) ; } for ( UIComponent c : middle ) { root . addComponentResource ( context , c , "head" ) ; } for ( UIComponent c : resources ) { root . addComponentResource ( context , c , "head" ) ; } for ( UIComponent c : last ) { root . addComponentResource ( context , c , "head" ) ; } for ( UIComponent c : datatable ) { root . addComponentResource ( context , c , "head" ) ; }
public class IndexEvents { /** * Create an index entry . * @ param text The text for the Chunk . * @ param in1 The first level . * @ param in2 The second level . * @ return Returns the Chunk . */ public Chunk create ( final String text , final String in1 , final String in2 ) { } }
return create ( text , in1 , in2 , "" ) ;
public class FileMonitor { /** * Used to wait for transferred file ' s content length to stop changes for * 5 seconds * @ param aFile the file */ public static void waitFor ( File aFile ) { } }
if ( aFile == null ) return ; String path = aFile . getAbsolutePath ( ) ; long previousSize = IO . getFileSize ( path ) ; long currentSize = previousSize ; long sleepTime = Config . getPropertyLong ( "file.monitor.file.wait.time" , 100 ) . longValue ( ) ; while ( true ) { try { Thread . sleep ( sleepTime ) ; } catch ( InterruptedException e ) { } currentSize = IO . getFileSize ( path ) ; if ( currentSize == previousSize ) return ; previousSize = currentSize ; }
public class ArrayUtils { /** * Returns an { @ link Iterable } over the elements in the array . * @ param < T > Class type of the elements in the array . * @ param array array to iterate . * @ return an { @ link Iterable } over the elements in the array * or an empty { @ link Iterable } if the array is null or empty . * @ see java . lang . Iterable * @ see # asIterator ( Object [ ] ) */ @ NullSafe @ SafeVarargs public static < T > Iterable < T > asIterable ( T ... array ) { } }
return ( ) -> asIterator ( array ) ;
public class BlobStoreWriter { /** * Installs a finished blob into the store . */ public boolean install ( ) { } }
if ( tempFile == null ) return true ; // already installed // Move temp file to correct location in blob store : String destPath = store . getRawPathForKey ( blobKey ) ; File destPathFile = new File ( destPath ) ; if ( tempFile . renameTo ( destPathFile ) ) // If the move fails , assume it means a file with the same name already exists ; in that // case it must have the identical contents , so we ' re still OK . tempFile = null ; else cancel ( ) ; return true ;
public class ReportLastScan { /** * Generates a report . Defaults to HTML report if reportType is null . * @ param view * @ param model * @ param reportType */ public void generateReport ( ViewDelegate view , Model model , ReportType reportType ) { } }
// ZAP : Allow scan report file name to be specified final ReportType localReportType ; if ( reportType == null ) { localReportType = ReportType . HTML ; } else { localReportType = reportType ; } try { JFileChooser chooser = new WritableFileChooser ( Model . getSingleton ( ) . getOptionsParam ( ) . getUserDirectory ( ) ) ; chooser . setFileFilter ( new FileFilter ( ) { @ Override public boolean accept ( File file ) { if ( file . isDirectory ( ) ) { return true ; } else if ( file . isFile ( ) ) { String lcFileName = file . getName ( ) . toLowerCase ( Locale . ROOT ) ; switch ( localReportType ) { case XML : return lcFileName . endsWith ( XML_FILE_EXTENSION ) ; case MD : return lcFileName . endsWith ( MD_FILE_EXTENSION ) ; case JSON : return lcFileName . endsWith ( JSON_FILE_EXTENSION ) ; case HTML : default : return ( lcFileName . endsWith ( HTM_FILE_EXTENSION ) || lcFileName . endsWith ( HTML_FILE_EXTENSION ) ) ; } } return false ; } @ Override public String getDescription ( ) { switch ( localReportType ) { case XML : return Constant . messages . getString ( "file.format.xml" ) ; case MD : return Constant . messages . getString ( "file.format.md" ) ; case JSON : return Constant . messages . getString ( "file.format.json" ) ; case HTML : default : return Constant . messages . getString ( "file.format.html" ) ; } } } ) ; String fileExtension = "" ; switch ( localReportType ) { case XML : fileExtension = XML_FILE_EXTENSION ; break ; case JSON : fileExtension = JSON_FILE_EXTENSION ; break ; case MD : fileExtension = MD_FILE_EXTENSION ; break ; case HTML : default : fileExtension = HTML_FILE_EXTENSION ; break ; } chooser . setSelectedFile ( new File ( fileExtension ) ) ; // Default the filename to a reasonable extension ; int rc = chooser . showSaveDialog ( View . getSingleton ( ) . getMainFrame ( ) ) ; File file = null ; if ( rc == JFileChooser . APPROVE_OPTION ) { file = chooser . getSelectedFile ( ) ; File report = generate ( file . 
getAbsolutePath ( ) , model , localReportType ) ; if ( report == null ) { view . showMessageDialog ( Constant . messages . getString ( "report.unknown.error" , file . getAbsolutePath ( ) ) ) ; return ; } if ( Files . notExists ( report . toPath ( ) ) ) { logger . info ( "Not opening report, does not exist: " + report ) ; return ; } try { DesktopUtils . openUrlInBrowser ( report . toURI ( ) ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; view . showMessageDialog ( Constant . messages . getString ( "report.complete.warning" , report . getAbsolutePath ( ) ) ) ; } } } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; view . showWarningDialog ( Constant . messages . getString ( "report.unexpected.error" ) ) ; }
public class AbstractModelObject { /** * Convenience method to verify that the current request is a POST request . * @ deprecated * Use { @ link RequirePOST } on your method . */ @ Deprecated protected final void requirePOST ( ) throws ServletException { } }
StaplerRequest req = Stapler . getCurrentRequest ( ) ; if ( req == null ) return ; // invoked outside the context of servlet String method = req . getMethod ( ) ; if ( ! method . equalsIgnoreCase ( "POST" ) ) throw new ServletException ( "Must be POST, Can't be " + method ) ;
public class SubCommandMetaSyncVersion { /** * Parses command - line and synchronizes metadata versions across all * nodes . * @ param args Command - line input * @ param printHelp Tells whether to print help only or execute command * actually * @ throws IOException */ public static void executeCommand ( String [ ] args ) throws IOException { } }
OptionParser parser = getParser ( ) ; String url = null ; Boolean confirm = false ; // parse command - line input OptionSet options = parser . parse ( args ) ; if ( options . has ( AdminParserUtils . OPT_HELP ) ) { printHelp ( System . out ) ; return ; } AdminParserUtils . checkRequired ( options , AdminParserUtils . OPT_URL ) ; url = ( String ) options . valueOf ( AdminParserUtils . OPT_URL ) ; if ( options . has ( AdminParserUtils . OPT_CONFIRM ) ) { confirm = true ; } // print summary System . out . println ( "Synchronize metadata versions across all nodes" ) ; System . out . println ( "Location:" ) ; System . out . println ( " bootstrap url = " + url ) ; System . out . println ( " node = all nodes" ) ; AdminClient adminClient = AdminToolUtils . getAdminClient ( url ) ; AdminToolUtils . assertServerNotInRebalancingState ( adminClient ) ; Versioned < Properties > versionedProps = mergeAllVersions ( adminClient ) ; printVersions ( versionedProps ) ; // execute command if ( ! AdminToolUtils . askConfirm ( confirm , "do you want to synchronize metadata versions to all node" ) ) return ; adminClient . metadataMgmtOps . setMetadataVersion ( versionedProps ) ;
public class NmeaUtil { /** * Returns true if and only if the sentence ' s checksum matches the * calculated checksum . * @ param sentence * @ return */ public static boolean isValid ( String sentence ) { } }
// Compare the characters after the asterisk to the calculation try { return sentence . substring ( sentence . lastIndexOf ( "*" ) + 1 ) . equalsIgnoreCase ( getChecksum ( sentence ) ) ; } catch ( AisParseException e ) { return false ; }
public class CountingMemoryCache { /** * Removes all the items from the cache whose key matches the specified predicate . * @ param predicate returns true if an item with the given key should be removed * @ return number of the items removed from the cache */ public int removeAll ( Predicate < K > predicate ) { } }
ArrayList < Entry < K , V > > oldExclusives ; ArrayList < Entry < K , V > > oldEntries ; synchronized ( this ) { oldExclusives = mExclusiveEntries . removeAll ( predicate ) ; oldEntries = mCachedEntries . removeAll ( predicate ) ; makeOrphans ( oldEntries ) ; } maybeClose ( oldEntries ) ; maybeNotifyExclusiveEntryRemoval ( oldExclusives ) ; maybeUpdateCacheParams ( ) ; maybeEvictEntries ( ) ; return oldEntries . size ( ) ;
public class MapDotApi { /** * Get optional value by path . * @ param < T > optional value type * @ param clazz type of value * @ param map subject * @ param pathString nodes to walk in map * @ return value */ public static < T > Optional < Optional < T > > dotGetOptional ( final Map map , final String pathString , final Class < T > clazz ) { } }
return dotGet ( map , Optional . class , pathString ) . map ( opt -> ( Optional < T > ) opt ) ;
public class StubJournal { /** * @ Override * public void onSaveRequest ( Result < Void > result ) * if ( ! _ journal . saveStart ( ) ) { * if ( _ toPeerJournal ! = null ) { * _ toPeerJournal . saveStart ( ) ; */ @ Override public void onSaveEnd ( boolean isValid ) { } }
_journal . saveEnd ( isValid ) ; if ( _toPeerJournal != null ) { _toPeerJournal . saveEnd ( isValid ) ; }
public class JobInProgress { /** * Populate the data structures as a task is scheduled . * Assuming { @ link JobTracker } is locked on entry . * @ param tip The tip for which the task is added * @ param id The attempt - id for the task * @ param tts task - tracker status * @ param isScheduled Whether this task is scheduled from the JT or has * joined back upon restart */ synchronized void addRunningTaskToTIP ( TaskInProgress tip , TaskAttemptID id , TaskTrackerStatus tts , boolean isScheduled ) { } }
// Make an entry in the tip if the attempt is not scheduled i . e externally // added if ( ! isScheduled ) { tip . addRunningTask ( id , tts . getTrackerName ( ) ) ; } final JobTrackerInstrumentation metrics = jobtracker . getInstrumentation ( ) ; // keeping the earlier ordering intact String name ; String splits = "" ; Enum counter = null ; if ( tip . isJobSetupTask ( ) ) { launchedSetup = true ; name = Values . SETUP . name ( ) ; } else if ( tip . isJobCleanupTask ( ) ) { launchedCleanup = true ; name = Values . CLEANUP . name ( ) ; } else if ( tip . isMapTask ( ) ) { if ( firstMapStartTime == 0 ) { firstMapStartTime = JobTracker . getClock ( ) . getTime ( ) ; } name = Values . MAP . name ( ) ; counter = Counter . TOTAL_LAUNCHED_MAPS ; splits = tip . getSplitNodes ( ) ; if ( tip . getActiveTasks ( ) . size ( ) > 1 ) { speculativeMapTasks ++ ; if ( ! garbageCollected ) { totalSpeculativeMapTasks . incrementAndGet ( ) ; } metrics . speculateMap ( id , tip . isUsingProcessingRateForSpeculation ( ) ) ; } metrics . launchMap ( id ) ; } else { if ( firstReduceStartTime == 0 ) { firstReduceStartTime = JobTracker . getClock ( ) . getTime ( ) ; } name = Values . REDUCE . name ( ) ; counter = Counter . TOTAL_LAUNCHED_REDUCES ; if ( tip . getActiveTasks ( ) . size ( ) > 1 ) { speculativeReduceTasks ++ ; if ( ! garbageCollected ) { totalSpeculativeReduceTasks . incrementAndGet ( ) ; } metrics . speculateReduce ( id , tip . isUsingProcessingRateForSpeculation ( ) ) ; } metrics . launchReduce ( id ) ; } // Note that the logs are for the scheduled tasks only . Tasks that join on // restart has already their logs in place . if ( tip . isFirstAttempt ( id ) ) { JobHistory . Task . logStarted ( tip . getTIPId ( ) , name , tip . getExecStartTime ( ) , splits ) ; } if ( ! tip . isJobSetupTask ( ) && ! tip . isJobCleanupTask ( ) ) { jobCounters . incrCounter ( counter , 1 ) ; } // TODO The only problem with these counters would be on restart . 
// The jobtracker updates the counter only when the task that is scheduled // if from a non - running tip and is local ( data , rack . . . ) . But upon restart // as the reports come from the task tracker , there is no good way to infer // when exactly to increment the locality counters . The only solution is to // increment the counters for all the tasks irrespective of // - whether the tip is running or not // - whether its a speculative task or not // So to simplify , increment the data locality counter whenever there is // data locality . if ( tip . isMapTask ( ) && ! tip . isJobSetupTask ( ) && ! tip . isJobCleanupTask ( ) ) { // increment the data locality counter for maps Node tracker = jobtracker . getNode ( tts . getHost ( ) ) ; int level = this . maxLevel ; // find the right level across split locations for ( String local : maps [ tip . getIdWithinJob ( ) ] . getSplitLocations ( ) ) { Node datanode = jobtracker . getNode ( local ) ; int newLevel = this . maxLevel ; if ( tracker != null && datanode != null ) { newLevel = getMatchingLevelForNodes ( tracker , datanode ) ; } if ( newLevel < level ) { level = newLevel ; // an optimization if ( level == 0 ) { break ; } } } switch ( level ) { case 0 : LOG . info ( "Choosing data-local task " + tip . getTIPId ( ) ) ; jobCounters . incrCounter ( Counter . DATA_LOCAL_MAPS , 1 ) ; metrics . launchDataLocalMap ( id ) ; break ; case 1 : LOG . info ( "Choosing rack-local task " + tip . getTIPId ( ) ) ; jobCounters . incrCounter ( Counter . RACK_LOCAL_MAPS , 1 ) ; metrics . launchRackLocalMap ( id ) ; break ; default : // check if there is any locality if ( level != this . maxLevel ) { LOG . info ( "Choosing cached task at level " + level + tip . getTIPId ( ) ) ; jobCounters . incrCounter ( Counter . OTHER_LOCAL_MAPS , 1 ) ; } break ; } }
public class GetContentModerationRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetContentModerationRequest getContentModerationRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getContentModerationRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getContentModerationRequest . getJobId ( ) , JOBID_BINDING ) ; protocolMarshaller . marshall ( getContentModerationRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( getContentModerationRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( getContentModerationRequest . getSortBy ( ) , SORTBY_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class EmbeddableRegisteredResources { /** * Directs the RegisteredResources to recover its state after a failure . * This is based on the given RecoverableUnit object . The participant list is reconstructed . * @ param log The RecoverableUnit holding the RegisteredResources state . */ @ Override public void reconstruct ( RecoveryManager rm , RecoverableUnit log ) throws SystemException { } }
final boolean traceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( traceOn && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "reconstruct" , new Object [ ] { rm , log } ) ; RecoveryManager recoveryManager = rm ; _retryCompletion = true ; reconstructHeuristics ( log ) ; // Read in XAResources and Corba resources ( subordinate coords ) from the log // We save the sections and logUnit although they are never needed again . . // / / Get section id for Corba Resources registered as part of transaction // _ logSection = log . lookupSection ( TransactionImpl . CORBA _ RESOURCE _ SECTION ) ; // if ( _ logSection ! = null ) / / We have some resources to recover // LogCursor logData = null ; // try // logData = _ logSection . data ( ) ; // while ( logData . hasNext ( ) ) // final byte [ ] data = ( byte [ ] ) logData . next ( ) ; // try // final CORBAResourceWrapper crw = new CORBAResourceWrapper ( data ) ; // crw . setResourceStatus ( StatefulResource . PREPARED ) ; // _ resourceObjects . add ( crw ) ; // catch ( Throwable exc ) // FFDCFilter . processException ( exc , " com . ibm . ws . tx . jta . RegisteredResources . reconstruct " , " 794 " , this ) ; // Tr . error ( tc , " WTRN0045 _ CANNOT _ RECOVER _ RESOURCE " , // new Object [ ] { com . ibm . ejs . util . Util . toHexString ( data ) , exc } ) ; // throw exc ; // logData . close ( ) ; // catch ( Throwable exc ) // FFDCFilter . processException ( exc , " com . ibm . ws . tx . jta . RegisteredResources . reconstruct " , " 804 " , this ) ; // Tr . fatal ( tc , " WTRN0000 _ ERR _ INT _ ERROR " , new Object [ ] { " reconstruct " , " com . ibm . ws . tx . jta . RegisteredResources " , exc } ) ; // if ( logData ! = null ) logData . close ( ) ; // if ( traceOn & & tc . isEventEnabled ( ) ) Tr . event ( tc , " Exception raised reconstructing corba resource " ) ; // if ( traceOn & & tc . isEntryEnabled ( ) ) Tr . exit ( tc , " reconstruct " ) ; // throw ( SystemException ) new SystemException ( exc . toString ( ) ) . 
initCause ( exc ) ; // / / If we recovered at least one remote resource , then create a recovery coordinator // / / so that subordinates can perform replay _ completion . We only need to create one // / / per coordinator / transaction as it is keyed off the globalTID . We also do not need // / / to save a reference as it will add itself to the Transaction ' s sync list and delete // / / itself at end of transaction . // if ( _ resourceObjects . size ( ) > 0) // new RecoveryCoordinatorImpl ( recoveryManager . getFailureScopeController ( ) , ( TransactionImpl ) _ transaction ) . object ( ) ; // Get section id for XAResources registered as part of transaction _xalogSection = log . lookupSection ( TransactionImpl . XARESOURCE_SECTION ) ; if ( _xalogSection != null ) // We have some resources to recover { final byte [ ] tid = _transaction . getXidImpl ( ) . toBytes ( ) ; LogCursor logData = null ; try { logData = _xalogSection . data ( ) ; while ( logData . hasNext ( ) ) { final byte [ ] data = ( byte [ ] ) logData . next ( ) ; try { final JTAXAResourceImpl res = new JTAXAResourceImpl ( recoveryManager . getPartnerLogTable ( ) , tid , data ) ; res . setResourceStatus ( StatefulResource . PREPARED ) ; _resourceObjects . add ( res ) ; if ( res . getPriority ( ) != JTAResource . DEFAULT_COMMIT_PRIORITY ) _gotPriorityResourcesEnlisted = true ; } catch ( Throwable exc ) { FFDCFilter . processException ( exc , "com.ibm.tx.jta.embeddable.impl.EmbeddableRegisteredResources.reconstruct" , "843" , this ) ; Tr . error ( tc , "WTRN0045_CANNOT_RECOVER_RESOURCE" , new Object [ ] { com . ibm . ejs . util . Util . toHexString ( data ) , exc } ) ; throw exc ; } } logData . close ( ) ; } catch ( Throwable exc ) { FFDCFilter . processException ( exc , "com.ibm.tx.jta.embeddable.impl.EmbeddableRegisteredResources.reconstruct" , "853" , this ) ; Tr . 
fatal ( tc , "WTRN0000_ERR_INT_ERROR" , new Object [ ] { "reconstruct" , "com.ibm.ws.tx.jta.RegisteredResources" , exc } ) ; if ( logData != null ) logData . close ( ) ; if ( traceOn && tc . isEventEnabled ( ) ) Tr . event ( tc , "Exception raised reconstructing XA resource" ) ; if ( traceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "reconstruct" ) ; throw ( SystemException ) new SystemException ( exc . toString ( ) ) . initCause ( exc ) ; } } // _ wscResourceSection = log . lookupSection ( TransactionImpl . RESOURCE _ WSC _ SECTION ) ; // final int numResources = _ resourceObjects . size ( ) ; // if ( _ wscResourceSection ! = null ) // final byte [ ] tid = _ transaction . getXid ( ) . getGlobalTransactionId ( ) ; // LogCursor logData = null ; // try // logData = _ wscResourceSection . data ( ) ; // while ( logData . hasNext ( ) ) // final byte [ ] data = ( byte [ ] ) logData . next ( ) ; // try // final WSCoordinatorWrapper wscw = new WSCoordinatorWrapper ( recoveryManager , tid , data ) ; // wscw . setResourceStatus ( StatefulResource . PREPARED ) ; // _ resourceObjects . add ( wscw ) ; // catch ( Throwable exc ) // FFDCFilter . processException ( exc , " com . ibm . ws . tx . jta . RegisteredResources . reconstruct " , " 884 " , this ) ; // Tr . error ( tc , " WTRN0045 _ CANNOT _ RECOVER _ RESOURCE " , // new Object [ ] { com . ibm . ejs . util . Util . toHexString ( data ) , exc } ) ; // throw exc ; // logData . close ( ) ; // catch ( Throwable exc ) // FFDCFilter . processException ( exc , " com . ibm . ws . tx . jta . RegisteredResources . reconstruct " , " 894 " , this ) ; // Tr . fatal ( tc , " WTRN0000 _ ERR _ INT _ ERROR " , new Object [ ] { " reconstruct " , " com . ibm . ws . tx . jta . RegisteredResources " , exc } ) ; // if ( logData ! = null ) logData . close ( ) ; // if ( traceOn & & tc . isEntryEnabled ( ) ) Tr . exit ( tc , " reconstruct " ) ; // throw ( SystemException ) new SystemException ( exc . toString ( ) ) . 
initCause ( exc ) ; // / / If we recovered at least one remote resource , then create a recovery coordinator // / / so that subordinates can perform replay _ completion . We only need to create one // / / per coordinator / transaction as it is keyed off the globalTID . // if ( _ resourceObjects . size ( ) > numResources ) // final WSCoordinatorImpl wsCoordinator = ( WSCoordinatorImpl ) recoveryManager . getFailureScopeController ( ) . getWSCoordinator ( ) ; // final TransactionWrapper transactionWrapper = wsCoordinator . lookupTransactionWrapper ( tid ) ; // if ( transactionWrapper = = null ) // / / We are either a superior or a downstream server imported via Corba // / / and need to create a Wrapper for replay _ completion // wsCoordinator . storeTransactionWrapper ( tid , new TransactionWrapper ( ( TransactionImpl ) _ transaction ) ) ; // Get section id for WSATAsyncResources registered as part of transaction _wsatAsyncSection = log . lookupSection ( TransactionImpl . WSAT_ASYNC_RESOURCE_SECTION ) ; if ( _wsatAsyncSection != null ) { // We have some resources to recover // Create wrapper new TransactionWrapper ( ( EmbeddableTransactionImpl ) _transaction ) ; if ( traceOn && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "reconstructing async resources" ) ; LogCursor logData = null ; WSATParticipantWrapper wrapper = null ; _asyncResourceObjects = new ArrayList < JTAAsyncResourceBase > ( ) ; try { logData = _wsatAsyncSection . data ( ) ; while ( logData . hasNext ( ) ) { final byte [ ] data = ( byte [ ] ) logData . next ( ) ; try { wrapper = new WSATParticipantWrapper ( data ) ; wrapper . setResourceStatus ( StatefulResource . PREPARED ) ; _asyncResourceObjects . add ( wrapper ) ; } catch ( Throwable exc ) { FFDCFilter . processException ( exc , "com.ibm.tx.jta.embeddable.impl.EmbeddableRegisteredResources.reconstruct" , "943" , this ) ; Tr . error ( tc , "WTRN0045_CANNOT_RECOVER_RESOURCE" , new java . lang . Object [ ] { data , exc } ) ; throw exc ; } } logData . 
close ( ) ; } catch ( Throwable exc ) { FFDCFilter . processException ( exc , "com.ibm.tx.jta.embeddable.impl.EmbeddableRegisteredResources.reconstruct" , "952" , this ) ; Tr . fatal ( tc , "WTRN0000_ERR_INT_ERROR" , new Object [ ] { "reconstruct" , "com.ibm.ws.tx.jta.RegisteredResources" , exc } ) ; if ( logData != null ) logData . close ( ) ; if ( traceOn && tc . isEventEnabled ( ) ) Tr . event ( tc , "Exception raised reconstructing WSAT Async resource" ) ; throw ( SystemException ) new SystemException ( exc . toString ( ) ) . initCause ( exc ) ; } } _logUnit = log ; if ( traceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "reconstruct" ) ;
public class CompoundLocation { /** * Returns the sequence position relative to the compound location . * @ param position * the local sequence position . * @ return the relative position . */ public Long getRelativePosition ( Long position ) { } }
long relativePosition = 0L ; for ( Location location : locations ) { if ( location instanceof RemoteLocation ) { relativePosition += location . getLength ( ) ; } else { if ( position < location . getBeginPosition ( ) || position > location . getEndPosition ( ) ) { relativePosition += location . getLength ( ) ; } else { if ( location . isComplement ( ) ) { relativePosition += ( location . getEndPosition ( ) - position + 1 ) ; } else { relativePosition += ( position - location . getBeginPosition ( ) + 1 ) ; } if ( isComplement ( ) ) { relativePosition = getLength ( ) - relativePosition + 1 ; } return relativePosition ; } } } return null ;
public class DnsResolver { /** * Look into A - record at a specific DNS address . * @ return resolved IP addresses or null if no A - record was present */ @ Nullable public static List < String > resolveARecord ( String rootDomainName ) { } }
if ( isLocalOrIp ( rootDomainName ) ) { return null ; } try { Attributes attrs = getDirContext ( ) . getAttributes ( rootDomainName , new String [ ] { A_RECORD_TYPE , CNAME_RECORD_TYPE } ) ; Attribute aRecord = attrs . get ( A_RECORD_TYPE ) ; Attribute cRecord = attrs . get ( CNAME_RECORD_TYPE ) ; if ( aRecord != null && cRecord == null ) { List < String > result = new ArrayList < > ( ) ; NamingEnumeration < String > entries = ( NamingEnumeration < String > ) aRecord . getAll ( ) ; while ( entries . hasMore ( ) ) { result . add ( entries . next ( ) ) ; } return result ; } } catch ( Exception e ) { logger . warn ( "Cannot load A-record for eureka server address {}" , rootDomainName , e ) ; return null ; } return null ;
public class FlexiantComputeClient { /** * Retrieves a list of resources matching the given prefix on the given attribute * which are of the given type . * @ param prefix the prefix to match . * @ param attribute the attribute where the prefix should match . * @ param resourceType the type of the resource . * @ param type the type of the resulting class . * @ param locationUUID optional location of the type , if null it will be ignored . * @ return a list containing all resources matching the request . * @ throws FlexiantException if an error occurs while contacting the api . */ protected < T > List < T > getResources ( final String prefix , final String attribute , final ResourceType resourceType , final Class < T > type , @ Nullable final String locationUUID ) throws FlexiantException { } }
SearchFilter sf = new SearchFilter ( ) ; FilterCondition fc = new FilterCondition ( ) ; fc . setCondition ( Condition . STARTS_WITH ) ; fc . setField ( attribute ) ; fc . getValue ( ) . add ( prefix ) ; sf . getFilterConditions ( ) . add ( fc ) ; if ( locationUUID != null ) { FilterCondition fcLocation = new FilterCondition ( ) ; fcLocation . setCondition ( Condition . IS_EQUAL_TO ) ; fcLocation . setField ( "vdcUUID" ) ; fcLocation . getValue ( ) . add ( locationUUID ) ; sf . getFilterConditions ( ) . add ( fcLocation ) ; } try { // noinspection unchecked return ( List < T > ) this . getService ( ) . listResources ( sf , null , resourceType ) . getList ( ) ; } catch ( ExtilityException e ) { throw new FlexiantException ( String . format ( "Error while retrieving resource with prefix %s on attribute %s of resourceType %s" , prefix , attribute , resourceType ) , e ) ; }
public class PTSaxton2006 { /** * Equation 7 for calculating Adjusted density , g / cm - 3 * @ param slsnd Sand weight percentage by layer ( [ 0,100 ] % ) * @ param slcly Clay weight percentage by layer ( [ 0,100 ] % ) * @ param omPct Organic matter weight percentage by layer ( [ 0,100 ] % ) , ( = * SLOC * 1.72) * @ param df Density adjustment Factor ( 0.9–1.3) */ public static String calcAdjustedDensity ( String slsnd , String slcly , String omPct , String df ) { } }
if ( compare ( df , "0.9" , CompareMode . NOTLESS ) && compare ( df , "1.3" , CompareMode . NOTGREATER ) ) { String normalDensity = calcNormalDensity ( slsnd , slcly , omPct ) ; String ret = product ( normalDensity , df ) ; LOG . debug ( "Calculate result for Adjusted density, g/cm-3 is {}" , ret ) ; return ret ; } else { LOG . error ( "Density adjustment Factor is out of range (0.9 - 1.3) : {}" , df ) ; return null ; }
public class PerSessionLogHandler { /** * Removes session logs for the given session id . * NB ! If the handler has been configured to capture logs on quit no logs will be removed . * @ param sessionId The session id to use . */ public synchronized void removeSessionLogs ( SessionId sessionId ) { } }
if ( storeLogsOnSessionQuit ) { return ; } ThreadKey threadId = sessionToThreadMap . get ( sessionId ) ; SessionId sessionIdForThread = threadToSessionMap . get ( threadId ) ; if ( threadId != null && sessionIdForThread != null && sessionIdForThread . equals ( sessionId ) ) { threadToSessionMap . remove ( threadId ) ; sessionToThreadMap . remove ( sessionId ) ; } perSessionRecords . remove ( sessionId ) ; logFileRepository . removeLogFile ( sessionId ) ;
public class ByteOrderedTokenRange {

    /**
     * Right-pads the bytes of {@code bb} with zeros up to {@code significantBytes} and
     * interprets the result as a non-negative {@link BigInteger}.
     *
     * For example if the token is 0x01 but significantBytes is 2, the result is 256 (0x0100).
     */
    private BigInteger toBigInteger(ByteBuffer bb, int significantBytes) {
        byte[] bytes = Bytes.getArray(bb);
        byte[] target;
        if (significantBytes != bytes.length) {
            // Zero-pad on the right to the requested width.
            // NOTE(review): assumes significantBytes >= bytes.length, otherwise the
            // arraycopy below would overflow the target array — confirm with callers.
            target = new byte[significantBytes];
            System.arraycopy(bytes, 0, target, 0, bytes.length);
        } else {
            target = bytes;
        }
        // Signum 1 forces a non-negative interpretation regardless of the high bit.
        return new BigInteger(1, target);
    }
}
public class ProteinPocketFinder { /** * Method assigns the atoms of a biopolymer to the grid . For every atom * the corresponding grid point is identified and set to the value * of the proteinInterior variable . * The atom radius and solvent radius is accounted for with the variables : * double rAtom , and double rSolvent . * @ throws Exception */ public void assignProteinToGrid ( ) throws Exception { } }
// logger . debug . print ( " ASSIGN PROTEIN TO GRID " ) ; // 1 . Step : Set all grid points to solvent accessible this . grid = gridGenerator . initializeGrid ( this . grid , 0 ) ; // 2 . Step Grid points inaccessible to solvent are assigend a value of - 1 // set grid points around ( r _ atom + r _ solv ) to - 1 IAtom [ ] atoms = AtomContainerManipulator . getAtomArray ( protein ) ; Point3d gridPoint = null ; int checkGridPoints = 0 ; double vdWRadius = 0 ; int [ ] dim = gridGenerator . getDim ( ) ; // int proteinAtomCount = 0 ; / / Debugging int [ ] minMax = { 0 , 0 , 0 , 0 , 0 , 0 } ; for ( int i = 0 ; i < atoms . length ; i ++ ) { if ( ( ( PDBAtom ) atoms [ i ] ) . getHetAtom ( ) ) { continue ; } gridPoint = gridGenerator . getGridPointFrom3dCoordinates ( atoms [ i ] . getPoint3d ( ) ) ; this . grid [ ( int ) gridPoint . x ] [ ( int ) gridPoint . y ] [ ( int ) gridPoint . z ] = - 1 ; vdWRadius = PeriodicTable . getVdwRadius ( atoms [ i ] . getSymbol ( ) ) ; if ( vdWRadius == 0 ) { vdWRadius = rAtom ; } checkGridPoints = ( int ) ( ( ( vdWRadius + rSolvent ) / gridGenerator . getLatticeConstant ( ) ) - atomCheckRadius ) ; if ( checkGridPoints < 0 ) { checkGridPoints = 0 ; } minMax [ 0 ] = ( int ) gridPoint . x - checkGridPoints ; minMax [ 1 ] = ( int ) gridPoint . x + checkGridPoints ; minMax [ 2 ] = ( int ) gridPoint . y - checkGridPoints ; minMax [ 3 ] = ( int ) gridPoint . y + checkGridPoints ; minMax [ 4 ] = ( int ) gridPoint . z - checkGridPoints ; minMax [ 5 ] = ( int ) gridPoint . z + checkGridPoints ; minMax = checkBoundaries ( minMax , dim ) ; for ( int x = minMax [ 0 ] ; x <= minMax [ 1 ] ; x ++ ) { for ( int y = minMax [ 2 ] ; y <= minMax [ 3 ] ; y ++ ) { for ( int z = minMax [ 4 ] ; z <= minMax [ 5 ] ; z ++ ) { this . grid [ x ] [ y ] [ z ] = this . grid [ x ] [ y ] [ z ] - 1 ; // proteinAtomCount + + ; / / Debugging } } } } // for atoms . length // logger . 
debug ( " - checkGridPoints > " + checkGridPoints // + " ProteinGridPoints > " + proteinAtomCount ) ;
public class Utils { /** * It obtains the entities in the graph " g " whose type is the same as * " typeName " . * @ param g The graph considered * @ param typeName The type being searched * @ return The list of entities * @ throws NullEntity */ public static List < GraphEntity > getEntities ( Graph g , String typeName ) throws NullEntity { } }
GraphEntity [ ] ge = g . getEntities ( ) ; List < GraphEntity > result = new ArrayList < > ( ) ; for ( int k = 0 ; k < ge . length ; k ++ ) { if ( ge [ k ] . getType ( ) . equals ( typeName ) ) { result . add ( ge [ k ] ) ; } } return result ;
public class CachedLogger {

    /**
     * Dumps (writes) the currently cached log messages and clears the cache.
     *
     * @throws InvocationException when an error occurred while persisting.
     * @see #write(LogLevel, String, Exception, String)
     */
    public void dump() throws InvocationException {
        // Double-checked locking: cheap unsynchronized test first, re-checked under the lock.
        // NOTE(review): only safe if _updated's visibility across threads is guaranteed
        // (e.g. it is volatile or always read/written under _lock) — confirm.
        if (_updated) {
            synchronized (_lock) {
                if (!_updated)
                    return; // another thread dumped while we waited for the lock
                // Swap in a fresh cache before persisting, so writers can proceed.
                List<LogMessage> messages = _cache;
                _cache = new ArrayList<LogMessage>();
                save(messages);
                _updated = false;
                _lastDumpTime = System.currentTimeMillis();
            }
        }
    }
}
public class RBACDecorator { /** * Converts { @ link ObjectName } to a key that helps verifying whether different MBeans * can produce same RBAC info * @ param allJmxAclPids * @ param objectName * @ return */ static String pidListKey ( List < String > allJmxAclPids , ObjectName objectName ) throws NoSuchAlgorithmException , UnsupportedEncodingException { } }
List < String > pidCandidates = iterateDownPids ( nameSegments ( objectName ) ) ; MessageDigest md = MessageDigest . getInstance ( "MD5" ) ; for ( String pc : pidCandidates ) { String generalPid = getGeneralPid ( allJmxAclPids , pc ) ; if ( generalPid . length ( ) > 0 ) { md . update ( generalPid . getBytes ( "UTF-8" ) ) ; } } return Hex . encodeHexString ( md . digest ( ) ) ;
public class Transaction { /** * < p > Calculates a signature hash , that is , a hash of a simplified form of the transaction . How exactly the transaction * is simplified is specified by the type and anyoneCanPay parameters . < / p > * < p > This is a low level API and when using the regular { @ link Wallet } class you don ' t have to call this yourself . * When working with more complex transaction types and contracts , it can be necessary . When signing a P2SH output * the redeemScript should be the script encoded into the scriptSig field , for normal transactions , it ' s the * scriptPubKey of the output you ' re signing for . < / p > * @ param inputIndex input the signature is being calculated for . Tx signatures are always relative to an input . * @ param redeemScript the bytes that should be in the given input during signing . * @ param type Should be SigHash . ALL * @ param anyoneCanPay should be false . */ public Sha256Hash hashForSignature ( int inputIndex , byte [ ] redeemScript , SigHash type , boolean anyoneCanPay ) { } }
byte sigHashType = ( byte ) TransactionSignature . calcSigHashValue ( type , anyoneCanPay ) ; return hashForSignature ( inputIndex , redeemScript , sigHashType ) ;
public class Scheduler {

    /**
     * Gets a suitable instance to schedule the vertex execution to.
     * NOTE: This method is not thread-safe, it needs to be synchronized by the caller.
     *
     * @param vertex The task to run.
     * @param requestedLocations Preferred TaskManager locations for the task.
     * @param localOnly If true, only an instance satisfying the locality preference is acceptable.
     * @return The instance to run the vertex on, or {@code null} if no instance is available.
     */
    protected SimpleSlot getFreeSlotForTask(ExecutionVertex vertex, Iterable<TaskManagerLocation> requestedLocations, boolean localOnly) {
        // we need potentially to loop multiple times, because there may be false positives
        // in the set-with-available-instances
        while (true) {
            Pair<Instance, Locality> instanceLocalityPair = findInstance(requestedLocations, localOnly);
            if (instanceLocalityPair == null) {
                // no candidate instance at all
                return null;
            }
            Instance instanceToUse = instanceLocalityPair.getLeft();
            Locality locality = instanceLocalityPair.getRight();
            try {
                SimpleSlot slot = instanceToUse.allocateSimpleSlot();
                // if the instance has further available slots, re-add it to the set of available resources.
                if (instanceToUse.hasResourcesAvailable()) {
                    this.instancesWithAvailableResources.put(instanceToUse.getTaskManagerID(), instanceToUse);
                }
                if (slot != null) {
                    slot.setLocality(locality);
                    return slot;
                }
                // slot == null: the availability info was a false positive; retry.
            } catch (InstanceDiedException e) {
                // the instance died and it has not yet been propagated to this scheduler;
                // remove the instance from the set of available instances
                removeInstance(instanceToUse);
            }
            // if we failed to get a slot, fall through the loop
        }
    }
}
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getMDDXmBase ( ) { } }
if ( mddXmBaseEEnum == null ) { mddXmBaseEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 44 ) ; } return mddXmBaseEEnum ;
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getBSG ( ) { } }
if ( bsgEClass == null ) { bsgEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 223 ) ; } return bsgEClass ;
public class PersistentFactory { /** * Loads all elements of this class from the data store . Use this method only when you know * exactly what you are doing . Otherwise , you will pull a lot of data . * @ return all objects from the database * @ throws PersistenceException an error occurred executing the query */ public Collection < T > list ( ) throws PersistenceException { } }
logger . debug ( "enter - list()" ) ; try { return find ( ( String ) null , ( Object ) null ) ; } finally { logger . debug ( "exit - list()" ) ; }
public class RTMPConnection { /** * Stops measurement . */ private void stopRoundTripMeasurement ( ) { } }
if ( keepAliveTask != null ) { boolean cancelled = keepAliveTask . cancel ( true ) ; keepAliveTask = null ; if ( cancelled && log . isDebugEnabled ( ) ) { log . debug ( "Keep alive was cancelled for {}" , sessionId ) ; } }
public class ApiOvhIp { /** * Get this object properties * REST : GET / ip / { ip } / mitigation / { ipOnMitigation } * @ param ip [ required ] * @ param ipOnMitigation [ required ] */ public OvhMitigationIp ip_mitigation_ipOnMitigation_GET ( String ip , String ipOnMitigation ) throws IOException { } }
String qPath = "/ip/{ip}/mitigation/{ipOnMitigation}" ; StringBuilder sb = path ( qPath , ip , ipOnMitigation ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhMitigationIp . class ) ;
public class DateRangeParam {

    /**
     * Sets the upper bound of the range to be strictly less than ({@code <}, i.e. exclusive)
     * the given date. (The previous javadoc incorrectly said "greater than"; the code uses
     * {@code ParamPrefixEnum.LESSTHAN}.)
     *
     * @param theUpperBound the exclusive upper bound
     * @return this, for chaining
     */
    public DateRangeParam setUpperBoundExclusive(Date theUpperBound) {
        validateAndSet(myLowerBound, new DateParam(ParamPrefixEnum.LESSTHAN, theUpperBound));
        return this;
    }
}
public class JMStats {

    /**
     * Max number.
     *
     * @param <N> the type parameter
     * @param numberList the number list
     * @return the maximum as a Number
     */
    public static <N extends Number> Number max(List<N> numberList) {
        // Delegates to the shared reducer with DoubleStream::max as the aggregation.
        return cal(numberList, DoubleStream::max);
    }
}
public class BProgram { /** * Reads and evaluates the code at the passed input stream . The stream is * read to its end , but is not closed . * @ param inStrm Input stream for reading the script to be evaluated . * @ param scriptName for error reporting purposes . * @ return Result of evaluating the code at { @ code inStrm } . */ protected Object evaluate ( InputStream inStrm , String scriptName ) { } }
InputStreamReader streamReader = new InputStreamReader ( inStrm , StandardCharsets . UTF_8 ) ; BufferedReader br = new BufferedReader ( streamReader ) ; StringBuilder sb = new StringBuilder ( ) ; String line ; try { while ( ( line = br . readLine ( ) ) != null ) { sb . append ( line ) . append ( "\n" ) ; } } catch ( IOException e ) { throw new RuntimeException ( "error while reading javascript from stream" , e ) ; } String script = sb . toString ( ) ; return evaluate ( script , scriptName ) ;
public class ZooKeeperHelper { /** * Create an empty normal ( persistent ) Znode . If the znode already exists , do nothing . * @ param zookeeper ZooKeeper instance to work with . * @ param znode Znode to create . * @ throws KeeperException * @ throws InterruptedException */ static void createIfNotThere ( ZooKeeper zookeeper , String znode ) throws KeeperException , InterruptedException { } }
try { create ( zookeeper , znode ) ; } catch ( KeeperException e ) { if ( e . code ( ) != KeeperException . Code . NODEEXISTS ) { // Rethrow all exceptions , except " node exists " , // because if the node exists , this method reached its goal . throw e ; } }
public class Assert { /** * Verifies the provided title matches the actual title of the current page * the application is on . This information will be logged and recorded , with * a screenshot for traceability and added debugging support . * @ param expectedTitlePattern the friendly name of the page */ @ Override public void titleMatches ( String expectedTitlePattern ) { } }
String title = checkTitleMatches ( expectedTitlePattern , 0 , 0 ) ; assertTrue ( "Title Mismatch: title of '" + title + DOES_NOT_MATCH_PATTERN + expectedTitlePattern + "'" , title . matches ( expectedTitlePattern ) ) ;
public class DefaultHpsmClient {

    /**
     * Returns the type of the configuration item (APP_TYPE, COMPONENT_TYPE or
     * ENVIRONMENT_TYPE), matched against the configured HPSM type and/or sub-type.
     * Which fields are compared depends on which settings are non-empty.
     *
     * @param cmdb the configuration item to classify
     * @return the matched item type, or {@code null} if nothing matched
     */
    private String getItemType(Cmdb cmdb) {
        String itemType = null;
        String subType = cmdb.getConfigurationItemSubType();
        String type = cmdb.getConfigurationItemType();
        String hpsmSettingsSubType = hpsmSettings.getAppSubType();
        String hpsmSettingsType = hpsmSettings.getAppType();
        // Decide which dimensions are configured (non-empty).
        // NOTE(review): the flags are derived from the *App* settings only — presumably the
        // Comp/Env settings are expected to be configured consistently; confirm.
        boolean typeCheck = false;
        boolean subTypeCheck = false;
        if (!"".equals(hpsmSettingsType)) {
            typeCheck = true;
        }
        if (!"".equals(hpsmSettingsSubType)) {
            subTypeCheck = true;
        }
        if (!typeCheck && subTypeCheck) {
            // Only sub-type configured: classify by sub-type alone.
            if (subType != null && subType.equals(hpsmSettings.getAppSubType())) {
                itemType = APP_TYPE;
            } else if (subType != null && subType.equals(hpsmSettings.getCompSubType())) {
                itemType = COMPONENT_TYPE;
            } else if (subType != null && subType.equals(hpsmSettings.getEnvSubType())) {
                itemType = ENVIRONMENT_TYPE;
            }
        } else if (typeCheck && !subTypeCheck) {
            // Only type configured: classify by type alone.
            if (type != null && type.equals(hpsmSettings.getAppType())) {
                itemType = APP_TYPE;
            } else if (type != null && type.equals(hpsmSettings.getCompType())) {
                itemType = COMPONENT_TYPE;
            } else if (type != null && type.equals(hpsmSettings.getEnvType())) {
                itemType = ENVIRONMENT_TYPE;
            }
        } else {
            // Both (or neither) configured: require both fields to match.
            if (subType != null && subType.equals(hpsmSettings.getAppSubType()) && type != null && type.equals(hpsmSettings.getAppType())) {
                itemType = APP_TYPE;
            } else if (subType != null && subType.equals(hpsmSettings.getCompSubType()) && type != null && type.equals(hpsmSettings.getCompType())) {
                itemType = COMPONENT_TYPE;
            } else if (subType != null && subType.equals(hpsmSettings.getEnvSubType()) && type != null && type.equals(hpsmSettings.getEnvType())) {
                itemType = ENVIRONMENT_TYPE;
            }
        }
        return itemType;
    }
}
public class FastSafeIterableMap { /** * / * ( non - Javadoc ) * @ see android . arch . core . internal . SafeIterableMap # putIfAbsent ( java . lang . Object , java . lang . Object ) */ @ Override public V putIfAbsent ( @ NonNull K key , @ NonNull V v ) { } }
Entry < K , V > current = get ( key ) ; if ( current != null ) { return current . mValue ; } mHashMap . put ( key , put ( key , v ) ) ; return null ;
public class Task { /** * Makes a fluent cast of a Task ' s result possible , avoiding an extra continuation just to cast * the type of the result . */ public < TOut > Task < TOut > cast ( ) { } }
@ SuppressWarnings ( "unchecked" ) Task < TOut > task = ( Task < TOut > ) this ; return task ;
public class JKObjectUtil { public static String toXml ( final Object obj ) { } }
final ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ; // XStream x = createXStream ( ) ; // String xml = x . toXML ( obj ) ; // return xml ; final XMLEncoder e = new XMLEncoder ( out ) ; e . setExceptionListener ( new JKObjectUtil . XmlEncoderExceptionListener ( ) ) ; // e . setPersistenceDelegate ( Object . class , new MyPersistenceDelegate ( ) ) ; e . writeObject ( obj ) ; e . close ( ) ; return out . toString ( ) ; // return null ;
public class DFSEvaluatorPreserver { /** * Adds the removed evaluator entry to the evaluator log . * @ param id */ @ Override public synchronized void recordRemovedEvaluator ( final String id ) { } }
if ( this . fileSystem != null && this . changeLogLocation != null ) { final String entry = REMOVE_FLAG + id + System . lineSeparator ( ) ; this . logContainerChange ( entry ) ; }
public class AmazonSageMakerWaiters { /** * Builds a NotebookInstanceStopped waiter by using custom parameters waiterParameters and other parameters defined * in the waiters specification , and then polls until it determines whether the resource entered the desired state * or not , where polling criteria is bound by either default polling strategy or custom polling strategy . */ public Waiter < DescribeNotebookInstanceRequest > notebookInstanceStopped ( ) { } }
return new WaiterBuilder < DescribeNotebookInstanceRequest , DescribeNotebookInstanceResult > ( ) . withSdkFunction ( new DescribeNotebookInstanceFunction ( client ) ) . withAcceptors ( new NotebookInstanceStopped . IsStoppedMatcher ( ) , new NotebookInstanceStopped . IsFailedMatcher ( ) ) . withDefaultPollingStrategy ( new PollingStrategy ( new MaxAttemptsRetryStrategy ( 60 ) , new FixedDelayStrategy ( 30 ) ) ) . withExecutorService ( executorService ) . build ( ) ;
public class GISTrainer { /** * Use this model to evaluate a context and populate the specified outsums array with the * likelihood of each outcome given that context . * @ param context The integers of the predicates which have been * observed at the present decision point . */ public void eval ( int [ ] context , double [ ] outsums ) { } }
for ( int oid = 0 ; oid < numOutcomes ; oid ++ ) { outsums [ oid ] = iprob ; numfeats [ oid ] = 0 ; } int [ ] activeOutcomes ; double [ ] activeParameters ; for ( int i = 0 ; i < context . length ; i ++ ) { Context predParams = params [ context [ i ] ] ; activeOutcomes = predParams . getOutcomes ( ) ; activeParameters = predParams . getParameters ( ) ; for ( int j = 0 ; j < activeOutcomes . length ; j ++ ) { int oid = activeOutcomes [ j ] ; numfeats [ oid ] ++ ; outsums [ oid ] += constantInverse * activeParameters [ j ] ; } } double SUM = 0.0 ; for ( int oid = 0 ; oid < numOutcomes ; oid ++ ) { outsums [ oid ] = Math . exp ( outsums [ oid ] ) ; if ( _useSlackParameter ) { outsums [ oid ] += ( ( 1.0 - ( ( double ) numfeats [ oid ] / constant ) ) * correctionParam ) ; } SUM += outsums [ oid ] ; } for ( int oid = 0 ; oid < numOutcomes ; oid ++ ) outsums [ oid ] /= SUM ;
public class Neighbours {
    /**
     * Recovers the Neighbours from the MessageStore.
     *
     * Iterates every persisted {@code Neighbour} item, re-wires its non-persistent state and
     * subscriptions, and — for neighbours on a foreign bus — re-adds them only if the foreign
     * bus and its link still exist in the current configuration.
     *
     * @throws MessageStoreException Thrown if there was a failure recovering the Neighbours.
     * @throws SIResourceException   Propagated from subscription recovery / neighbour add.
     */
    protected void recoverNeighbours() throws MessageStoreException, SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "recoverNeighbours");
        NonLockingCursor cursor = null;
        try {
            // Cursor over all persisted Neighbour items in the proxy handler's item stream.
            cursor = _proxyHandler.newNonLockingItemStreamCursor(new ClassEqualsFilter(Neighbour.class));
            AbstractItem item = null;
            while ((item = cursor.next()) != null) {
                Neighbour neighbour = null;
                neighbour = (Neighbour) item;
                // Add the Neighbour into the list of recovered Neighbours.
                synchronized (_recoveredNeighbours) {
                    _recoveredNeighbours.put(neighbour.getUUID(), neighbour);
                }
                // Setup the non persistent state for this Neighbour.
                neighbour.intialiseNonPersistent(_proxyHandler, this);
                // Ask the neighbour to recover its subscriptions.
                neighbour.recoverSubscriptions(_proxyHandler);
                // Check that the Bus matches the current Bus.
                if (!neighbour.getBusId().equals(_proxyHandler.getMessageProcessor().getMessagingEngineBus())) {
                    // This neighbour lives on a Foreign Bus.
                    // Make sure this link still exists before putting it into the neighbours list.
                    ForeignBusDefinition bus = _proxyHandler.getMessageProcessor().getForeignBus(neighbour.getBusId());
                    // If there is no bus the bus definition will be null.
                    if (bus != null) {
                        try {
                            // see defect 295990
                            // This is a bit of a hack, but there is a window whereby the link could
                            // have been deleted but not actually removed before the ME restarted.
                            // We can examine the link's locality set to ascertain whether this link
                            // is really 'there' or not.
                            VirtualLinkDefinition vLinkDef = bus.getLink();
                            if (vLinkDef != null
                                    && vLinkDef.getLinkLocalitySet() != null
                                    && vLinkDef.getLinkLocalitySet().contains(neighbour.getUUID().toString())) {
                                try {
                                    addNeighbour(neighbour.getUUID(), neighbour.getBusId(), null);
                                } catch (SIException e) {
                                    // FFDC
                                    FFDCFilter.processException(e,
                                            "com.ibm.ws.sib.processor.proxyhandler.Neighbours.recoverNeighbours",
                                            "1:1534:1.113", this);
                                    // Wrap as an internal messaging error, trace it, and rethrow.
                                    final SIErrorException finalE = new SIErrorException(
                                            nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0002",
                                                    new Object[] { "com.ibm.ws.sib.processor.proxyhandler.Neighbours",
                                                            "1:1542:1.113", e },
                                                    null),
                                            e);
                                    SibTr.exception(tc, finalE);
                                    SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                                            new Object[] { "com.ibm.ws.sib.processor.proxyhandler.Neighbours",
                                                    "1:1551:1.113", e });
                                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                                        SibTr.exit(tc, "recoverNeighbours", finalE);
                                    throw finalE;
                                }
                            } else {
                                // The link's locality set was not correct: treat as link gone.
                                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                    SibTr.debug(tc, "Link no longer exists " + neighbour.getBusId()
                                            + " for neighbour " + neighbour.getUUID());
                            }
                        } catch (SIBExceptionNoLinkExists e) {
                            // No FFDC code needed
                            // There is no link whatsoever; this probably means that the foreign bus
                            // still exists, but the link has been deleted.
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                SibTr.debug(tc, "Link no longer exists " + neighbour.getBusId()
                                        + " for neighbour " + neighbour.getUUID() + " : " + e);
                        }
                    } else {
                        // The foreign bus itself no longer exists; skip this neighbour.
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(tc, "Bus no longer exists " + neighbour.getBusId()
                                    + " for neighbour " + neighbour.getUUID());
                    }
                }
            } // end while
        } finally {
            // Always release the cursor, even if recovery of an item failed.
            if (cursor != null)
                cursor.finished();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "recoverNeighbours");
    }
}
public class AllConnectConnectionHolder { /** * 从亚健康丢到重试列表 * @ param providerInfo Provider * @ param transport 连接 */ protected void subHealthToRetry ( ProviderInfo providerInfo , ClientTransport transport ) { } }
providerLock . lock ( ) ; try { if ( subHealthConnections . remove ( providerInfo ) != null ) { retryConnections . put ( providerInfo , transport ) ; } } finally { providerLock . unlock ( ) ; }
public class SqlParamUtils { /** * 評価式の探索 * @ param node SQLノード * @ param params パラメータが見つかった場合に格納するSetオブジェクト */ private static void traverseExpression ( final ognl . Node node , final Set < String > params ) { } }
if ( node == null ) { return ; } if ( node instanceof ASTProperty ) { ASTProperty prop = ( ASTProperty ) node ; params . add ( prop . toString ( ) ) ; } else { int childCount = node . jjtGetNumChildren ( ) ; for ( int i = 0 ; i < childCount ; i ++ ) { ognl . Node child = node . jjtGetChild ( i ) ; traverseExpression ( child , params ) ; } }
public class LayerDrawable { /** * Initializes the constant state from the values in the typed array . */ private void updateStateFromTypedArray ( TypedArray a ) { } }
final LayerState state = mLayerState ; // Account for any configuration changes . state . mChangingConfigurations |= TypedArrayCompat . getChangingConfigurations ( a ) ; // Extract the theme attributes , if any . state . mThemeAttrs = TypedArrayCompat . extractThemeAttrs ( a ) ; final int N = a . getIndexCount ( ) ; for ( int i = 0 ; i < N ; i ++ ) { int attr = a . getIndex ( i ) ; if ( attr == R . styleable . LayerDrawable_android_opacity ) { state . mOpacityOverride = a . getInt ( attr , state . mOpacityOverride ) ; } else if ( attr == R . styleable . LayerDrawable_android_paddingTop ) { state . mPaddingTop = a . getDimensionPixelOffset ( attr , state . mPaddingTop ) ; } else if ( attr == R . styleable . LayerDrawable_android_paddingBottom ) { state . mPaddingBottom = a . getDimensionPixelOffset ( attr , state . mPaddingBottom ) ; } else if ( attr == R . styleable . LayerDrawable_android_paddingLeft ) { state . mPaddingLeft = a . getDimensionPixelOffset ( attr , state . mPaddingLeft ) ; } else if ( attr == R . styleable . LayerDrawable_android_paddingRight ) { state . mPaddingRight = a . getDimensionPixelOffset ( attr , state . mPaddingRight ) ; } else if ( attr == R . styleable . LayerDrawable_android_paddingStart ) { state . mPaddingStart = a . getDimensionPixelOffset ( attr , state . mPaddingStart ) ; } else if ( attr == R . styleable . LayerDrawable_android_paddingEnd ) { state . mPaddingEnd = a . getDimensionPixelOffset ( attr , state . mPaddingEnd ) ; } else if ( attr == R . styleable . LayerDrawable_android_autoMirrored ) { state . mAutoMirrored = a . getBoolean ( attr , state . mAutoMirrored ) ; } else if ( attr == R . styleable . LayerDrawable_android_paddingMode ) { state . mPaddingMode = a . getInteger ( attr , state . mPaddingMode ) ; } }
public class ReplicatedHashMap { /** * Creates a synchronized facade for a ReplicatedMap . All methods which * change state are invoked through a monitor . This is similar to * { @ link java . util . Collections . SynchronizedMap # synchronizedMap ( Map ) } , but also includes the replication * methods ( starting with an underscore ) . * @ param map * @ return */ public static < K , V > ReplicatedMap < K , V > synchronizedMap ( ReplicatedMap < K , V > map ) { } }
return new SynchronizedReplicatedMap < > ( map ) ;
public class JazzyHelper { /** * Notifies the OnScrollListener of an onScroll event , since JazzyListView is the primary listener for onScroll events . */ private void notifyAdditionalOnScrollListener ( AbsListView view , int firstVisibleItem , int visibleItemCount , int totalItemCount ) { } }
if ( mAdditionalOnScrollListener != null ) { mAdditionalOnScrollListener . onScroll ( view , firstVisibleItem , visibleItemCount , totalItemCount ) ; }
public class SchemaBuilder { /** * Starts a CREATE TYPE query with the given type name for the given keyspace name . */ @ NonNull public static CreateTypeStart createType ( @ Nullable CqlIdentifier keyspace , @ NonNull CqlIdentifier typeName ) { } }
return new DefaultCreateType ( keyspace , typeName ) ;
public class RequirePluginVersions { /** * Resolve plugin . * @ param plugin the plugin * @ param project the project * @ return the plugin */ protected Plugin resolvePlugin ( Plugin plugin , MavenProject project ) { } }
@ SuppressWarnings ( "unchecked" ) List < ArtifactRepository > pluginRepositories = project . getPluginArtifactRepositories ( ) ; Artifact artifact = factory . createPluginArtifact ( plugin . getGroupId ( ) , plugin . getArtifactId ( ) , VersionRange . createFromVersion ( "LATEST" ) ) ; try { this . resolver . resolve ( artifact , pluginRepositories , this . local ) ; plugin . setVersion ( artifact . getVersion ( ) ) ; } catch ( ArtifactResolutionException e ) { } catch ( ArtifactNotFoundException e ) { } return plugin ;
public class JacksonDBCollection { /** * Wraps a DB collection in a JacksonDBCollection * @ param dbCollection The DB collection to wrap * @ param type The type of objects to deserialise to * @ param keyType The type of the objects key * @ return The wrapped collection */ public static < T , K > JacksonDBCollection < T , K > wrap ( DBCollection dbCollection , Class < T > type , Class < K > keyType ) { } }
return new JacksonDBCollection < T , K > ( dbCollection , DEFAULT_OBJECT_MAPPER . constructType ( type ) , DEFAULT_OBJECT_MAPPER . constructType ( keyType ) , DEFAULT_OBJECT_MAPPER , null ) ;
public class Maze2D { /** * Returns a set of points that are different with respect this maze . * Both mazes must have same size . * @ param to maze to be compared . * @ return set of different points . */ public Set < Point > diff ( Maze2D to ) { } }
char [ ] [ ] maze1 = this . getMazeCharArray ( ) ; char [ ] [ ] maze2 = to . getMazeCharArray ( ) ; Set < Point > differentLocations = new HashSet < Point > ( ) ; for ( int row = 0 ; row < this . rows ; row ++ ) { for ( int column = 0 ; column < this . columns ; column ++ ) { if ( maze1 [ row ] [ column ] != maze2 [ row ] [ column ] ) { differentLocations . add ( new Point ( column , row ) ) ; } } } return differentLocations ;
public class FessMessages { /** * Add the created action message for the key ' errors . property _ type _ double ' with parameters . * < pre > * message : { 0 } should be numeric . * < / pre > * @ param property The property name for the message . ( NotNull ) * @ param arg0 The parameter arg0 for message . ( NotNull ) * @ return this . ( NotNull ) */ public FessMessages addErrorsPropertyTypeDouble ( String property , String arg0 ) { } }
assertPropertyNotNull ( property ) ; add ( property , new UserMessage ( ERRORS_property_type_double , arg0 ) ) ; return this ;
public class DefaultParser { /** * Handles the following tokens : * - - L = V * - L = V * - - l = V * - l = V * @ param token the command line token to handle */ private void handleLongOptionWithEqual ( String token ) throws ParseException { } }
int pos = token . indexOf ( '=' ) ; String value = token . substring ( pos + 1 ) ; String opt = token . substring ( 0 , pos ) ; List < String > matchingOpts = options . getMatchingOptions ( opt ) ; if ( matchingOpts . isEmpty ( ) ) { handleUnknownToken ( currentToken ) ; } else if ( matchingOpts . size ( ) > 1 ) { throw new AmbiguousOptionException ( opt , matchingOpts ) ; } else { Option option = options . getOption ( matchingOpts . get ( 0 ) ) ; if ( option . acceptsArg ( ) ) { handleOption ( option ) ; currentOption . addValueForProcessing ( value ) ; currentOption = null ; } else { handleUnknownToken ( currentToken ) ; } }
public class AbstractDatabase {
    /**
     * Purges the given document from the database. This is more drastic than delete(Document):
     * it removes all traces of the document. The purge will NOT be replicated to other databases.
     *
     * @param document the document to purge; must not be null and must already exist in the database
     * @throws IllegalArgumentException if {@code document} is null
     * @throws CouchbaseLiteException   if the document was never saved, or the underlying
     *                                  LiteCore purge/save fails
     */
    public void purge(@NonNull Document document) throws CouchbaseLiteException {
        if (document == null) {
            throw new IllegalArgumentException("document cannot be null.");
        }
        // A document that was never saved has nothing to purge.
        if (document.isNewDocument()) {
            throw new CouchbaseLiteException("Document doesn't exist in the database.",
                    CBLError.Domain.CBLITE, CBLError.Code.NOT_FOUND);
        }
        synchronized (lock) {
            prepareDocument(document);
            // Run the purge inside a transaction; only commit if the purge succeeded.
            boolean commit = false;
            beginTransaction();
            try {
                // revID: null, all revisions are purged.
                if (document.getC4doc().purgeRevision(null) >= 0) {
                    document.getC4doc().save(0);
                    // Reset c4doc:
                    document.replaceC4Document(null);
                    commit = true;
                }
            } catch (LiteCoreException e) {
                throw CBLStatus.convertException(e);
            } finally {
                // Commits on success, rolls back otherwise.
                endTransaction(commit);
            }
        }
    }
}
public class sdxtools_image { /** * Use this API to fetch filtered set of sdxtools _ image resources . * set the filter parameter values in filtervalue object . */ public static sdxtools_image [ ] get_filtered ( nitro_service service , filtervalue [ ] filter ) throws Exception { } }
sdxtools_image obj = new sdxtools_image ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; sdxtools_image [ ] response = ( sdxtools_image [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class Sneaky { /** * Wrap a { @ link CheckedFunction } in a { @ link Function } . * Example : * < code > < pre > * map . computeIfAbsent ( " key " , Unchecked . function ( k - > { * if ( k . length ( ) > 10) * throw new Exception ( " Only short strings allowed " ) ; * return 42; * < / pre > < / code > */ public static < T , R > Function < T , R > function ( CheckedFunction < T , R > function ) { } }
return Unchecked . function ( function , Unchecked . RETHROW_ALL ) ;
public class AmazonSimpleEmailServiceClient { /** * Adds an email address to the list of identities for your Amazon SES account in the current AWS Region and * attempts to verify it . As a result of executing this operation , a customized verification email is sent to the * specified address . * To use this operation , you must first create a custom verification email template . For more information about * creating and using custom verification email templates , see < a * href = " http : / / docs . aws . amazon . com / ses / latest / DeveloperGuide / custom - verification - emails . html " > Using Custom * Verification Email Templates < / a > in the < i > Amazon SES Developer Guide < / i > . * You can execute this operation no more than once per second . * @ param sendCustomVerificationEmailRequest * Represents a request to send a custom verification email to a specified recipient . * @ return Result of the SendCustomVerificationEmail operation returned by the service . * @ throws MessageRejectedException * Indicates that the action failed , and the message could not be sent . Check the error stack for more * information about what caused the error . * @ throws ConfigurationSetDoesNotExistException * Indicates that the configuration set does not exist . * @ throws CustomVerificationEmailTemplateDoesNotExistException * Indicates that a custom verification email template with the name you specified does not exist . * @ throws FromEmailAddressNotVerifiedException * Indicates that the sender address specified for a custom verification email is not verified , and is * therefore not eligible to send the custom verification email . * @ throws ProductionAccessNotGrantedException * Indicates that the account has not been granted production access . * @ sample AmazonSimpleEmailService . SendCustomVerificationEmail * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / email - 2010-12-01 / SendCustomVerificationEmail " * target = " _ top " > AWS API Documentation < / a > */ @ Override public SendCustomVerificationEmailResult sendCustomVerificationEmail ( SendCustomVerificationEmailRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeSendCustomVerificationEmail ( request ) ;
public class AndExpressionImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
switch ( featureID ) { case SimpleAntlrPackage . AND_EXPRESSION__LEFT : setLeft ( ( Expression ) null ) ; return ; case SimpleAntlrPackage . AND_EXPRESSION__RIGHT : setRight ( ( Expression ) null ) ; return ; } super . eUnset ( featureID ) ;
public class StartOnDemandReplicationRunRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( StartOnDemandReplicationRunRequest startOnDemandReplicationRunRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( startOnDemandReplicationRunRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( startOnDemandReplicationRunRequest . getReplicationJobId ( ) , REPLICATIONJOBID_BINDING ) ; protocolMarshaller . marshall ( startOnDemandReplicationRunRequest . getDescription ( ) , DESCRIPTION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JITOptions { /** * Package private method to obtain the keys stored in these * options . Also used on native side . * @ return The keys of these options */ int [ ] getKeys ( ) { } }
Set < Integer > keySet = map . keySet ( ) ; int keys [ ] = new int [ keySet . size ( ) ] ; int index = 0 ; for ( Integer key : keySet ) { keys [ index ] = key ; index ++ ; } return keys ;
public class CountCollection { /** * Gets list . * @ return the list */ public List < T > getList ( ) { } }
final ArrayList < T > list = new ArrayList < T > ( ) ; for ( final Entry < T , AtomicInteger > e : this . map . entrySet ( ) ) { for ( int i = 0 ; i < e . getValue ( ) . get ( ) ; i ++ ) { list . add ( e . getKey ( ) ) ; } } return list ;
public class WorkflowsInner { /** * Enables a workflow . * @ param resourceGroupName The resource group name . * @ param workflowName The workflow name . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < Void > enableAsync ( String resourceGroupName , String workflowName , final ServiceCallback < Void > serviceCallback ) { } }
return ServiceFuture . fromResponse ( enableWithServiceResponseAsync ( resourceGroupName , workflowName ) , serviceCallback ) ;
public class CalendarCodeGenerator { /** * The CLDR contains 4 standard pattern types for date and time : short , medium , long and full . * This generates a switch statement to format patterns of this type . * See CLDR " dateFormats " and " timeFormats " nodes . */ private MethodSpec buildTypedPatternMethod ( String methodName , ClassName type , Format format ) { } }
MethodSpec . Builder method = MethodSpec . methodBuilder ( methodName ) . addAnnotation ( Override . class ) . addModifiers ( PUBLIC ) . addParameter ( CALENDAR_FORMAT , "type" ) . addParameter ( ZonedDateTime . class , "d" ) . addParameter ( StringBuilder . class , "b" ) ; method . beginControlFlow ( "if (type == null)" ) ; method . addStatement ( "return" ) ; method . endControlFlow ( ) ; method . beginControlFlow ( "switch (type)" , type ) ; addTypedPattern ( method , "SHORT" , format . short_ ) ; addTypedPattern ( method , "MEDIUM" , format . medium ) ; addTypedPattern ( method , "LONG" , format . long_ ) ; addTypedPattern ( method , "FULL" , format . full ) ; method . endControlFlow ( ) ; return method . build ( ) ;
public class Utils { /** * Decode the given hex string to binary and then re - encoded it as a Base64 string . * @ param hexValue String of hexadecimal characters . * @ return Decoded binary value re - encoded with Base64. * @ throws IllegalArgumentException If the given value is null or invalid . */ public static String base64FromHex ( String hexValue ) throws IllegalArgumentException { } }
byte [ ] binary = DatatypeConverter . parseHexBinary ( hexValue ) ; return base64FromBinary ( binary ) ;
public class HMap { /** * Static factory method for creating an HMap from three given associations . * @ param key1 the first mapped key * @ param value1 the value mapped at key1 * @ param key2 the second mapped key * @ param value2 the value mapped at key2 * @ param key3 the third mapped key * @ param value3 the value mapped at key3 * @ param < V1 > value1 ' s type * @ param < V2 > value2 ' s type * @ param < V3 > value3 ' s type * @ return an HMap with the given associations */ public static < V1 , V2 , V3 > HMap hMap ( TypeSafeKey < ? , V1 > key1 , V1 value1 , TypeSafeKey < ? , V2 > key2 , V2 value2 , TypeSafeKey < ? , V3 > key3 , V3 value3 ) { } }
return hMap ( key1 , value1 , key2 , value2 ) . put ( key3 , value3 ) ;
public class GenericUtils { /** * Finds the generic type for the given interface for the given class element . * For example , for < code > class AProvider implements Provider & lt ; A & gt ; < / code > * element = AProvider * interfaceName = interface javax . inject . Provider * return A * @ param element The class element * @ param interfaceName The interface * @ return The generic type or null */ protected TypeMirror interfaceGenericTypeFor ( TypeElement element , String interfaceName ) { } }
List < ? extends TypeMirror > typeMirrors = interfaceGenericTypesFor ( element , interfaceName ) ; return typeMirrors . isEmpty ( ) ? null : typeMirrors . get ( 0 ) ;
public class Matrix4d { /** * Apply a model transformation to this matrix for a right - handed coordinate system , * that aligns the local < code > + Z < / code > axis with < code > ( dirX , dirY , dirZ ) < / code > . * If < code > M < / code > is < code > this < / code > matrix and < code > L < / code > the lookat matrix , * then the new matrix will be < code > M * L < / code > . So when transforming a * vector < code > v < / code > with the new matrix by using < code > M * L * v < / code > , * the lookat transformation will be applied first ! * In order to set the matrix to a rotation transformation without post - multiplying it , * use { @ link # rotationTowards ( double , double , double , double , double , double ) rotationTowards ( ) } . * This method is equivalent to calling : < code > mulAffine ( new Matrix4d ( ) . lookAt ( 0 , 0 , 0 , - dirX , - dirY , - dirZ , upX , upY , upZ ) . invertAffine ( ) ) < / code > * @ see # rotateTowards ( Vector3dc , Vector3dc ) * @ see # rotationTowards ( double , double , double , double , double , double ) * @ param dirX * the x - coordinate of the direction to rotate towards * @ param dirY * the y - coordinate of the direction to rotate towards * @ param dirZ * the z - coordinate of the direction to rotate towards * @ param upX * the x - coordinate of the up vector * @ param upY * the y - coordinate of the up vector * @ param upZ * the z - coordinate of the up vector * @ return this */ public Matrix4d rotateTowards ( double dirX , double dirY , double dirZ , double upX , double upY , double upZ ) { } }
return rotateTowards ( dirX , dirY , dirZ , upX , upY , upZ , this ) ;
public class ProducerService { /** * Adds publication - specific metadata to the message . * @ param channel The message channel . * @ param message The message . * @ param recipients The message recipients . * @ return The original message . */ private Message prepare ( String channel , Message message , Recipient [ ] recipients ) { } }
message . setMetadata ( "cwf.pub.node" , nodeId ) ; message . setMetadata ( "cwf.pub.channel" , channel ) ; message . setMetadata ( "cwf.pub.event" , UUID . randomUUID ( ) . toString ( ) ) ; message . setMetadata ( "cwf.pub.when" , System . currentTimeMillis ( ) ) ; message . setMetadata ( "cwf.pub.recipients" , recipients ) ; return message ;
public class AutoFormatView { /** * Return the first view with the given content type . * @ param contentType * @ return */ public View viewByContentType ( String contentType ) { } }
for ( View view : views . values ( ) ) { if ( view . getContentType ( ) . equals ( contentType ) ) { return view ; } } return null ;
public class ImportInstanceTaskDetails { /** * The volumes . * @ return The volumes . */ public java . util . List < ImportInstanceVolumeDetailItem > getVolumes ( ) { } }
if ( volumes == null ) { volumes = new com . amazonaws . internal . SdkInternalList < ImportInstanceVolumeDetailItem > ( ) ; } return volumes ;
public class FunctionsInner { /** * Updates an existing function under an existing streaming job . This can be used to partially update ( ie . update one or two properties ) a function without affecting the rest the job or function definition . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param jobName The name of the streaming job . * @ param functionName The name of the function . * @ param function A function object . The properties specified here will overwrite the corresponding properties in the existing function ( ie . Those properties will be updated ) . Any properties that are set to null here will mean that the corresponding property in the existing function will remain the same and not change as a result of this PATCH operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the FunctionInner object if successful . */ public FunctionInner update ( String resourceGroupName , String jobName , String functionName , FunctionInner function ) { } }
return updateWithServiceResponseAsync ( resourceGroupName , jobName , functionName , function ) . toBlocking ( ) . single ( ) . body ( ) ;
public class RESTAuthHelper { /** * Ensures that the given { @ code principal } has specified { @ code permission } on the given { @ code resource } . * @ param authHeader contents of an HTTP Authorization header * @ param resource representation of the resource being accessed * @ param principal the identity of the subject accessing the resource * @ param permission the permission * @ throws AuthException if authentication or authorization fails */ public void authorize ( List < String > authHeader , String resource , Principal principal , AuthHandler . Permissions permission ) throws AuthException { } }
if ( ! isAuthorized ( authHeader , resource , principal , permission ) ) { throw new AuthorizationException ( String . format ( "Failed to authorize for resource [%s]" , resource ) , Response . Status . FORBIDDEN . getStatusCode ( ) ) ; }
public class NumberFormatterDemo { /** * Format numbers in this locale for several currencies . */ private static void money ( CLDR . Locale locale , CLDR . Currency [ ] currencies , String [ ] numbers , CurrencyFormatOptions opts ) { } }
for ( CLDR . Currency currency : currencies ) { System . out . println ( "Currency " + currency ) ; for ( String num : numbers ) { BigDecimal n = new BigDecimal ( num ) ; NumberFormatter fmt = CLDR . get ( ) . getNumberFormatter ( locale ) ; StringBuilder buf = new StringBuilder ( " " ) ; fmt . formatCurrency ( n , currency , buf , opts ) ; System . out . println ( buf . toString ( ) ) ; } System . out . println ( ) ; } System . out . println ( ) ;
public class JobUploader { /** * Creates the Job folder on the DFS . * @ param applicationId * @ return a reference to the JobFolder that can be used to upload files to it . * @ throws IOException */ public JobFolder createJobFolderWithApplicationId ( final String applicationId ) throws IOException { } }
final Path jobFolderPath = jobSubmissionDirectoryProvider . getJobSubmissionDirectoryPath ( applicationId ) ; final String finalJobFolderPath = jobFolderPath . toString ( ) ; LOG . log ( Level . FINE , "Final job submission Directory: " + finalJobFolderPath ) ; return createJobFolder ( finalJobFolderPath ) ;
public class JerseyClientBuilder { /** * Uses the given { @ link ExecutorService } and { @ link ObjectMapper } . * @ param executorService a thread pool * @ param objectMapper an object mapper * @ return { @ code this } * @ see # using ( io . dropwizard . setup . Environment ) */ public JerseyClientBuilder using ( ExecutorService executorService , ObjectMapper objectMapper ) { } }
this . executorService = executorService ; this . objectMapper = objectMapper ; return this ;
public class DeferredLintHandler { /** * Invoke all { @ link LintLogger } s that were associated with the provided { @ code pos } . */ public void flush ( DiagnosticPosition pos ) { } }
ListBuffer < LintLogger > loggers = loggersQueue . get ( pos ) ; if ( loggers != null ) { for ( LintLogger lintLogger : loggers ) { lintLogger . report ( ) ; } loggersQueue . remove ( pos ) ; }
public class Table { /** * Open the table , flush all rows from start , but do not freeze the table * @ param util a XMLUtil instance for writing XML * @ param appendable where to write * @ throws IOException if an I / O error occurs during the flush */ public void flushAllAvailableRows ( final XMLUtil util , final Appendable appendable ) throws IOException { } }
this . appender . flushAllAvailableRows ( util , appendable ) ;
public class DescribeVpcEndpointServicesResult { /** * A list of supported services . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setServiceNames ( java . util . Collection ) } or { @ link # withServiceNames ( java . util . Collection ) } if you want to * override the existing values . * @ param serviceNames * A list of supported services . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeVpcEndpointServicesResult withServiceNames ( String ... serviceNames ) { } }
if ( this . serviceNames == null ) { setServiceNames ( new com . amazonaws . internal . SdkInternalList < String > ( serviceNames . length ) ) ; } for ( String ele : serviceNames ) { this . serviceNames . add ( ele ) ; } return this ;
public class AlgorithmValidationSpecification { /** * An array of < code > AlgorithmValidationProfile < / code > objects , each of which specifies a training job and batch * transform job that Amazon SageMaker runs to validate your algorithm . * @ param validationProfiles * An array of < code > AlgorithmValidationProfile < / code > objects , each of which specifies a training job and * batch transform job that Amazon SageMaker runs to validate your algorithm . */ public void setValidationProfiles ( java . util . Collection < AlgorithmValidationProfile > validationProfiles ) { } }
if ( validationProfiles == null ) { this . validationProfiles = null ; return ; } this . validationProfiles = new java . util . ArrayList < AlgorithmValidationProfile > ( validationProfiles ) ;
public class JdbcCpoXaAdapter {
    /**
     * Iterates through a collection of objects, creating and storing them in the datasource. The assumption is
     * that the objects contained in the collection do not yet exist in the datasource.
     * <p>
     * The objects in the collection are treated as one transaction, assuming the datasource supports
     * transactions: if one object fails to be created, processing stops and all objects created so far are
     * rolled back (subject to the underlying datasource's rollback support).
     *
     * @param coll a collection of objects defined within the metadata of the datasource; if a class is not
     *             defined, an exception is thrown
     * @return the number of objects created in the datasource
     * @throws CpoException if there are errors accessing the datasource
     */
    @Override
    public <T> long insertObjects(Collection<T> coll) throws CpoException {
        // Delegate to the XA resource bound to the current transaction context.
        return getCurrentResource().insertObjects(coll);
    }
}
public class DefaultOpenAPIModelFilter {
    /**
     * {@inheritDoc}
     *
     * <p>Default implementation: returns the link unchanged, applying no filtering.
     */
    @Override
    public Link visitLink(Context context, String key, Link link) {
        return link;
    }
}
public class Sheet { /** * Adds eval scripts to update the bad data array in the sheet to render validation failures produced by the most recent ajax update attempt . * @ param context the FacesContext */ public void renderBadUpdateScript ( final FacesContext context ) { } }
final String widgetVar = resolveWidgetVar ( ) ; final String invalidValue = getInvalidDataValue ( ) ; StringBuilder sb = new StringBuilder ( "PF('" + widgetVar + "')" ) ; sb . append ( ".cfg.errors=" ) ; sb . append ( invalidValue ) ; sb . append ( ";" ) ; sb . append ( "PF('" + widgetVar + "')" ) ; sb . append ( ".ht.render();" ) ; PrimeFaces . current ( ) . executeScript ( sb . toString ( ) ) ; sb = new StringBuilder ( ) ; sb . append ( "PF('" ) . append ( widgetVar ) . append ( "')" ) ; sb . append ( ".sheetDiv.removeClass('ui-state-error')" ) ; if ( ! getInvalidUpdates ( ) . isEmpty ( ) ) { sb . append ( ".addClass('ui-state-error')" ) ; } PrimeFaces . current ( ) . executeScript ( sb . toString ( ) ) ;
public class Preconditions {
    /**
     * An {@code int}-specialized version of {@link #checkPrecondition(Object, boolean, Function)}.
     *
     * @param value     the value being checked
     * @param condition the predicate result for {@code value}
     * @param describer produces a human-readable description of the predicate for failures
     * @return {@code value}, unchanged, when the predicate holds
     * @throws PreconditionViolationException if the predicate is false
     */
    public static int checkPreconditionI(final int value, final boolean condition, final IntFunction<String> describer) {
        // Delegate to the shared int-specialized checker.
        return innerCheckI(value, condition, describer);
    }
}
public class InferenceSpecificationMarshaller {
    /**
     * Marshalls the given {@code InferenceSpecification} into the protocol representation,
     * writing each property through its pre-declared marshalling binding.
     *
     * @throws SdkClientException if the input is null or any property fails to marshall
     */
    public void marshall(InferenceSpecification inferenceSpecification, ProtocolMarshaller protocolMarshaller) {
        // Guard against a null model object — marshalling bindings cannot handle it.
        if (inferenceSpecification == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each property against its static binding descriptor.
            protocolMarshaller.marshall(inferenceSpecification.getContainers(), CONTAINERS_BINDING);
            protocolMarshaller.marshall(inferenceSpecification.getSupportedTransformInstanceTypes(), SUPPORTEDTRANSFORMINSTANCETYPES_BINDING);
            protocolMarshaller.marshall(inferenceSpecification.getSupportedRealtimeInferenceInstanceTypes(), SUPPORTEDREALTIMEINFERENCEINSTANCETYPES_BINDING);
            protocolMarshaller.marshall(inferenceSpecification.getSupportedContentTypes(), SUPPORTEDCONTENTTYPES_BINDING);
            protocolMarshaller.marshall(inferenceSpecification.getSupportedResponseMIMETypes(), SUPPORTEDRESPONSEMIMETYPES_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PropertiesPropertyResolver { /** * Resolves a property based on it ' s name . * @ param propertyName property name to be resolved * @ return value of property or null if property is not set or is empty . */ public String findProperty ( final String propertyName ) { } }
String value = null ; if ( m_properties != null ) { value = m_properties . getProperty ( propertyName ) ; } return value ;
public class DeleteMarshaller {
    /**
     * Marshalls the given {@code Delete} object into the protocol representation,
     * writing each property through its pre-declared marshalling binding.
     *
     * @throws SdkClientException if the input is null or any property fails to marshall
     */
    public void marshall(Delete delete, ProtocolMarshaller protocolMarshaller) {
        // Guard against a null model object — marshalling bindings cannot handle it.
        if (delete == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each property against its static binding descriptor.
            protocolMarshaller.marshall(delete.getKey(), KEY_BINDING);
            protocolMarshaller.marshall(delete.getTableName(), TABLENAME_BINDING);
            protocolMarshaller.marshall(delete.getConditionExpression(), CONDITIONEXPRESSION_BINDING);
            protocolMarshaller.marshall(delete.getExpressionAttributeNames(), EXPRESSIONATTRIBUTENAMES_BINDING);
            protocolMarshaller.marshall(delete.getExpressionAttributeValues(), EXPRESSIONATTRIBUTEVALUES_BINDING);
            protocolMarshaller.marshall(delete.getReturnValuesOnConditionCheckFailure(), RETURNVALUESONCONDITIONCHECKFAILURE_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}