signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class SQLDroidDatabaseMetaData { /** * Applies SQL escapes for special characters in a given string .
* @ param val The string to escape .
* @ return The SQL escaped string . */
private String escape ( final String val ) { } } | // TODO : this function is ugly , pass this work off to SQLite , then we
// don ' t have to worry about Unicode 4 , other characters needing
// escaping , etc .
int len = val . length ( ) ; StringBuilder buf = new StringBuilder ( len ) ; for ( int i = 0 ; i < len ; i ++ ) { if ( val . charAt ( i ) == '\'' ) { buf . append ( '\'' ) ; } buf . append ( val . charAt ( i ) ) ; } return buf . toString ( ) ; |
public class ItemGroupMixIn { /** * Copies an existing { @ link TopLevelItem } to a new name . */
@ SuppressWarnings ( { } } | "unchecked" } ) public synchronized < T extends TopLevelItem > T copy ( T src , String name ) throws IOException { acl . checkPermission ( Item . CREATE ) ; src . checkPermission ( Item . EXTENDED_READ ) ; XmlFile srcConfigFile = Items . getConfigFile ( src ) ; if ( ! src . hasPermission ( Item . CONFIGURE ) ) { Matcher matcher = AbstractItem . SECRET_PATTERN . matcher ( srcConfigFile . asString ( ) ) ; while ( matcher . find ( ) ) { if ( Secret . decrypt ( matcher . group ( 1 ) ) != null ) { // AccessDeniedException2 does not permit a custom message , and anyway redirecting the user to the login screen is obviously pointless .
throw new AccessDeniedException ( Messages . ItemGroupMixIn_may_not_copy_as_it_contains_secrets_and_ ( src . getFullName ( ) , Jenkins . getAuthentication ( ) . getName ( ) , Item . PERMISSIONS . title , Item . EXTENDED_READ . name , Item . CONFIGURE . name ) ) ; } } } src . getDescriptor ( ) . checkApplicableIn ( parent ) ; acl . getACL ( ) . checkCreatePermission ( parent , src . getDescriptor ( ) ) ; ItemListener . checkBeforeCopy ( src , parent ) ; T result = ( T ) createProject ( src . getDescriptor ( ) , name , false ) ; // copy config
Files . copy ( Util . fileToPath ( srcConfigFile . getFile ( ) ) , Util . fileToPath ( Items . getConfigFile ( result ) . getFile ( ) ) , StandardCopyOption . COPY_ATTRIBUTES , StandardCopyOption . REPLACE_EXISTING ) ; // reload from the new config
final File rootDir = result . getRootDir ( ) ; result = Items . whileUpdatingByXml ( new NotReallyRoleSensitiveCallable < T , IOException > ( ) { @ Override public T call ( ) throws IOException { return ( T ) Items . load ( parent , rootDir ) ; } } ) ; result . onCopiedFrom ( src ) ; add ( result ) ; ItemListener . fireOnCopied ( src , result ) ; Jenkins . getInstance ( ) . rebuildDependencyGraphAsync ( ) ; return result ; |
public class PuiInputTextareaRenderer { /** * Generates the HTML code . */
@ Override public void encodeBegin ( FacesContext context , UIComponent component ) throws IOException { } } | ResponseWriter writer = context . getResponseWriter ( ) ; PuiTextarea input = ( PuiTextarea ) component ; writer . startElement ( "pui-textarea" , component ) ; renderNonEmptyAttribute ( writer , "label" , input . getLabel ( ) ) ; renderNGModel ( input , writer ) ; renderJSR303Constraints ( writer , input ) ; writer . endElement ( "pui-textarea" ) ; |
public class SIB { /** * Initialize clusters with KMeans + + algorithm . */
private static int [ ] seed ( SparseDataset data , int k ) { } } | int n = data . size ( ) ; int [ ] y = new int [ n ] ; SparseArray centroid = data . get ( Math . randomInt ( n ) ) . x ; double [ ] D = new double [ n ] ; for ( int i = 0 ; i < n ; i ++ ) { D [ i ] = Double . MAX_VALUE ; } // pick the next center
for ( int i = 1 ; i < k ; i ++ ) { for ( int j = 0 ; j < n ; j ++ ) { double dist = Math . JensenShannonDivergence ( data . get ( j ) . x , centroid ) ; if ( dist < D [ j ] ) { D [ j ] = dist ; y [ j ] = i - 1 ; } } double cutoff = Math . random ( ) * Math . sum ( D ) ; double cost = 0.0 ; int index = 0 ; for ( ; index < n ; index ++ ) { cost += D [ index ] ; if ( cost >= cutoff ) { break ; } } centroid = data . get ( index ) . x ; } for ( int j = 0 ; j < n ; j ++ ) { // compute the distance between this sample and the current center
double dist = Math . JensenShannonDivergence ( data . get ( j ) . x , centroid ) ; if ( dist < D [ j ] ) { D [ j ] = dist ; y [ j ] = k - 1 ; } } return y ; |
public class ServiceConfigSupplier { /** * Create a { @ link ServiceConfigSupplier } instance .
* @ return a { @ code ServiceConfigSuppler } */
public static ServiceConfigSupplier create ( ) { } } | NetHttpTransport httpTransport = new NetHttpTransport ( ) ; JacksonFactory jsonFactory = new JacksonFactory ( ) ; GoogleCredential credential ; try { credential = GoogleCredential . getApplicationDefault ( httpTransport , jsonFactory ) . createScoped ( SCOPES ) ; } catch ( IOException e ) { throw new IllegalStateException ( "could not get credentials for fetching service config!" ) ; } return new ServiceConfigSupplier ( new SystemEnvironment ( ) , httpTransport , jsonFactory , credential ) ; |
public class Node { /** * Returns whether a node matches a simple or a qualified name , such as
* < code > x < / code > or < code > a . b . c < / code > or < code > this . a < / code > . */
private boolean matchesQualifiedName ( String qname , int endIndex ) { } } | int start = qname . lastIndexOf ( '.' , endIndex - 1 ) + 1 ; switch ( this . getToken ( ) ) { case NAME : String name = getString ( ) ; return start == 0 && ! name . isEmpty ( ) && name . length ( ) == endIndex && qname . startsWith ( name ) ; case THIS : return start == 0 && 4 == endIndex && qname . startsWith ( "this" ) ; case SUPER : return start == 0 && 5 == endIndex && qname . startsWith ( "super" ) ; case GETPROP : String prop = getLastChild ( ) . getString ( ) ; return start > 1 && prop . length ( ) == endIndex - start && prop . regionMatches ( 0 , qname , start , endIndex - start ) && getFirstChild ( ) . matchesQualifiedName ( qname , start - 1 ) ; case MEMBER_FUNCTION_DEF : // These are explicitly * not * qualified name components .
default : return false ; } |
public class XTreeHeader { /** * Initializes this header from the specified file . Reads the integer values
* < code > version < / code > , < code > pageSize < / code > , { @ link # dirCapacity } ,
* { @ link # leafCapacity } , { @ link # dirMinimum } , and { @ link # leafMinimum } , as
* well as the minimum fanout { @ link # min _ fanout } , the tree ' s dimension
* { @ link # dimensionality } , the maximum overlap
* { @ link # max _ overlap } and the supernode offset { @ link # supernode _ offset }
* from the file . */
@ Override public void readHeader ( RandomAccessFile file ) throws IOException { } } | super . readHeader ( file ) ; this . min_fanout = file . readInt ( ) ; this . num_elements = file . readLong ( ) ; this . dimensionality = file . readInt ( ) ; this . max_overlap = file . readFloat ( ) ; this . supernode_offset = file . readLong ( ) ; |
public class NativeGenerator { /** * # string _ id _ map # */
@ Override protected int findPrototypeId ( String s ) { } } | int id ; // # generated # Last update : 2007-06-14 13:13:03 EDT
L0 : { id = 0 ; String X = null ; int c ; int s_length = s . length ( ) ; if ( s_length == 4 ) { c = s . charAt ( 0 ) ; if ( c == 'n' ) { X = "next" ; id = Id_next ; } else if ( c == 's' ) { X = "send" ; id = Id_send ; } } else if ( s_length == 5 ) { c = s . charAt ( 0 ) ; if ( c == 'c' ) { X = "close" ; id = Id_close ; } else if ( c == 't' ) { X = "throw" ; id = Id_throw ; } } else if ( s_length == 12 ) { X = "__iterator__" ; id = Id___iterator__ ; } if ( X != null && X != s && ! X . equals ( s ) ) id = 0 ; break L0 ; } // # / generated #
return id ; |
public class InternalSARLParser { /** * InternalSARL . g : 11457:1 : ruleRichStringElseIf returns [ EObject current = null ] : ( otherlv _ 0 = ' ELSEIF ' ( ( lv _ if _ 1_0 = ruleXExpression ) ) ( ( lv _ then _ 2_0 = ruleInternalRichString ) ) ) ; */
public final EObject ruleRichStringElseIf ( ) throws RecognitionException { } } | EObject current = null ; Token otherlv_0 = null ; EObject lv_if_1_0 = null ; EObject lv_then_2_0 = null ; enterRule ( ) ; try { // InternalSARL . g : 11463:2 : ( ( otherlv _ 0 = ' ELSEIF ' ( ( lv _ if _ 1_0 = ruleXExpression ) ) ( ( lv _ then _ 2_0 = ruleInternalRichString ) ) ) )
// InternalSARL . g : 11464:2 : ( otherlv _ 0 = ' ELSEIF ' ( ( lv _ if _ 1_0 = ruleXExpression ) ) ( ( lv _ then _ 2_0 = ruleInternalRichString ) ) )
{ // InternalSARL . g : 11464:2 : ( otherlv _ 0 = ' ELSEIF ' ( ( lv _ if _ 1_0 = ruleXExpression ) ) ( ( lv _ then _ 2_0 = ruleInternalRichString ) ) )
// InternalSARL . g : 11465:3 : otherlv _ 0 = ' ELSEIF ' ( ( lv _ if _ 1_0 = ruleXExpression ) ) ( ( lv _ then _ 2_0 = ruleInternalRichString ) )
{ otherlv_0 = ( Token ) match ( input , 104 , FOLLOW_45 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_0 , grammarAccess . getRichStringElseIfAccess ( ) . getELSEIFKeyword_0 ( ) ) ; } // InternalSARL . g : 11469:3 : ( ( lv _ if _ 1_0 = ruleXExpression ) )
// InternalSARL . g : 11470:4 : ( lv _ if _ 1_0 = ruleXExpression )
{ // InternalSARL . g : 11470:4 : ( lv _ if _ 1_0 = ruleXExpression )
// InternalSARL . g : 11471:5 : lv _ if _ 1_0 = ruleXExpression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getRichStringElseIfAccess ( ) . getIfXExpressionParserRuleCall_1_0 ( ) ) ; } pushFollow ( FOLLOW_104 ) ; lv_if_1_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getRichStringElseIfRule ( ) ) ; } set ( current , "if" , lv_if_1_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalSARL . g : 11488:3 : ( ( lv _ then _ 2_0 = ruleInternalRichString ) )
// InternalSARL . g : 11489:4 : ( lv _ then _ 2_0 = ruleInternalRichString )
{ // InternalSARL . g : 11489:4 : ( lv _ then _ 2_0 = ruleInternalRichString )
// InternalSARL . g : 11490:5 : lv _ then _ 2_0 = ruleInternalRichString
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getRichStringElseIfAccess ( ) . getThenInternalRichStringParserRuleCall_2_0 ( ) ) ; } pushFollow ( FOLLOW_2 ) ; lv_then_2_0 = ruleInternalRichString ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getRichStringElseIfRule ( ) ) ; } set ( current , "then" , lv_then_2_0 , "org.eclipse.xtend.core.Xtend.InternalRichString" ) ; afterParserOrEnumRuleCall ( ) ; } } } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ; |
public class HttpHeaders { /** * Returns the subset of the headers in { @ code requestHeaders } that impact the content of
* response ' s body . */
public static Headers varyHeaders ( Headers requestHeaders , Headers responseHeaders ) { } } | Set < String > varyFields = varyFields ( responseHeaders ) ; if ( varyFields . isEmpty ( ) ) return EMPTY_HEADERS ; Headers . Builder result = new Headers . Builder ( ) ; for ( int i = 0 , size = requestHeaders . size ( ) ; i < size ; i ++ ) { String fieldName = requestHeaders . name ( i ) ; if ( varyFields . contains ( fieldName ) ) { result . add ( fieldName , requestHeaders . value ( i ) ) ; } } return result . build ( ) ; |
public class ProteinNameLister { /** * Here is a more elegant way of doing the previous method !
* @ param model BioPAX object Model */
public static void listUnificationXrefsPerPathway ( Model model ) { } } | // This is a visitor for elements in a pathway - direct and indirect
Visitor visitor = new Visitor ( ) { public void visit ( BioPAXElement domain , Object range , Model model , PropertyEditor editor ) { if ( range instanceof physicalEntity ) { // Do whatever you want with the pe and xref here
physicalEntity pe = ( physicalEntity ) range ; ClassFilterSet < xref , unificationXref > unis = new ClassFilterSet < xref , unificationXref > ( pe . getXREF ( ) , unificationXref . class ) ; for ( unificationXref uni : unis ) { System . out . println ( "pe.getNAME() = " + pe . getNAME ( ) ) ; System . out . println ( "uni = " + uni . getID ( ) ) ; } } } } ; Traverser traverser = new Traverser ( SimpleEditorMap . L2 , visitor ) ; Set < pathway > pathways = model . getObjects ( pathway . class ) ; for ( pathway pathway : pathways ) { traverser . traverse ( pathway , model ) ; } |
public class OidcAuthorizationRequestSupport { /** * Is cas authentication old for max age authorization request ?
* @ param context the context
* @ param profile the profile
* @ return true / false */
public static boolean isCasAuthenticationOldForMaxAgeAuthorizationRequest ( final WebContext context , final UserProfile profile ) { } } | val authTime = profile . getAttribute ( CasProtocolConstants . VALIDATION_CAS_MODEL_ATTRIBUTE_NAME_AUTHENTICATION_DATE ) ; if ( authTime == null ) { return false ; } val dt = ZonedDateTime . parse ( authTime . toString ( ) ) ; return isCasAuthenticationOldForMaxAgeAuthorizationRequest ( context , dt ) ; |
public class MultiNote { /** * Returns the strict durations composing this multi notes .
* @ return The strict durations composing this multi notes . The array is sorted
* from shortest durations to longest ones . */
public short [ ] getStrictDurations ( ) { } } | Vector durations = new Vector ( ) ; short currentDuration = 0 ; for ( int i = 0 ; i < m_notes . size ( ) ; i ++ ) { currentDuration = ( ( Note ) ( m_notes . elementAt ( i ) ) ) . getStrictDuration ( ) ; if ( durations . indexOf ( new Short ( currentDuration ) ) == - 1 ) durations . addElement ( currentDuration ) ; } if ( durations . size ( ) == 0 ) return null ; else { // sort the durations
Vector sortedDurations = new Vector ( ) ; for ( int i = 0 ; i < durations . size ( ) ; i ++ ) { int j = 0 ; while ( j < sortedDurations . size ( ) && ( Short ) sortedDurations . elementAt ( j ) < ( ( Short ) durations . elementAt ( i ) ) ) j ++ ; sortedDurations . insertElementAt ( durations . elementAt ( i ) , j ) ; } short [ ] durationsAsArray = new short [ sortedDurations . size ( ) ] ; for ( int i = 0 ; i < sortedDurations . size ( ) ; i ++ ) durationsAsArray [ i ] = ( Short ) sortedDurations . elementAt ( i ) ; return durationsAsArray ; } |
public class CreateJobRequest { /** * If you specify a preset in < code > PresetId < / code > for which the value of < code > Container < / code > is fmp4
* ( Fragmented MP4 ) or ts ( MPEG - TS ) , Playlists contains information about the master playlists that you want Elastic
* Transcoder to create .
* The maximum number of master playlists in a job is 30.
* @ return If you specify a preset in < code > PresetId < / code > for which the value of < code > Container < / code > is fmp4
* ( Fragmented MP4 ) or ts ( MPEG - TS ) , Playlists contains information about the master playlists that you want
* Elastic Transcoder to create . < / p >
* The maximum number of master playlists in a job is 30. */
public java . util . List < CreateJobPlaylist > getPlaylists ( ) { } } | if ( playlists == null ) { playlists = new com . amazonaws . internal . SdkInternalList < CreateJobPlaylist > ( ) ; } return playlists ; |
public class FirestoreAdminClient { /** * Exports a copy of all or a subset of documents from Google Cloud Firestore to another storage
* system , such as Google Cloud Storage . Recent updates to documents may not be reflected in the
* export . The export occurs in the background and its progress can be monitored and managed via
* the Operation resource that is created . The output of an export may only be used once the
* associated operation is done . If an export operation is cancelled before completion it may
* leave partial data behind in Google Cloud Storage .
* < p > Sample code :
* < pre > < code >
* try ( FirestoreAdminClient firestoreAdminClient = FirestoreAdminClient . create ( ) ) {
* DatabaseName name = DatabaseName . of ( " [ PROJECT ] " , " [ DATABASE ] " ) ;
* Operation response = firestoreAdminClient . exportDocuments ( name ) ;
* < / code > < / pre >
* @ param name Database to export . Should be of the form :
* ` projects / { project _ id } / databases / { database _ id } ` .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final Operation exportDocuments ( DatabaseName name ) { } } | ExportDocumentsRequest request = ExportDocumentsRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; return exportDocuments ( request ) ; |
public class BloatedAssignmentScope { /** * processes a static call or initializer by checking to see if the call is risky , and returning a OpcodeStack item user value saying so .
* @ return the user object to place on the OpcodeStack */
@ Nullable private UserObject sawStaticCall ( ) { } } | if ( getSigConstantOperand ( ) . endsWith ( Values . SIG_VOID ) ) { return null ; } return new UserObject ( isRiskyMethodCall ( ) ) ; |
public class JspWriterImpl { /** * Write a single character . */
public void write ( int c ) throws IOException { } } | // defect 312981 begin
// ensureOpen ( ) ;
if ( closed ) { throw new IOException ( "Stream closed" ) ; } // defect 312981 end
if ( bufferSize == 0 ) { initOut ( ) ; out . write ( c ) ; } else { if ( nextChar >= bufferSize ) if ( autoFlush ) flushBuffer ( ) ; else bufferOverflow ( ) ; cb [ nextChar ++ ] = ( char ) c ; } |
public class AbstractParser { /** * This method differs from { @ link # getBoolean ( String , JSONObject ) } in that
* the value is not be the standard JSON true / false but rather the string
* yes / no .
* @ param key name of the field to fetch from the json object
* @ param jsonObject object from which to fetch the value
* @ return boolean value corresponding to the key or false */
private Boolean getNonStandardBoolean ( final String key , final JSONObject jsonObject ) { } } | Boolean value = false ; try { String stringValue = jsonObject . getString ( key ) ; LOGGER . debug ( "got value: {} for key {}" , stringValue , key ) ; if ( isTruthy ( stringValue ) ) { LOGGER . debug ( "value is truthy" ) ; value = true ; } } catch ( JSONException e ) { LOGGER . error ( "Could not get boolean (not even truthy) from JSONObject for key: " + key , e ) ; } return value ; |
public class TableInformation { /** * Appends the column information for given values for column
* < code > _ colName < / code > .
* @ param _ colName name of the column
* @ param _ colTypes eFaps types of the column
* @ param _ size size ( for character )
* @ param _ scale scale ( for number )
* @ param _ isNullable is the column nullable ?
* @ see # colMap */
public void addColInfo ( final String _colName , final Set < AbstractDatabase . ColumnType > _colTypes , final int _size , final int _scale , final boolean _isNullable ) { } } | this . colMap . put ( _colName . toUpperCase ( ) , new ColumnInformation ( _colName , _colTypes , _size , _scale , _isNullable ) ) ; |
public class MethodSimulator { /** * Merges the { @ code returnElement } with the given element which was popped from the stack .
* If the { @ code returnElement } existed before , the values are merged .
* @ param stackElement The popped element */
private void mergeReturnElement ( final Element stackElement ) { } } | if ( returnElement != null ) stackElement . merge ( returnElement ) ; returnElement = stackElement ; |
public class MOEADD { /** * Calculate the dot product of two vectors */
public double innerproduct ( double [ ] vec1 , double [ ] vec2 ) { } } | double sum = 0 ; for ( int i = 0 ; i < vec1 . length ; i ++ ) { sum += vec1 [ i ] * vec2 [ i ] ; } return sum ; |
public class ClassLoaders { /** * Loads the class with the given class name with the given classloader . If it is { @ code null } ,
* load the class with the context ClassLoader of current thread if it presents , otherwise load the class
* with the ClassLoader of the caller object .
* @ param className Name of the class to load .
* @ param classLoader Classloader for loading the class . It could be { @ code null } .
* @ param caller The object who call this method .
* @ return The loaded class .
* @ throws ClassNotFoundException If failed to load the given class . */
public static Class < ? > loadClass ( String className , @ Nullable ClassLoader classLoader , Object caller ) throws ClassNotFoundException { } } | ClassLoader cl = Objects . firstNonNull ( classLoader , Objects . firstNonNull ( Thread . currentThread ( ) . getContextClassLoader ( ) , caller . getClass ( ) . getClassLoader ( ) ) ) ; return cl . loadClass ( className ) ; |
public class Executor { /** * Returns the current { @ link WorkUnit } ( of { @ link # getCurrentExecutable ( ) the current executable } )
* that this executor is running .
* @ return
* null if the executor is idle . */
@ CheckForNull public WorkUnit getCurrentWorkUnit ( ) { } } | lock . readLock ( ) . lock ( ) ; try { return workUnit ; } finally { lock . readLock ( ) . unlock ( ) ; } |
public class MatrixVectorWriter { /** * Prints an array to the underlying stream . One entry per line . */
public void printArray ( double [ ] data ) { } } | for ( int i = 0 ; i < data . length ; ++ i ) format ( Locale . ENGLISH , "% .12e%n" , data [ i ] ) ; |
public class Counters { /** * Return the L1 norm of a counter . < i > Implementation note : < / i > The method
* name favors legibility of the L over the convention of using lowercase
* names for methods .
* @ param c
* The Counter
* @ return Its length */
public static < E , C extends Counter < E > > double L1Norm ( C c ) { } } | double sumAbs = 0.0 ; for ( E key : c . keySet ( ) ) { double count = c . getCount ( key ) ; if ( count != 0.0 ) { sumAbs += Math . abs ( count ) ; } } return sumAbs ; |
public class TileDaoUtils { /** * Get the closest zoom level for the provided width and height in the
* default units
* @ param widths
* sorted widths
* @ param heights
* sorted heights
* @ param tileMatrices
* tile matrices
* @ param width
* in default units
* @ param height
* in default units
* @ return tile matrix zoom level
* @ since 1.2.1 */
public static Long getClosestZoomLevel ( double [ ] widths , double [ ] heights , List < TileMatrix > tileMatrices , double width , double height ) { } } | return getZoomLevel ( widths , heights , tileMatrices , width , height , false ) ; |
public class DefaultAccess { /** * Initializes the module .
* @ throws ModuleInitializationException
* If the module cannot be initialized . */
@ Override public void initModule ( ) throws ModuleInitializationException { } } | String dsMediation = getParameter ( "doMediateDatastreams" ) ; if ( dsMediation == null ) { throw new ModuleInitializationException ( "doMediateDatastreams parameter must be specified." , getRole ( ) ) ; } |
public class DOMReader { /** * Parses an XML document and passes it on to the given handler , or to
* a lambda expression for handling .
* @ param xml
* the URL of the XML to parse , as a string .
* @ param xsd
* the URL of the XML ' s schema definition , as a string .
* @ param handler
* the handler that will handle the document .
* @ param validate
* whether validation against the given schema should be performed .
* @ throws IOException
* @ throws InvalidArgumentException
* @ throws SAXException
* @ throws ParserConfigurationException
* @ throws Exception
* any handler - related exceptions . */
public static void loadDocument ( String xml , String xsd , DOMHandler handler , boolean validate ) throws IOException , InvalidArgumentException , SAXException , ParserConfigurationException , Exception { } } | URL xmlUrl = URLFactory . makeURL ( xml ) ; URL xsdUrl = URLFactory . makeURL ( xsd ) ; loadDocument ( xmlUrl , xsdUrl , handler , validate ) ; |
public class GetCrawlerMetricsResult { /** * A list of metrics for the specified crawler .
* @ param crawlerMetricsList
* A list of metrics for the specified crawler . */
public void setCrawlerMetricsList ( java . util . Collection < CrawlerMetrics > crawlerMetricsList ) { } } | if ( crawlerMetricsList == null ) { this . crawlerMetricsList = null ; return ; } this . crawlerMetricsList = new java . util . ArrayList < CrawlerMetrics > ( crawlerMetricsList ) ; |
public class Main { /** * Scan the arguments to find an option that specifies a directory .
* Create the directory if necessary . */
private static File findDirectoryOption ( String [ ] args , String option , String name , boolean needed , boolean allow_dups , boolean create ) throws ProblemException , ProblemException { } } | File dir = null ; for ( int i = 0 ; i < args . length ; ++ i ) { if ( args [ i ] . equals ( option ) ) { if ( dir != null ) { throw new ProblemException ( "You have already specified the " + name + " dir!" ) ; } if ( i + 1 >= args . length ) { throw new ProblemException ( "You have to specify a directory following " + option + "." ) ; } if ( args [ i + 1 ] . indexOf ( File . pathSeparatorChar ) != - 1 ) { throw new ProblemException ( "You must only specify a single directory for " + option + "." ) ; } dir = new File ( args [ i + 1 ] ) ; if ( ! dir . exists ( ) ) { if ( ! create ) { throw new ProblemException ( "This directory does not exist: " + dir . getPath ( ) ) ; } else if ( ! makeSureExists ( dir ) ) { throw new ProblemException ( "Cannot create directory " + dir . getPath ( ) ) ; } } if ( ! dir . isDirectory ( ) ) { throw new ProblemException ( "\"" + args [ i + 1 ] + "\" is not a directory." ) ; } } } if ( dir == null && needed ) { throw new ProblemException ( "You have to specify " + option ) ; } try { if ( dir != null ) return dir . getCanonicalFile ( ) ; } catch ( IOException e ) { throw new ProblemException ( "" + e ) ; } return null ; |
public class HtmlSelectManyCheckbox { /** * < p > Set the value of the < code > unselectedClass < / code > property . < / p > */
public void setUnselectedClass ( java . lang . String unselectedClass ) { } } | getStateHelper ( ) . put ( PropertyKeys . unselectedClass , unselectedClass ) ; |
public class BaseModelGenerator { /** * base model 覆盖写入 */
protected void writeToFile ( TableMeta tableMeta ) throws IOException { } } | File dir = new File ( baseModelOutputDir ) ; if ( ! dir . exists ( ) ) { dir . mkdirs ( ) ; } String target = baseModelOutputDir + File . separator + tableMeta . baseModelName + ".java" ; OutputStreamWriter osw = null ; try { osw = new OutputStreamWriter ( new FileOutputStream ( target ) , "UTF-8" ) ; osw . write ( tableMeta . baseModelContent ) ; } finally { if ( osw != null ) { osw . close ( ) ; } } |
public class LocalSession { /** * Delete a queue
* @ param queueName
* @ throws JMSException */
protected final void deleteQueue ( String queueName ) throws JMSException { } } | transactionSet . removeUpdatesForQueue ( queueName ) ; engine . deleteQueue ( queueName ) ; |
public class CentralDogmaBeanFactory { /** * Returns a newly - created bean instance with the settings specified by { @ link CentralDogmaBean } annotation .
* @ param defaultValue a Java bean annotated with { @ link CentralDogmaBean } . The default value is used before
* initialization .
* @ param beanType the type of { @ code bean }
* @ return a new Java bean whose getters return the latest known values mirrored from Central Dogma */
public < T > T get ( T defaultValue , Class < T > beanType ) { } } | return get ( defaultValue , beanType , ( T x ) -> { } , CentralDogmaBeanConfig . EMPTY ) ; |
public class PipelineInterpreter { /** * < msgid , stream > that should not be run again ( which prevents re - running pipelines over and over again ) */
private boolean updateStreamBlacklist ( Set < Tuple2 < String , String > > processingBlacklist , Message message , Set < String > initialStreamIds ) { } } | boolean addedStreams = false ; for ( Stream stream : message . getStreams ( ) ) { if ( ! initialStreamIds . remove ( stream . getId ( ) ) ) { addedStreams = true ; } else { // only add pre - existing streams to blacklist , this has the effect of only adding already processed streams ,
// not newly added ones .
processingBlacklist . add ( tuple ( message . getId ( ) , stream . getId ( ) ) ) ; } } return addedStreams ; |
public class DualCache { /** * Return the object of the corresponding key from the cache . In no object is available ,
* return null .
* @ param key is the key of the object .
* @ return the object of the corresponding key from the cache . In no object is available ,
* return null . */
public T get ( String key ) { } } | Object ramResult = null ; String diskResult = null ; DiskLruCache . Snapshot snapshotObject = null ; // Try to get the object from RAM .
boolean isRamSerialized = ramMode . equals ( DualCacheRamMode . ENABLE_WITH_SPECIFIC_SERIALIZER ) ; boolean isRamReferenced = ramMode . equals ( DualCacheRamMode . ENABLE_WITH_REFERENCE ) ; if ( isRamSerialized || isRamReferenced ) { ramResult = ramCacheLru . get ( key ) ; } if ( ramResult == null ) { // Try to get the cached object from disk .
loggerHelper . logEntryForKeyIsNotInRam ( key ) ; if ( diskMode . equals ( DualCacheDiskMode . ENABLE_WITH_SPECIFIC_SERIALIZER ) ) { try { dualCacheLock . lockDiskEntryWrite ( key ) ; snapshotObject = diskLruCache . get ( key ) ; } catch ( IOException e ) { logger . logError ( e ) ; } finally { dualCacheLock . unLockDiskEntryWrite ( key ) ; } if ( snapshotObject != null ) { loggerHelper . logEntryForKeyIsOnDisk ( key ) ; try { diskResult = snapshotObject . getString ( 0 ) ; } catch ( IOException e ) { logger . logError ( e ) ; } } else { loggerHelper . logEntryForKeyIsNotOnDisk ( key ) ; } } T objectFromStringDisk = null ; if ( diskResult != null ) { // Load object , no need to check disk configuration since diskresult ! = null .
objectFromStringDisk = diskSerializer . fromString ( diskResult ) ; // Refresh object in ram .
if ( ramMode . equals ( DualCacheRamMode . ENABLE_WITH_REFERENCE ) ) { if ( diskMode . equals ( DualCacheDiskMode . ENABLE_WITH_SPECIFIC_SERIALIZER ) ) { ramCacheLru . put ( key , objectFromStringDisk ) ; } } else if ( ramMode . equals ( DualCacheRamMode . ENABLE_WITH_SPECIFIC_SERIALIZER ) ) { if ( diskSerializer == ramSerializer ) { ramCacheLru . put ( key , diskResult ) ; } else { ramCacheLru . put ( key , ramSerializer . toString ( objectFromStringDisk ) ) ; } } return objectFromStringDisk ; } } else { loggerHelper . logEntryForKeyIsInRam ( key ) ; if ( ramMode . equals ( DualCacheRamMode . ENABLE_WITH_REFERENCE ) ) { return ( T ) ramResult ; } else if ( ramMode . equals ( DualCacheRamMode . ENABLE_WITH_SPECIFIC_SERIALIZER ) ) { return ramSerializer . fromString ( ( String ) ramResult ) ; } } // No data is available .
return null ; |
public class Language { /** * Get the name of the language translated to the current locale ,
* if available . Otherwise , get the untranslated name . */
public final String getTranslatedName ( ResourceBundle messages ) { } } | try { return messages . getString ( getShortCodeWithCountryAndVariant ( ) ) ; } catch ( MissingResourceException e ) { try { return messages . getString ( getShortCode ( ) ) ; } catch ( MissingResourceException e1 ) { return getName ( ) ; } } |
public class ControlBean { /** * Finds all of the EventSets extended by the input EventSet , and adds them to
* the provided list .
* @ param eventSet
* @ param superEventSets */
private void getSuperEventSets ( Class eventSet , List < Class > superEventSets ) { } } | Class [ ] superInterfaces = eventSet . getInterfaces ( ) ; if ( superInterfaces != null ) { for ( int i = 0 ; i < superInterfaces . length ; i ++ ) { Class superInterface = superInterfaces [ i ] ; if ( superInterface . isAnnotationPresent ( EventSet . class ) ) { superEventSets . add ( superInterface ) ; // Continue traversing up the EventSet inheritance hierarchy
getSuperEventSets ( superInterface , superEventSets ) ; } } } |
public class SamlEndpoint { /** * Creates a { @ link SamlEndpoint } of the specified { @ code uri } and the HTTP Redirect binding protocol . */
public static SamlEndpoint ofHttpRedirect ( String uri ) { } } | requireNonNull ( uri , "uri" ) ; try { return ofHttpRedirect ( new URI ( uri ) ) ; } catch ( URISyntaxException e ) { return Exceptions . throwUnsafely ( e ) ; } |
/**
 * Returns a range of all the commerce notification queue entries where groupId = ?.
 *
 * <p>Useful when paginating results: {@code start} and {@code end} are result-set
 * indexes, not primary keys; {@code 0} is the first result and {@code end} is
 * exclusive. Passing {@link QueryUtil#ALL_POS} for both returns the full set.</p>
 *
 * @param groupId the group ID
 * @param start the lower bound of the range of commerce notification queue entries
 * @param end the upper bound of the range of commerce notification queue entries (not inclusive)
 * @return the range of matching commerce notification queue entries
 */
@Override
public List<CommerceNotificationQueueEntry> findByGroupId(long groupId, int start, int end) {
    // Delegate to the four-argument overload with no ORDER BY comparator.
    return findByGroupId(groupId, start, end, null);
}
/**
 * Marshalls the given request into its wire representation.
 *
 * @param describeHapgRequest the request to marshall; must not be {@code null}
 * @param protocolMarshaller the protocol marshaller the fields are written to
 * @throws SdkClientException if the request is {@code null} or marshalling fails
 */
public void marshall(DescribeHapgRequest describeHapgRequest, ProtocolMarshaller protocolMarshaller) {
    if (describeHapgRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(describeHapgRequest.getHapgArn(), HAPGARN_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client-side exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
/**
 * Allocates a slot that allows callers to store an integer user value on each path.
 *
 * @return the index of the newly allocated (or reused) slot
 */
int createPathUserIndex() {
    // Lazily create the slot list on first use.
    if (m_pathindices == null)
        m_pathindices = new ArrayList<AttributeStreamOfInt32>(0);
    // Try reusing a previously freed slot. Linear search is acceptable: we do
    // not expect many indices to be created.
    for (int i = 0; i < m_pathindices.size(); i++) {
        if (m_pathindices.get(i) == null) {
            m_pathindices.set(i, (AttributeStreamOfInt32) (AttributeStreamBase.createIndexStream(0)));
            return i;
        }
    }
    // No free slot: append a fresh index stream and return its position.
    m_pathindices.add((AttributeStreamOfInt32) (AttributeStreamBase.createIndexStream(0)));
    return (int) (m_pathindices.size() - 1);
}
/**
 * Returns the certificate chain associated with the given alias.
 * The certificate chain must have been associated with the alias by a call to
 * {@code setKeyEntry}, or by a call to {@code setEntry} with a
 * {@code PrivateKeyEntry}.
 *
 * @param alias the alias name
 * @return the certificate chain (ordered with the user's certificate first,
 *         followed by zero or more certificate authorities), or {@code null}
 *         if the alias does not exist or has no certificate chain
 * @exception KeyStoreException if the keystore has not been initialized (loaded)
 */
public final Certificate[] getCertificateChain(String alias) throws KeyStoreException {
    // Guard: the keystore must be loaded before any entry access.
    if (!initialized) {
        throw new KeyStoreException("Uninitialized keystore");
    }
    // Delegate to the provider-specific implementation.
    return keyStoreSpi.engineGetCertificateChain(alias);
}
/**
 * Registers a user type in the symbol table. The fully qualified name is
 * expected to start with ".".
 *
 * @param fullyQualifiedName the key the type is registered under
 * @param type the type to register
 */
public <T extends Type & Element> void register(String fullyQualifiedName, T type) {
    // Reject duplicate registrations up front so later lookups stay unambiguous.
    if (resolve(fullyQualifiedName) != null) {
        throw new ParserException(type, "Cannot register duplicate type: %s", fullyQualifiedName);
    }
    symbolTable.put(fullyQualifiedName, type);
}
/**
 * Synchronously performs a {@link SearchQuery} and applies a user function to
 * deserialize the raw JSON FTS response (a {@link String}).
 *
 * <p>Note that the query is executed "as is", without the processing done in
 * {@code Bucket#query(SearchQuery)} (such as enforcing a server-side timeout).</p>
 *
 * @param query the query to execute
 * @param deserializer transforms the String response into a custom type T
 * @param <T> the type of the deserialized response
 * @return the FTS response as a T
 */
public <T> T ftsToRawCustom(final SearchQuery query, final Func1<String, T> deserializer) {
    // Block on the async variant, bounded by the environment's search timeout.
    return Blocking.blockForSingle(async.ftsToRawCustom(query, deserializer), env.searchTimeout(), TimeUnit.MILLISECONDS);
}
public class TOTPBuilder { /** * Returns this { @ code TOTPBuilder } instance initialized with the specified
* { @ code digits } .
* @ param digits
* the number of digits the generated TOTP value should contain
* ( must be between { @ link # MIN _ ALLOWED _ DIGITS } and
* { @ link # MAX _ ALLOWED _ DIGITS } inclusive )
* @ return this { @ code TOTPBuilder } instance initialized with the specified
* { @ code digits } .
* @ throws IllegalArgumentException
* if { @ code digits } is not in [ { @ link # MIN _ ALLOWED _ DIGITS } ,
* { @ link # MAX _ ALLOWED _ DIGITS } ] . */
public TOTPBuilder digits ( int digits ) { } } | Preconditions . checkArgument ( Range . closed ( MIN_ALLOWED_DIGITS , MAX_ALLOWED_DIGITS ) . contains ( digits ) ) ; this . digits = digits ; return this ; |
/**
 * Returns a map of subscriptionId to the next upcoming billing transition
 * (PHASE, PAUSE, ...) strictly after "now".
 *
 * @param billingEvents the set of billing events to scan
 * @return earliest future effective date per subscription id
 */
private Map<UUID, DateTime> getNextTransitionsForSubscriptions(final BillingEventSet billingEvents) {
    final DateTime now = clock.getUTCNow();
    final Map<UUID, DateTime> result = new HashMap<UUID, DateTime>();
    for (final BillingEvent evt : billingEvents) {
        final UUID subscriptionId = evt.getSubscriptionId();
        final DateTime evtEffectiveDate = evt.getEffectiveDate();
        // Only strictly-future transitions matter; skip past and current ones.
        if (evtEffectiveDate.compareTo(now) <= 0) {
            continue;
        }
        final DateTime nextUpcomingPerSubscriptionDate = result.get(subscriptionId);
        // Keep the earliest upcoming date seen so far for this subscription.
        if (nextUpcomingPerSubscriptionDate == null || nextUpcomingPerSubscriptionDate.compareTo(evtEffectiveDate) > 0) {
            result.put(subscriptionId, evtEffectiveDate);
        }
    }
    return result;
}
public class ReconnectionManager { /** * Get a instance of ReconnectionManager for the given connection .
* @ param connection
* @ return a ReconnectionManager for the connection . */
public static synchronized ReconnectionManager getInstanceFor ( AbstractXMPPConnection connection ) { } } | ReconnectionManager reconnectionManager = INSTANCES . get ( connection ) ; if ( reconnectionManager == null ) { reconnectionManager = new ReconnectionManager ( connection ) ; INSTANCES . put ( connection , reconnectionManager ) ; } return reconnectionManager ; |
/**
 * Creates a matcher that compares {@code JSONObject}s.
 *
 * @param expected the expected {@code JSONObject} instance
 * @return the {@code Matcher} instance
 */
@Factory
public static SameJSONAs<JSONObject> sameJSONObjectAs(JSONObject expected) {
    // Use the modal (strict/lenient-capable) comparator for JSON objects.
    return new SameJSONAs<JSONObject>(expected, modalComparatorFor(jsonObjectComparison()));
}
/**
 * Slices up an image into a mini batch.
 *
 * @param f the file to load from
 * @param numMiniBatches the number of images in a mini batch
 * @param numRowsPerSlice the number of rows for each image
 * @return a tensor representing one image as a mini batch
 */
public INDArray asImageMiniBatches(File f, int numMiniBatches, int numRowsPerSlice) {
    try {
        INDArray d = asMatrix(f);
        // NOTE(review): only d.columns() is used here -- the returned tensor is
        // allocated with the requested shape but the pixel data from d is never
        // copied into it; confirm this is intentional.
        return Nd4j.create(numMiniBatches, numRowsPerSlice, d.columns());
    } catch (Exception e) {
        // Surface any load failure as an unchecked exception.
        throw new RuntimeException(e);
    }
}
/**
 * Reads (and discards) bytes from the stream based on a type descriptor,
 * recursing into composite types (maps, structs, vectors).
 *
 * @param rin the record input to consume from
 * @param tag the tag passed through to every read call
 * @param typeID describes the serialized layout of the value to skip
 * @throws IOException on stream errors or an unrecognized type id
 */
public static void skip(RecordInput rin, String tag, TypeID typeID) throws IOException {
    switch (typeID.typeVal) {
    case TypeID.RIOType.BOOL:
        rin.readBool(tag);
        break;
    case TypeID.RIOType.BUFFER:
        rin.readBuffer(tag);
        break;
    case TypeID.RIOType.BYTE:
        rin.readByte(tag);
        break;
    case TypeID.RIOType.DOUBLE:
        rin.readDouble(tag);
        break;
    case TypeID.RIOType.FLOAT:
        rin.readFloat(tag);
        break;
    case TypeID.RIOType.INT:
        rin.readInt(tag);
        break;
    case TypeID.RIOType.LONG:
        rin.readLong(tag);
        break;
    case TypeID.RIOType.MAP:
        // Skip every key/value pair in the map.
        org.apache.hadoop.record.Index midx1 = rin.startMap(tag);
        MapTypeID mtID = (MapTypeID) typeID;
        for (; !midx1.done(); midx1.incr()) {
            skip(rin, tag, mtID.getKeyTypeID());
            skip(rin, tag, mtID.getValueTypeID());
        }
        rin.endMap(tag);
        break;
    case TypeID.RIOType.STRING:
        rin.readString(tag);
        break;
    case TypeID.RIOType.STRUCT:
        rin.startRecord(tag);
        // Read past each field in the struct.
        StructTypeID stID = (StructTypeID) typeID;
        Iterator<FieldTypeInfo> it = stID.getFieldTypeInfos().iterator();
        while (it.hasNext()) {
            FieldTypeInfo tInfo = it.next();
            skip(rin, tag, tInfo.getTypeID());
        }
        rin.endRecord(tag);
        break;
    case TypeID.RIOType.VECTOR:
        // Skip every element of the vector.
        org.apache.hadoop.record.Index vidx1 = rin.startVector(tag);
        VectorTypeID vtID = (VectorTypeID) typeID;
        for (; !vidx1.done(); vidx1.incr()) {
            skip(rin, tag, vtID.getElementTypeID());
        }
        rin.endVector(tag);
        break;
    default:
        // Shouldn't be here: the type descriptor is corrupt or unsupported.
        throw new IOException("Unknown typeID when skipping bytes");
    }
}
/**
 * Trace hook: logs a field store (holder.fieldName = newVal) with caller
 * context to stderr. The commented-out fragments below were part of the
 * original message and are kept for reference.
 */
public static void storeFieldA(int holderValKind, Object holder, Object newVal, Object oldVal, String holderClass, String fieldName, String type, String callerClass, String callerMethod, int callerValKind, Object caller) {
    System.err.println("storeFieldA( "
            + valAndValKindToString(holder, holderClass, holderValKind) + " . " + fieldName + " = " + newVal + ", "
            + "oldVal=" + oldVal + ", "
            // + "holderClass=" + holderClass + ", "
            + "fieldType=" + type + ", "
            // + "callerClass=" + callerClass + ", "
            + "callerMethod=" + callerMethod + ", "
            + "caller=" + valAndValKindToString(caller, callerClass, callerValKind) + ", "
            + ")");
}
/**
 * Resets this cursor for the provided tree, to iterate over its entire range.
 *
 * @param btree the tree to iterate over
 * @param forwards if false, the cursor will start at the end and move backwards
 */
public void reset(Object[] btree, boolean forwards) {
    // Unbounded on both sides: iterate from negative to positive infinity.
    _reset(btree, null, NEGATIVE_INFINITY, false, POSITIVE_INFINITY, false, forwards);
}
/**
 * Writes a start tag.
 *
 * @param namespace the namespace (may be null for no namespace)
 * @param name the local name
 * @return this serializer, for chaining
 * @throws IOException Signals that an I/O exception has occurred.
 */
public XMLSerializer startTag(String namespace, String name) throws IOException {
    // Finish any previously opened-but-unclosed start tag first.
    if (startTagIncomplete) {
        closeStartTag();
    }
    seenBracket = seenBracketBracket = false;
    ++depth;
    if (doIndent && depth > 0 && seenTag) {
        writeIndent();
    }
    seenTag = true;
    setPrefixCalled = false;
    startTagIncomplete = true;
    // Grow the per-depth element bookkeeping arrays if needed.
    if ((depth + 1) >= elName.length) {
        ensureElementsCapacity();
    }
    // //assert namespace != null;
    if (checkNamesInterned && namesInterned)
        checkInterning(namespace);
    elNamespace[depth] = (namesInterned || namespace == null) ? namespace : namespace.intern();
    // assert name != null;
    // elName[depth] = name;
    if (checkNamesInterned && namesInterned)
        checkInterning(name);
    elName[depth] = (namesInterned || name == null) ? name : name.intern();
    if (out == null) {
        throw new IllegalStateException("setOutput() must called set before serialization can start");
    }
    out.write('<');
    if (namespace != null) {
        if (namespace.length() > 0) {
            // ALEK: in future make this algo a feature on serializer
            String prefix = null;
            if (depth > 0 && (namespaceEnd - elNamespaceCount[depth - 1]) == 1) {
                // If only one prefix was declared, un-declare it if the
                // prefix is already declared on a parent element with the same URI.
                String uri = namespaceUri[namespaceEnd - 1];
                if (uri == namespace || uri.equals(namespace)) {
                    String elPfx = namespacePrefix[namespaceEnd - 1];
                    // pos >= 2 to skip the predefined namespaces (xml and xmlns).
                    for (int pos = elNamespaceCount[depth - 1] - 1; pos >= 2; --pos) {
                        String pf = namespacePrefix[pos];
                        if (pf == elPfx || pf.equals(elPfx)) {
                            String n = namespaceUri[pos];
                            if (n == uri || n.equals(uri)) {
                                // Un-declare the redundant namespace; this is a kludge!
                                --namespaceEnd;
                                prefix = elPfx;
                            }
                            break;
                        }
                    }
                }
            }
            if (prefix == null) {
                prefix = lookupOrDeclarePrefix(namespace);
            }
            // assert prefix != null;
            // Make sure the default ("") namespace does not print ":".
            if (prefix.length() > 0) {
                elPrefix[depth] = prefix;
                out.write(prefix);
                out.write(':');
            } else {
                elPrefix[depth] = "";
            }
        } else {
            // Empty namespace: make sure the default namespace can be declared.
            for (int i = namespaceEnd - 1; i >= 0; --i) {
                if ("".equals(namespacePrefix[i])) {
                    final String uri = namespaceUri[i];
                    if (uri == null) {
                        // Declare default namespace.
                        setPrefix("", "");
                    } else if (uri.length() > 0) {
                        throw new IllegalStateException("start tag can not be written in empty default namespace " + "as default namespace is currently bound to '" + uri + "'" + getLocation());
                    }
                    break;
                }
            }
            elPrefix[depth] = "";
        }
    } else {
        elPrefix[depth] = "";
    }
    out.write(name);
    return this;
}
public class GetPendingJobExecutionsResult { /** * A list of JobExecutionSummary objects with status IN _ PROGRESS .
* @ param inProgressJobs
* A list of JobExecutionSummary objects with status IN _ PROGRESS . */
public void setInProgressJobs ( java . util . Collection < JobExecutionSummary > inProgressJobs ) { } } | if ( inProgressJobs == null ) { this . inProgressJobs = null ; return ; } this . inProgressJobs = new java . util . ArrayList < JobExecutionSummary > ( inProgressJobs ) ; |
/**
 * Marshalls the given request into its wire representation.
 *
 * @param updateSqlInjectionMatchSetRequest the request to marshall; must not be {@code null}
 * @param protocolMarshaller the protocol marshaller the fields are written to
 * @throws SdkClientException if the request is {@code null} or marshalling fails
 */
public void marshall(UpdateSqlInjectionMatchSetRequest updateSqlInjectionMatchSetRequest, ProtocolMarshaller protocolMarshaller) {
    if (updateSqlInjectionMatchSetRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(updateSqlInjectionMatchSetRequest.getSqlInjectionMatchSetId(), SQLINJECTIONMATCHSETID_BINDING);
        protocolMarshaller.marshall(updateSqlInjectionMatchSetRequest.getChangeToken(), CHANGETOKEN_BINDING);
        protocolMarshaller.marshall(updateSqlInjectionMatchSetRequest.getUpdates(), UPDATES_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client-side exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
public class GenericObject { /** * This is used to represent DBCollection objects as GenericObjects .
* @ param collection collection object */
public void setDbCollection ( Object collection ) { } } | if ( collection != null ) { dbCollectionData = collection ; isDbCollection = true ; } else { dbCollectionData = null ; isDbCollection = false ; } |
public class OriginPreferenceMessage { /** * Returns the length of this message when encoded according to the
* Client - World Model protocol .
* @ return the length , in bytes , of the encoded form of this message . */
public int getMessageLength ( ) { } } | // Message ID
int length = 1 ; if ( this . weights != null ) { for ( String origin : this . weights . keySet ( ) ) { // String prefix , weight
length += 8 ; try { length += origin . getBytes ( "UTF-16BE" ) . length ; } catch ( UnsupportedEncodingException uee ) { log . error ( "Unable to encode to UTF-16BE." ) ; } } } return length ; |
public class CharValue { @ Override public int compareTo ( CharValue o ) { } } | final int other = o . value ; return this . value < other ? - 1 : this . value > other ? 1 : 0 ; |
/**
 * An array of <a>VolumeRecoveryPointInfo</a> objects.
 *
 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
 * {@link #setVolumeRecoveryPointInfos(java.util.Collection)} or
 * {@link #withVolumeRecoveryPointInfos(java.util.Collection)} if you want to
 * override the existing values.
 *
 * @param volumeRecoveryPointInfos an array of <a>VolumeRecoveryPointInfo</a> objects
 * @return a reference to this object so that method calls can be chained together
 */
public ListVolumeRecoveryPointsResult withVolumeRecoveryPointInfos(VolumeRecoveryPointInfo... volumeRecoveryPointInfos) {
    // Lazily initialize the backing list on first append.
    if (this.volumeRecoveryPointInfos == null) {
        setVolumeRecoveryPointInfos(new com.amazonaws.internal.SdkInternalList<VolumeRecoveryPointInfo>(volumeRecoveryPointInfos.length));
    }
    for (VolumeRecoveryPointInfo ele : volumeRecoveryPointInfos) {
        this.volumeRecoveryPointInfos.add(ele);
    }
    return this;
}
/**
 * Sets the substrings found between the given left and right tags as the
 * search string.
 * <B>Multiple substrings may be matched.</B>
 *
 * @see Betner#between(String, String)
 * @param left the left (opening) tag
 * @param right the right (closing) tag
 * @return this instance, for chaining
 */
public S betns(String left, String right) {
    // Same as betn(...) but with lazy evaluation enabled.
    return betn(left, right).late();
}
/**
 * Returns all the c products where uuid = ? and companyId = ?.
 *
 * @param uuid the uuid
 * @param companyId the company ID
 * @return the matching c products
 */
public static List<CProduct> findByUuid_C(String uuid, long companyId) {
    // Static facade: delegate to the configured persistence implementation.
    return getPersistence().findByUuid_C(uuid, companyId);
}
/**
 * Disassociates a connection from a link aggregation group (LAG). The connection
 * is interrupted and re-established as a standalone connection (the connection
 * is not deleted; to delete it, use <a>DeleteConnection</a>). Virtual interfaces
 * and hosted connections remain associated with the LAG. A disassociated
 * connection owned by an AWS Direct Connect Partner is automatically converted
 * to an interconnect.
 *
 * <p>If disassociating would drop the LAG below its minimum number of
 * operational connections, the request fails, except when it's the last member
 * of the LAG. If all connections are disassociated, the LAG continues to exist
 * as an empty LAG.</p>
 *
 * @param request the disassociate request
 * @return Result of the DisassociateConnectionFromLag operation returned by the service.
 * @throws DirectConnectServerException A server-side error occurred.
 * @throws DirectConnectClientException One or more parameters are not valid.
 * @sample AmazonDirectConnect.DisassociateConnectionFromLag
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/directconnect-2012-10-25/DisassociateConnectionFromLag"
 *      target="_top">AWS API Documentation</a>
 */
@Override
public DisassociateConnectionFromLagResult disassociateConnectionFromLag(DisassociateConnectionFromLagRequest request) {
    // Run request handlers/decorators, then dispatch to the executor.
    request = beforeClientExecution(request);
    return executeDisassociateConnectionFromLag(request);
}
/**
 * Matches any object of given type, excluding nulls.
 *
 * <p>This matcher performs a type check with the given type, thus excluding
 * non-matching values. Alias of {@link #isA(Class)}. Since Mockito 2.1.0 only
 * non-null instances match; to match {@code null}, use {@link #isNull()}.</p>
 *
 * <p><strong>Notes:</strong></p>
 * <ul>
 * <li>For primitive types use the {@link #anyChar()} family.</li>
 * <li>Since Mockito 2.1.0 {@link #any()} and {@link #anyObject()} are no longer
 * aliases of this method.</li>
 * </ul>
 *
 * @param <T> the accepted type
 * @param type the class of the accepted type
 * @return {@code null} (or a primitive default) -- the value is a placeholder;
 *         the matcher itself is recorded on the matcher stack
 * @see #any()
 * @see #isA(Class)
 * @see #isNull()
 */
public static <T> T any(Class<T> type) {
    // Record the matcher; the returned value is only a type-appropriate stub.
    reportMatcher(new InstanceOf.VarArgAware(type, "<any " + type.getCanonicalName() + ">"));
    return defaultValue(type);
}
/**
 * Adds an annotation directly declared on the element and its member values.
 * If the annotation already exists, the data will be merged with existing
 * values replaced. Repeatable annotations are unwrapped and routed through
 * {@code addDeclaredRepeatable} instead.
 *
 * @param annotation the annotation name (no-op when {@code null})
 * @param values the member values
 */
protected void addDeclaredAnnotation(String annotation, Map<CharSequence, Object> values) {
    if (annotation != null) {
        String repeatedName = getRepeatedName(annotation);
        if (repeatedName != null) {
            // Repeatable annotation: the "value" member holds the repeated
            // entries, either as an array or as an Iterable of AnnotationValue.
            Object v = values.get(AnnotationMetadata.VALUE_MEMBER);
            if (v instanceof io.micronaut.core.annotation.AnnotationValue[]) {
                io.micronaut.core.annotation.AnnotationValue[] avs = (io.micronaut.core.annotation.AnnotationValue[]) v;
                for (io.micronaut.core.annotation.AnnotationValue av : avs) {
                    addDeclaredRepeatable(annotation, av);
                }
            } else if (v instanceof Iterable) {
                Iterable i = (Iterable) v;
                for (Object o : i) {
                    if (o instanceof io.micronaut.core.annotation.AnnotationValue) {
                        addDeclaredRepeatable(annotation, ((io.micronaut.core.annotation.AnnotationValue) o));
                    }
                }
            }
        } else {
            // Plain annotation: merge into both the declared and the combined maps.
            Map<String, Map<CharSequence, Object>> declaredAnnotations = getDeclaredAnnotationsInternal();
            Map<String, Map<CharSequence, Object>> allAnnotations = getAllAnnotations();
            addAnnotation(annotation, values, declaredAnnotations, allAnnotations, true);
        }
    }
}
public class InmemoryNodeTypeRepository { /** * { @ inheritDoc } */
public List < NodeTypeData > getAllNodeTypes ( ) { } } | if ( ! haveTypes ) try { return super . getAllNodeTypes ( ) ; } catch ( RepositoryException e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } return hierarchy . getAllNodeTypes ( ) ; |
public class Primitives { /** * Parses the char sequence argument as a boolean . The boolean returned represents
* the value true if the char sequence argument is not null and is equal , ignoring
* case , to the string " true " .
* @ param cs
* @ param beginIndex the index to the first char of the text range .
* @ param endIndex the index after the last char of the text range .
* @ return
* @ throws IllegalArgumentException if input length is not 4.
* @ see java . lang . Boolean # parseBoolean ( java . lang . String ) */
public static final boolean parseBoolean ( CharSequence cs , int beginIndex , int endIndex ) { } } | return endIndex - beginIndex == 4 && Character . codePointCount ( cs , beginIndex , endIndex ) == 4 && Character . toUpperCase ( Character . codePointAt ( cs , beginIndex ) ) == 'T' && Character . toUpperCase ( Character . codePointAt ( cs , beginIndex + 1 ) ) == 'R' && Character . toUpperCase ( Character . codePointAt ( cs , beginIndex + 2 ) ) == 'U' && Character . toUpperCase ( Character . codePointAt ( cs , beginIndex + 3 ) ) == 'E' ; |
public class EditableNamespaceContext { /** * Returns the URI for a namespace binding . */
@ Override public String getNamespaceURI ( String prefix ) { } } | // per javax . xml . namespace . NamespaceContext doc
if ( prefix == null ) throw new IllegalArgumentException ( "Cannot get namespace URI for null prefix" ) ; if ( XMLConstants . XML_NS_PREFIX . equals ( prefix ) ) return XMLConstants . XML_NS_URI ; if ( XMLConstants . XMLNS_ATTRIBUTE . equals ( prefix ) ) return XMLConstants . XMLNS_ATTRIBUTE_NS_URI ; String namespaceURI = bindings . get ( prefix ) ; // per javax . xml . namespace . NamespaceContext doc
return ( namespaceURI != null ) ? namespaceURI : XMLConstants . NULL_NS_URI ; |
public class Traverson { /** * Configures the { @ link RestOperations } to use . If { @ literal null } is provided a default { @ link RestTemplate } will be
* used .
* @ param operations
* @ return */
public Traverson setRestOperations ( @ Nullable RestOperations operations ) { } } | this . operations = operations == null ? createDefaultTemplate ( this . mediaTypes ) : operations ; return this ; |
/**
 * Given an input schema, resolves this TVE expression's column index (and, if
 * missing, its value type/size).
 *
 * @param inputSchema the schema to resolve against
 * @return the resolved index, or a negative value when this TVE is not found
 */
public int setColumnIndexUsingSchema(NodeSchema inputSchema) {
    int index = inputSchema.getIndexOfTve(this);
    if (index < 0) {
        // Resolution failed: leave state untouched and report the miss.
        // * enable to debug */ System.out.println("DEBUG: setColumnIndex miss:" + this);
        // * enable to debug */ System.out.println("DEBUG: setColumnIndex candidates:" + inputSchema);
        return index;
    }
    setColumnIndex(index);
    if (getValueType() == null) {
        // In case of sub-queries the TVE may not have its value type and size
        // resolved yet. Try to resolve it now from the matched schema column.
        SchemaColumn inputColumn = inputSchema.getColumn(index);
        setTypeSizeAndInBytes(inputColumn);
    }
    return index;
}
/**
 * Returns a good approximation of the binomial cumulative probability for k
 * successes out of n tries with per-try probability p.
 *
 * Uses the regularized incomplete beta function, as described at
 * http://www.math.ucla.edu/~tom/distributions/binomial.html
 *
 * @param k number of successes
 * @param p per-try success probability
 * @param n number of tries
 * @return the approximate cumulative probability
 *         (appears to be the complement of the upper beta tail, i.e. P(X <= k)
 *         -- confirm against callers)
 */
private static double approxBinomialCdf(int k, double p, int n) {
    // Binomial tail expressed via the incomplete beta: I_p(k+1, n-k).
    double Z = p;
    double A = k + 1;
    double B = n - k;
    double S = A + B;
    // BT is the beta-function scaling factor, computed in log space to avoid
    // overflow for large n.
    double BT = Math.exp(ContinuousDistributions.logGamma(S) - ContinuousDistributions.logGamma(B) - ContinuousDistributions.logGamma(A) + A * Math.log(Z) + B * Math.log(1 - Z));
    double probabilitySum;
    // Use the symmetry of the incomplete beta for numerical stability.
    if (Z < (A + 1) / (S + 2)) {
        probabilitySum = BT * ContinuousDistributions.betinc(Z, A, B);
    } else {
        probabilitySum = 1.0 - BT * ContinuousDistributions.betinc(1.0 - Z, B, A);
    }
    // Complement the tail to obtain the cumulative probability.
    probabilitySum = 1.0 - probabilitySum;
    return probabilitySum;
}
/**
 * Returns the total start padding of the view, including the start Drawable if
 * any. On platforms older than JELLY_BEAN_MR1 (which lack RTL-aware padding),
 * falls back to the left padding.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
public int getTotalPaddingStart() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1)
        return getPaddingStart() + mInputView.getTotalPaddingStart();
    // Pre-JB-MR1: no start/end padding APIs, left is the best approximation.
    return getTotalPaddingLeft();
}
/**
 * Polls the named job until its crawl controller reaches the given state, or
 * the number of tries is exhausted.
 *
 * @param jobname name of the job to poll
 * @param state the state to wait for; {@code null} waits for a null controller state
 * @param tries maximum number of polls (values <= 0 are coerced to 1)
 * @param interval pause between polls in milliseconds (values <= 99 are coerced to 1000)
 * @return the last {@link JobResult} observed (may not yet be in the desired state
 *         if tries ran out)
 */
public JobResult waitForJobState(String jobname, CrawlControllerState state, int tries, int interval) {
    JobResult jobResult = null;
    // Sanitize inputs: always poll at least once, never spin faster than 1s.
    if (tries <= 0) {
        tries = 1;
    }
    if (interval <= 99) {
        interval = 1000;
    }
    boolean bLoop = true;
    while (bLoop && tries > 0) {
        jobResult = job(jobname);
        // debug
        // System.out.println(jobResult.status + " - " + ResultStatus.OK);
        if (jobResult.status == ResultStatus.OK) {
            // debug
            // System.out.println(jobResult.job.crawlControllerState + " - " + state.name());
            // Match either both-null or the exact requested state name.
            if ((state == null && jobResult.job.crawlControllerState == null) || (state != null && state.name().equals(jobResult.job.crawlControllerState))) {
                bLoop = false;
            }
        }
        --tries;
        // Only sleep when another attempt will follow.
        if (bLoop && tries > 0) {
            try {
                Thread.sleep(interval);
            } catch (InterruptedException e) {
            }
        }
    }
    return jobResult;
}
public class CmsJspDateSeriesBean { /** * Returns a lazy map from the start time of a single instance of the series to the date information on the single instance . < p >
* Start time can be provided as Long , as a String representation of the long value or as Date . < p >
* If no event exists for the start time , the information for the first event of the series is returned .
* @ return a lazy map from the start time of a single instance of the series to the date information on the single instance . */
public Map < Object , CmsJspInstanceDateBean > getInstanceInfo ( ) { } } | if ( m_singleEvents == null ) { m_singleEvents = CmsCollectionsGenericWrapper . createLazyMap ( new CmsSeriesSingleEventTransformer ( ) ) ; } return m_singleEvents ; |
/**
 * Constructs a node in the result tree. This method is overloaded by
 * xsl:attribute; at this class level it adds an attribute to the current
 * element via the serialization handler.
 *
 * @param nodeName the name of the node, which may be null (no-op when null/empty)
 * @param prefix the prefix for the namespace, which may be null
 * @param nodeNamespace the namespace of the node, which may be null
 * @param transformer non-null reference to the current transform-time state
 * @throws TransformerException
 */
void constructNode(String nodeName, String prefix, String nodeNamespace, TransformerImpl transformer) throws TransformerException {
    if (null != nodeName && nodeName.length() > 0) {
        SerializationHandler rhandler = transformer.getSerializationHandler();
        // Evaluate the value of this attribute.
        String val = transformer.transformToString(this);
        try {
            // Let the result tree handler add the attribute and its String value.
            String localName = QName.getLocalPart(nodeName);
            if (prefix != null && prefix.length() > 0) {
                rhandler.addAttribute(nodeNamespace, localName, nodeName, "CDATA", val, true);
            } else {
                rhandler.addAttribute("", localName, nodeName, "CDATA", val, true);
            }
        } catch (SAXException e) {
            // NOTE(review): the SAX failure is silently swallowed -- the
            // attribute is simply not added; confirm this is intentional.
        }
    }
}
/**
 * Filters a collection into a PartitionIterable based on a two-argument predicate
 * and an extra parameter.
 *
 * Example using a Java 8 lambda expression:
 * <pre>
 * PartitionIterable&lt;Person&gt; newYorkersAndNonNewYorkers =
 *     Iterate.<b>partitionWith</b>(people,
 *         (Person person, String state) -&gt; person.getAddress().getState().getName().equals(state),
 *         "New York");
 * </pre>
 *
 * @throws IllegalArgumentException if {@code iterable} is null
 * @since 5.0.
 */
public static <T, P> PartitionIterable<T> partitionWith(Iterable<T> iterable, Predicate2<? super T, ? super P> predicate, P parameter) {
    // Dispatch from most-specific to least-specific iterable type so the
    // fastest specialized implementation is picked.
    if (iterable instanceof RichIterable<?>) {
        return ((RichIterable<T>) iterable).partitionWith(predicate, parameter);
    }
    if (iterable instanceof ArrayList) {
        return ArrayListIterate.partitionWith((ArrayList<T>) iterable, predicate, parameter);
    }
    if (iterable instanceof List) {
        return ListIterate.partitionWith((List<T>) iterable, predicate, parameter);
    }
    if (iterable != null) {
        return IterableIterate.partitionWith(iterable, predicate, parameter);
    }
    throw new IllegalArgumentException("Cannot perform a partition on null");
}
public class CryptoServiceImpl { /** * { @ inheritDoc } */
@ Override public String decrypt ( String cryptoKey , String encrypted ) throws TechnicalException { } } | if ( ! encrypted . startsWith ( getPrefix ( ) ) ) { logger . error ( TechnicalException . TECHNICAL_ERROR_MESSAGE_DECRYPT_EXCEPTION ) ; throw new TechnicalException ( Messages . getMessage ( TechnicalException . TECHNICAL_ERROR_MESSAGE_DECRYPT_EXCEPTION ) ) ; } Key aesKey = null ; if ( cryptoKey != null && ! "" . equals ( cryptoKey ) ) { aesKey = buildKey16char ( cryptoKey ) ; } if ( aesKey == null ) { logger . error ( TechnicalException . TECHNICAL_ERROR_MESSAGE_DECRYPT_CONFIGURATION_EXCEPTION ) ; throw new TechnicalException ( Messages . getMessage ( TechnicalException . TECHNICAL_ERROR_MESSAGE_DECRYPT_CONFIGURATION_EXCEPTION ) ) ; } try { Cipher cipher = Cipher . getInstance ( "AES" ) ; cipher . init ( Cipher . DECRYPT_MODE , aesKey ) ; return new String ( cipher . doFinal ( Base64 . decodeBase64 ( encrypted . substring ( getPrefix ( ) . length ( ) , encrypted . length ( ) ) ) ) ) ; } catch ( NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | IllegalBlockSizeException | BadPaddingException e ) { throw new TechnicalException ( Messages . getMessage ( TechnicalException . TECHNICAL_ERROR_MESSAGE_DECRYPT_EXCEPTION ) , e ) ; } |
public class X509CRLImpl { /** * Utility method to convert an arbitrary instance of X509CRL
* to a X509CRLImpl . Does a cast if possible , otherwise reparses
* the encoding . */
public static X509CRLImpl toImpl ( X509CRL crl ) throws CRLException { } } | if ( crl instanceof X509CRLImpl ) { return ( X509CRLImpl ) crl ; } else { return X509Factory . intern ( crl ) ; } |
public class ApiOvhDedicatedceph { /** * Update cluster details
* REST : PUT / dedicated / ceph / { serviceName }
* @ param serviceName [ required ] ID of cluster
* @ param crushTunables [ required ] Tunables of cluster
* @ param label [ required ] Name of the cluster
* API beta */
public String serviceName_PUT ( String serviceName , OvhCrushTunablesEnum crushTunables , String label ) throws IOException { } } | String qPath = "/dedicated/ceph/{serviceName}" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "crushTunables" , crushTunables ) ; addBody ( o , "label" , label ) ; String resp = exec ( qPath , "PUT" , sb . toString ( ) , o ) ; return convertTo ( resp , String . class ) ; |
public class XMLDatabase { /** * Internalizes a < code > Document < / code > . The registered database
* listeners will < b > not < / b > be notified during this method .
* @ param document the document to internalize */
private void internalizeDocument ( Document document ) throws ParseException { } } | init ( ) ; // temporarily remove the database listeners
List < IDatabaseListener > tmpListeners = new ArrayList < > ( listeners ) ; listeners . clear ( ) ; Hashtable < String , String > keyMap = new Hashtable < > ( ) ; try { Element rootElement = document . getRootElement ( ) ; List < Attribute > rootAttrs = rootElement . getAttributes ( ) ; // extract the master language and put all other root element
// attributes aside
for ( Attribute attribute : rootAttrs ) { if ( MASTER_LANGUAGE_ATTRIBUTE_NAME . equals ( attribute . getName ( ) ) ) { masterLanguage = attribute . getValue ( ) ; } else { additionalRootAttrs . add ( attribute ) ; } } // store additional namespaces . the xsi namespace is used for the attribute xsi : noSchemaNamespaceLocation
for ( Object namespace : rootElement . getAdditionalNamespaces ( ) ) { // The list is not type save , therefore a cast is done
if ( namespace instanceof Namespace ) { additionalNamespaces . add ( ( Namespace ) namespace ) ; } else { throw new RuntimeException ( "Namespace List contains Objects that are not of type Namespace" ) ; } } if ( masterLanguage == null ) { // since we cannot determine the line numbers in this methods
// we pass 0 to occuring ParseExceptions
throw new ParseException ( "Master language missing." , 0 ) ; } List < Element > textList = rootElement . getChildren ( TEXT_ELEMENT_NAME ) ; // the validation in here is kind of unnecessary when the xsd is used for validation .
// however the xsd validation was added later and it cant hurt to doublecheck some things , there might be cases
// where the xsd cannot be used
for ( Element textElement : textList ) { String key = textElement . getAttributeValue ( KEY_ATTRIBUTE_NAME ) ; if ( key == null ) { throw new ParseException ( "No key found for text." , ( ( LineNumberElement ) textElement ) . getStartLine ( ) ) ; } if ( keyMap . containsKey ( key ) ) { parseWarnings . add ( new ParseWarning ( "Duplicate key: " + key , ( ( LineNumberElement ) textElement ) . getStartLine ( ) ) ) ; } else { keyMap . put ( key , "" ) ; } String context = textElement . getChildText ( CONTEXT_ELEMENT_NAME ) ; if ( context == null ) { throw new ParseException ( "No context found for key \"" + key + "\"." , ( ( LineNumberElement ) textElement ) . getStartLine ( ) ) ; } ITextNode textNode = new XMLTextNode ( key , context ) ; addTextNode ( textNode ) ; List < Element > valueList = textElement . getChildren ( VALUE_ELEMENT_NAME ) ; for ( Element valueElement : valueList ) { String language = valueElement . getAttributeValue ( LANGUAGE_ATTRIBUTE_NAME ) ; if ( language == null ) { throw new ParseException ( "No language found for value of key \"" + key + "\"." , ( ( LineNumberElement ) valueElement ) . getStartLine ( ) ) ; } String statusName = valueElement . getAttributeValue ( STATUS_ATTRIBUTE_NAME ) ; Status status = Status . valueOf ( statusName ) ; if ( status == null ) { throw new ParseException ( "Invalid status for key \"" + key + "\": " + statusName , ( ( LineNumberElement ) valueElement ) . getStartLine ( ) ) ; } String value = valueElement . getText ( ) ; textNode . addValueNode ( new XMLValueNode ( language , status , value ) ) ; } } } finally { listeners . addAll ( tmpListeners ) ; } |
public class CircuitBreakerStatus { /** * Copy the current immutable object by setting a value for the { @ link AbstractCircuitBreakerStatus # getState ( ) state } attribute .
* A shallow reference equality check is used to prevent copying of the same value by returning { @ code this } .
* @ param value A new value for state
* @ return A modified copy of the { @ code this } object */
public final CircuitBreakerStatus withState ( String value ) { } } | if ( this . state == value ) return this ; String newValue = Preconditions . checkNotNull ( value , "state" ) ; return new CircuitBreakerStatus ( this . id , this . timestamp , newValue , this . totalSuccessCount , this . totalFailureCount , this . latencyMicros , this . throughputOneMinute , this . failedThroughputOneMinute ) ; |
public class SceneObject { /** * Updates this object ' s origin tile coordinate . Its bounds and other
* cached screen coordinate information are updated . */
public void relocateObject ( MisoSceneMetrics metrics , int tx , int ty ) { } } | // Log . info ( " Relocating object " + this + " to " +
// StringUtil . coordsToString ( tx , ty ) ) ;
info . x = tx ; info . y = ty ; computeInfo ( metrics ) ; |
public class dnssoarec { /** * Use this API to fetch all the dnssoarec resources that are configured on netscaler .
* This uses dnssoarec _ args which is a way to provide additional arguments while fetching the resources . */
public static dnssoarec [ ] get ( nitro_service service , dnssoarec_args args ) throws Exception { } } | dnssoarec obj = new dnssoarec ( ) ; options option = new options ( ) ; option . set_args ( nitro_util . object_to_string_withoutquotes ( args ) ) ; dnssoarec [ ] response = ( dnssoarec [ ] ) obj . get_resources ( service , option ) ; return response ; |
public class FileTree { /** * Looks up the given names against the given base file . If the file is not a directory , the
* lookup fails . */
@ Nullable private DirectoryEntry lookUp ( File dir , Iterable < Name > names , Set < ? super LinkOption > options , int linkDepth ) throws IOException { } } | Iterator < Name > nameIterator = names . iterator ( ) ; Name name = nameIterator . next ( ) ; while ( nameIterator . hasNext ( ) ) { Directory directory = toDirectory ( dir ) ; if ( directory == null ) { return null ; } DirectoryEntry entry = directory . get ( name ) ; if ( entry == null ) { return null ; } File file = entry . file ( ) ; if ( file . isSymbolicLink ( ) ) { DirectoryEntry linkResult = followSymbolicLink ( dir , ( SymbolicLink ) file , linkDepth ) ; if ( linkResult == null ) { return null ; } dir = linkResult . fileOrNull ( ) ; } else { dir = file ; } name = nameIterator . next ( ) ; } return lookUpLast ( dir , name , options , linkDepth ) ; |
public class ExecutionEnvironment { /** * Generic method to create an input DataSet with in { @ link InputFormat } . The DataSet will not be
* immediately created - instead , this method returns a DataSet that will be lazily created from
* the input format once the program is executed .
* The data set is typed to the given TypeInformation . This method is intended for input formats that
* where the return type cannot be determined by reflection analysis , and that do not implement the
* { @ link ResultTypeQueryable } interface .
* @ param inputFormat The input format used to create the data set .
* @ return A DataSet that represents the data created by the input format .
* @ see # createInput ( InputFormat ) */
public < X > DataSource < X > createInput ( InputFormat < X , ? > inputFormat , TypeInformation < X > producedType ) { } } | if ( inputFormat == null ) { throw new IllegalArgumentException ( "InputFormat must not be null." ) ; } if ( producedType == null ) { throw new IllegalArgumentException ( "Produced type information must not be null." ) ; } return new DataSource < X > ( this , inputFormat , producedType ) ; |
public class CreateAuthorizerRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( CreateAuthorizerRequest createAuthorizerRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( createAuthorizerRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createAuthorizerRequest . getRestApiId ( ) , RESTAPIID_BINDING ) ; protocolMarshaller . marshall ( createAuthorizerRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( createAuthorizerRequest . getType ( ) , TYPE_BINDING ) ; protocolMarshaller . marshall ( createAuthorizerRequest . getProviderARNs ( ) , PROVIDERARNS_BINDING ) ; protocolMarshaller . marshall ( createAuthorizerRequest . getAuthType ( ) , AUTHTYPE_BINDING ) ; protocolMarshaller . marshall ( createAuthorizerRequest . getAuthorizerUri ( ) , AUTHORIZERURI_BINDING ) ; protocolMarshaller . marshall ( createAuthorizerRequest . getAuthorizerCredentials ( ) , AUTHORIZERCREDENTIALS_BINDING ) ; protocolMarshaller . marshall ( createAuthorizerRequest . getIdentitySource ( ) , IDENTITYSOURCE_BINDING ) ; protocolMarshaller . marshall ( createAuthorizerRequest . getIdentityValidationExpression ( ) , IDENTITYVALIDATIONEXPRESSION_BINDING ) ; protocolMarshaller . marshall ( createAuthorizerRequest . getAuthorizerResultTtlInSeconds ( ) , AUTHORIZERRESULTTTLINSECONDS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ReflectUtils { /** * 调用方法
* @ param object 对象
* @ param methodName 方法名
* @ param parameters 参数
* @ return 方法返回的结果
* @ throws NoSuchMethodException 异常
* @ throws InvocationTargetException 异常
* @ throws IllegalAccessException 异常 */
public static Object invokeMethod ( Object object , String methodName , Object [ ] parameters ) throws NoSuchMethodException , InvocationTargetException , IllegalAccessException { } } | return invokeMethod ( object , methodName , getTypes ( parameters ) , parameters ) ; |
public class IconicsAnimatedDrawable { /** * FI - LO */
@ Override public void draw ( @ NonNull Canvas canvas ) { } } | for ( int i = 0 ; i < mProcessors . size ( ) ; i ++ ) { IconicsAnimationProcessor p = mProcessors . get ( i ) ; p . processPreDraw ( canvas , mIconBrush , mContourBrush , mBackgroundBrush , mBackgroundContourBrush ) ; } super . draw ( canvas ) ; for ( int i = mProcessors . size ( ) - 1 ; i >= 0 ; i -- ) { IconicsAnimationProcessor p = mProcessors . get ( i ) ; p . processPostDraw ( canvas ) ; } |
public class CommentProcessorRegistry { /** * Finds all processor expressions within the specified paragraph and tries
* to evaluate it against all registered { @ link ICommentProcessor } s .
* @ param proxyBuilder a builder for a proxy around the context root object to customize its interface
* @ param paragraphCoordinates the paragraph to process .
* @ param < T > type of the context root object */
private < T > void runProcessorsOnInlineContent ( ProxyBuilder < T > proxyBuilder , ParagraphCoordinates paragraphCoordinates ) { } } | ParagraphWrapper paragraph = new ParagraphWrapper ( paragraphCoordinates . getParagraph ( ) ) ; List < String > processorExpressions = expressionUtil . findProcessorExpressions ( paragraph . getText ( ) ) ; for ( String processorExpression : processorExpressions ) { String strippedExpression = expressionUtil . stripExpression ( processorExpression ) ; for ( final ICommentProcessor processor : commentProcessors ) { Class < ? > commentProcessorInterface = commentProcessorInterfaces . get ( processor ) ; proxyBuilder . withInterface ( commentProcessorInterface , processor ) ; processor . setCurrentParagraphCoordinates ( paragraphCoordinates ) ; } try { T contextRootProxy = proxyBuilder . build ( ) ; expressionResolver . resolveExpression ( strippedExpression , contextRootProxy ) ; placeholderReplacer . replace ( paragraph , processorExpression , null ) ; logger . debug ( String . format ( "Processor expression '%s' has been successfully processed by a comment processor." , processorExpression ) ) ; } catch ( SpelEvaluationException | SpelParseException e ) { if ( failOnInvalidExpression ) { throw new UnresolvedExpressionException ( strippedExpression , e ) ; } else { logger . warn ( String . format ( "Skipping processor expression '%s' because it can not be resolved by any comment processor. Reason: %s. Set log level to TRACE to view Stacktrace." , processorExpression , e . getMessage ( ) ) ) ; logger . trace ( "Reason for skipping processor expression: " , e ) ; } } catch ( ProxyException e ) { throw new DocxStamperException ( "Could not create a proxy around context root object" , e ) ; } } |
public class Lexer { /** * Tokenizes the specified input string .
* @ param input the input string to tokenize
* @ return a stream of tokens
* @ throws LexerException when encounters an illegal character */
Stream < Token > tokenize ( String input ) { } } | List < Token > tokens = new ArrayList < Token > ( ) ; int tokenPos = 0 ; while ( ! input . isEmpty ( ) ) { boolean matched = false ; for ( Token . Type tokenType : Token . Type . values ( ) ) { Matcher matcher = tokenType . pattern . matcher ( input ) ; if ( matcher . find ( ) ) { matched = true ; input = matcher . replaceFirst ( "" ) ; if ( tokenType != Token . Type . WHITESPACE ) { tokens . add ( new Token ( tokenType , matcher . group ( ) , tokenPos ) ) ; } tokenPos += matcher . end ( ) ; break ; } } if ( ! matched ) { throw new LexerException ( input ) ; } } tokens . add ( new Token ( Token . Type . EOI , null , tokenPos ) ) ; return new Stream < Token > ( tokens . toArray ( new Token [ tokens . size ( ) ] ) ) ; |
public class SQLExpressions { /** * As an aggregate function , PERCENT _ RANK calculates , for a hypothetical row r identified by the
* arguments of the function and a corresponding sort specification , the rank of row r minus 1
* divided by the number of rows in the aggregate group . This calculation is made as if the
* hypothetical row r were inserted into the group of rows over which Oracle Database is to aggregate .
* The arguments of the function identify a single hypothetical row within each aggregate group .
* Therefore , they must all evaluate to constant expressions within each aggregate group . The
* constant argument expressions and the expressions in the ORDER BY clause of the aggregate match
* by position . Therefore the number of arguments must be the same and their types must be compatible .
* @ param args arguments
* @ return percent _ rank ( args ) */
public static WithinGroup < Double > percentRank ( Expression < ? > ... args ) { } } | return new WithinGroup < Double > ( Double . class , SQLOps . PERCENTRANK2 , args ) ; |
public class GroovyScriptMessageBuilder { /** * Build the control message from script code . */
public String buildMessagePayload ( TestContext context , String messageType ) { } } | try { // construct control message payload
String messagePayload = "" ; if ( scriptResourcePath != null ) { messagePayload = buildMarkupBuilderScript ( context . replaceDynamicContentInString ( FileUtils . readToString ( FileUtils . getFileResource ( scriptResourcePath , context ) , Charset . forName ( context . resolveDynamicValue ( scriptResourceCharset ) ) ) ) ) ; } else if ( scriptData != null ) { messagePayload = buildMarkupBuilderScript ( context . replaceDynamicContentInString ( scriptData ) ) ; } return messagePayload ; } catch ( IOException e ) { throw new CitrusRuntimeException ( "Failed to build control message payload" , e ) ; } |
public class AssertEquals { /** * Asserts that the element ' s text in a particular cell equals the provided
* expected text . If the element isn ' t present , or a table , this will
* constitute a failure , same as a mismatch . This information will be logged
* and recorded , with a screenshot for traceability and added debugging
* support .
* @ param row - the number of the row in the table - note , row numbering
* starts at 1 , NOT 0
* @ param col - the number of the column in the table - note , column
* numbering starts at 1 , NOT 0
* @ param expectedText - what text do we expect to be in the table cell */
public void text ( int row , int col , String expectedText ) { } } | String text = checkText ( row , col , expectedText , 0 , 0 ) ; String reason = NO_ELEMENT_FOUND ; if ( text == null && getElement ( ) . is ( ) . present ( ) ) { reason = "Element not table" ; } assertNotNull ( reason , text ) ; assertEquals ( "Text Mismatch" , expectedText , text ) ; |
public class Hit { /** * The highlights returned from a document that matches the search request .
* @ return The highlights returned from a document that matches the search request . */
public java . util . Map < String , String > getHighlights ( ) { } } | if ( highlights == null ) { highlights = new com . amazonaws . internal . SdkInternalMap < String , String > ( ) ; } return highlights ; |
public class Normalizer { /** * Test if a string is in a given normalization form .
* This is semantically equivalent to source . equals ( normalize ( source , mode ) ) .
* Unlike quickCheck ( ) , this function returns a definitive result ,
* never a " maybe " .
* For NFD , NFKD , and FCD , both functions work exactly the same .
* For NFC and NFKC where quickCheck may return " maybe " , this function will
* perform further tests to arrive at a true / false result .
* @ param str the input string to be checked to see if it is
* normalized
* @ param mode the normalization mode
* @ param options Options for use with exclusion set and tailored Normalization
* The only option that is currently recognized is UNICODE _ 3_2
* @ see # isNormalized
* @ deprecated ICU 56 Use { @ link Normalizer2 } instead .
* @ hide original deprecated declaration */
@ Deprecated public static boolean isNormalized ( String str , Mode mode , int options ) { } } | return mode . getNormalizer2 ( options ) . isNormalized ( str ) ; |
public class HoiioFaxClientSpi { /** * Hook for extending classes .
* @ param configuration
* The response handler configuration */
@ Override protected void updateHTTPResponseHandlerConfiguration ( Map < String , String > configuration ) { } } | // get property part
String propertyPart = this . getPropertyPart ( ) ; // modify configuration
configuration . put ( MessageFormat . format ( JSONHTTPResponseHandlerConfigurationConstants . SUBMIT_JSON_OUTPUT_PROPERTY_KEY . toString ( ) , propertyPart ) , "txn_ref" ) ; configuration . put ( MessageFormat . format ( JSONHTTPResponseHandlerConfigurationConstants . ERROR_DETECTION_PATH_PROPERTY_KEY . toString ( ) , propertyPart ) , "status" ) ; configuration . put ( MessageFormat . format ( JSONHTTPResponseHandlerConfigurationConstants . ERROR_DETECTION_VALUE_PROPERTY_KEY . toString ( ) , propertyPart ) , "error_" ) ; configuration . put ( MessageFormat . format ( JSONHTTPResponseHandlerConfigurationConstants . GET_STATUS_JSON_OUTPUT_PROPERTY_KEY . toString ( ) , propertyPart ) , "fax_status" ) ; configuration . put ( MessageFormat . format ( JSONHTTPResponseHandlerConfigurationConstants . IN_PROGRESS_STATUS_MAPPING_PROPERTY_KEY . toString ( ) , propertyPart ) , "ongoing" ) ; configuration . put ( MessageFormat . format ( JSONHTTPResponseHandlerConfigurationConstants . ERROR_STATUS_MAPPING_PROPERTY_KEY . toString ( ) , propertyPart ) , "unanswered;failed;busy" ) ; |
public class LookupService { /** * Reads meta - data from the database file .
* @ throws IOException
* if an error occurs reading from the database file . */
private synchronized void init ( ) throws IOException { } } | if ( file == null ) { return ; } if ( ( dboptions & GEOIP_CHECK_CACHE ) != 0 ) { mtime = databaseFile . lastModified ( ) ; } file . seek ( file . length ( ) - 3 ) ; byte [ ] delim = new byte [ 3 ] ; byte [ ] buf = new byte [ SEGMENT_RECORD_LENGTH ] ; for ( int i = 0 ; i < STRUCTURE_INFO_MAX_SIZE ; i ++ ) { file . readFully ( delim ) ; if ( delim [ 0 ] == - 1 && delim [ 1 ] == - 1 && delim [ 2 ] == - 1 ) { databaseType = file . readByte ( ) ; if ( databaseType >= 106 ) { // Backward compatibility with databases from April 2003 and
// earlier
databaseType -= 105 ; } // Determine the database type .
if ( databaseType == DatabaseInfo . REGION_EDITION_REV0 ) { databaseSegments = new int [ 1 ] ; databaseSegments [ 0 ] = STATE_BEGIN_REV0 ; recordLength = STANDARD_RECORD_LENGTH ; } else if ( databaseType == DatabaseInfo . REGION_EDITION_REV1 ) { databaseSegments = new int [ 1 ] ; databaseSegments [ 0 ] = STATE_BEGIN_REV1 ; recordLength = STANDARD_RECORD_LENGTH ; } else if ( databaseType == DatabaseInfo . CITY_EDITION_REV0 || databaseType == DatabaseInfo . CITY_EDITION_REV1 || databaseType == DatabaseInfo . ORG_EDITION || databaseType == DatabaseInfo . ORG_EDITION_V6 || databaseType == DatabaseInfo . ISP_EDITION || databaseType == DatabaseInfo . ISP_EDITION_V6 || databaseType == DatabaseInfo . DOMAIN_EDITION || databaseType == DatabaseInfo . DOMAIN_EDITION_V6 || databaseType == DatabaseInfo . ASNUM_EDITION || databaseType == DatabaseInfo . ASNUM_EDITION_V6 || databaseType == DatabaseInfo . NETSPEED_EDITION_REV1 || databaseType == DatabaseInfo . NETSPEED_EDITION_REV1_V6 || databaseType == DatabaseInfo . CITY_EDITION_REV0_V6 || databaseType == DatabaseInfo . CITY_EDITION_REV1_V6 ) { databaseSegments = new int [ 1 ] ; databaseSegments [ 0 ] = 0 ; if ( databaseType == DatabaseInfo . CITY_EDITION_REV0 || databaseType == DatabaseInfo . CITY_EDITION_REV1 || databaseType == DatabaseInfo . ASNUM_EDITION_V6 || databaseType == DatabaseInfo . NETSPEED_EDITION_REV1 || databaseType == DatabaseInfo . NETSPEED_EDITION_REV1_V6 || databaseType == DatabaseInfo . CITY_EDITION_REV0_V6 || databaseType == DatabaseInfo . CITY_EDITION_REV1_V6 || databaseType == DatabaseInfo . ASNUM_EDITION ) { recordLength = STANDARD_RECORD_LENGTH ; } else { recordLength = ORG_RECORD_LENGTH ; } file . readFully ( buf ) ; for ( int j = 0 ; j < SEGMENT_RECORD_LENGTH ; j ++ ) { databaseSegments [ 0 ] += ( unsignedByteToInt ( buf [ j ] ) << ( j * 8 ) ) ; } } break ; } else { file . seek ( file . getFilePointer ( ) - 4 ) ; } } if ( ( databaseType == DatabaseInfo . 
COUNTRY_EDITION ) || ( databaseType == DatabaseInfo . COUNTRY_EDITION_V6 ) || ( databaseType == DatabaseInfo . PROXY_EDITION ) || ( databaseType == DatabaseInfo . NETSPEED_EDITION ) ) { databaseSegments = new int [ 1 ] ; databaseSegments [ 0 ] = COUNTRY_BEGIN ; recordLength = STANDARD_RECORD_LENGTH ; } if ( ( dboptions & GEOIP_MEMORY_CACHE ) == 1 ) { int l = ( int ) file . length ( ) ; dbbuffer = new byte [ l ] ; file . seek ( 0 ) ; file . readFully ( dbbuffer , 0 , l ) ; databaseInfo = getDatabaseInfo ( ) ; file . close ( ) ; } if ( ( dboptions & GEOIP_INDEX_CACHE ) != 0 ) { int l = databaseSegments [ 0 ] * recordLength * 2 ; index_cache = new byte [ l ] ; file . seek ( 0 ) ; file . readFully ( index_cache , 0 , l ) ; } else { index_cache = null ; } |
public class DateUtils { /** * Get specify days back from given date .
* @ param daysBack how many days want to be back .
* @ param date date to be handled .
* @ return a new Date object . */
public static Date getDateOfDaysBack ( final int daysBack , final Date date ) { } } | return dateBack ( Calendar . DAY_OF_MONTH , daysBack , date ) ; |
public class Dbi { /** * Iterate the database in accordance with the provided { @ link KeyRange } and
* { @ link Comparator } .
* If a comparator is provided , it must reflect the same ordering as LMDB uses
* for cursor operations ( eg first , next , last , previous etc ) .
* If a null comparator is provided , any comparator provided when opening the
* database is used . If no database comparator was specified , the buffer ' s
* default comparator is used . Such buffer comparators reflect LMDB ' s default
* lexicographical order .
* @ param txn transaction handle ( not null ; not committed )
* @ param range range of acceptable keys ( not null )
* @ param comparator custom comparator for keys ( may be null )
* @ return iterator ( never null ) */
public CursorIterator < T > iterate ( final Txn < T > txn , final KeyRange < T > range , final Comparator < T > comparator ) { } } | if ( SHOULD_CHECK ) { requireNonNull ( txn ) ; requireNonNull ( range ) ; txn . checkReady ( ) ; } final Comparator < T > useComp ; if ( comparator == null ) { useComp = compFunc == null ? txn . comparator ( ) : compFunc ; } else { useComp = comparator ; } return new CursorIterator < > ( txn , this , range , useComp ) ; |
public class RtfHeaderFooterGroup { /** * Write the content of this RtfHeaderFooterGroup . */
public void writeContent ( final OutputStream result ) throws IOException { } } | if ( this . mode == MODE_SINGLE ) { headerAll . writeContent ( result ) ; } else if ( this . mode == MODE_MULTIPLE ) { if ( headerFirst != null ) { headerFirst . writeContent ( result ) ; } if ( headerLeft != null ) { headerLeft . writeContent ( result ) ; } if ( headerRight != null ) { headerRight . writeContent ( result ) ; } if ( headerAll != null ) { headerAll . writeContent ( result ) ; } } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.