signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DoubleArrayFunctionsND { /** * Applies the given unary operator to the elements from the given array , * and stores the result in the given result array . < br > * < br > * If the given result array is < code > null < / code > , then a new array * will be created and returned . < br > * < br > * The source array and the target array may be identical . * @ param a0 The array * @ param op The operator to apply * @ param result The array that will store the result * @ return The result * @ throws IllegalArgumentException If the given arrays do not have * equal sizes . */ public static MutableDoubleArrayND apply ( DoubleArrayND a0 , DoubleUnaryOperator op , MutableDoubleArrayND result ) { } }
MutableDoubleArrayND finalResult = validate ( a0 , result ) ; finalResult . coordinates ( ) . parallel ( ) . forEach ( t -> { double operand0 = a0 . get ( t ) ; double r = op . applyAsDouble ( operand0 ) ; finalResult . set ( t , r ) ; } ) ; return finalResult ;
public class SiteProperties { /** * Returns the fallback target ID . The fallback target ID is used in case none of the resolved candidate targeted * URLs map to existing content . */ public static String getFallbackTargetId ( ) { } }
Configuration config = ConfigUtils . getCurrentConfig ( ) ; if ( config != null ) { return config . getString ( FALLBACK_ID_CONFIG_KEY ) ; } else { return null ; }
public class ST_OffSetCurve { /** * Compute the offset curve for a polygon , a point or a collection of geometries * @ param list * @ param geometry * @ param offset * @ param bufferParameters */ public static void geometryOffSetCurve ( ArrayList < LineString > list , Geometry geometry , double offset , BufferParameters bufferParameters ) { } }
final List curves = new OffsetCurveSetBuilder ( geometry , offset , new OffsetCurveBuilder ( geometry . getFactory ( ) . getPrecisionModel ( ) , bufferParameters ) ) . getCurves ( ) ; final Iterator < SegmentString > iterator = curves . iterator ( ) ; while ( iterator . hasNext ( ) ) { list . add ( geometry . getFactory ( ) . createLineString ( iterator . next ( ) . getCoordinates ( ) ) ) ; }
public class WikiPageUtil {
    /**
     * Returns <code>true</code> if the given value is a valid XML character
     * as defined by http://www.w3.org/TR/xml/#charsets.
     *
     * @param ch the code point to check
     * @return <code>true</code> if the given value is a valid XML character
     */
    public static boolean isValidXmlChar(int ch) {
        // Tab, line feed, carriage return are the only valid controls
        if (ch == 0x9 || ch == 0xA || ch == 0xD) {
            return true;
        }
        // BMP ranges, excluding surrogates (0xD800-0xDFFF) and 0xFFFE/0xFFFF
        if (ch >= 0x20 && ch <= 0xD7FF) {
            return true;
        }
        if (ch >= 0xE000 && ch <= 0xFFFD) {
            return true;
        }
        // Supplementary planes
        return ch >= 0x10000 && ch <= 0x10FFFF;
    }
}
public class DefaultTokenManager {
    /**
     * Retrieves the access token String from the OAuth2 token object.
     * Refreshes or re-fetches the cached token as needed, then returns the
     * first non-empty token field in priority order: access token, delegated
     * refresh token, IMS token, UAA token.
     */
    @Override
    public String getToken() {
        log.debug("DefaultTokenManager getToken()");
        // Cache miss: fetch a token synchronously from the provider
        if (!checkCache()) {
            retrieveToken();
        }
        // retrieve from cache
        if (token == null) {
            token = retrieveTokenFromCache();
        }
        // check if expired; re-read the cache (another thread may have refreshed it)
        if (hasTokenExpired(token)) {
            token = retrieveTokenFromCache();
        }
        // check if token should be refreshed. If a refresh token is not present,
        // the token manager will call upon the original token provider to
        // retrieve a fresh token. Only one async refresh runs at a time.
        if (isTokenExpiring(token) && !isAsyncInProgress()) {
            if (null != token.getRefresh_token()) {
                this.asyncInProgress = true;
                submitRefreshTask();
            } else {
                retrieveToken();
                token = retrieveTokenFromCache();
            }
        }
        // Return the first usable credential, in priority order
        if (token.getAccess_token() != null && !token.getAccess_token().isEmpty()) {
            return token.getAccess_token();
        } else if (token.getDelegated_refresh_token() != null && !token.getDelegated_refresh_token().isEmpty()) {
            return token.getDelegated_refresh_token();
        } else if (token.getIms_token() != null && !token.getIms_token().isEmpty()) {
            return token.getIms_token();
        } else {
            return token.getUaa_token();
        }
    }
}
public class Strings {
    /**
     * Builds a string by repeating {@code string} {@code count} times,
     * joined by {@code separator}.
     *
     * @param string the string to repeat; must not be null
     * @param separator the separator placed between repetitions; treated as
     *        {@code ""} when null
     * @param count the number of repetitions, must be {@code >= 0}
     * @return the joined result; the empty string when {@code count == 0}
     * @throws NullPointerException if {@code string} is null
     * @throws IllegalArgumentException if {@code count} is negative
     */
    public static String repeat(String string, String separator, int count) {
        Objects.requireNonNull(string);
        if (count < 0) {
            throw new IllegalArgumentException("invalid count: " + count);
        }
        // BUG FIX: the original appended the string once even for count == 0.
        if (count == 0) {
            return "";
        }
        String sep = separator == null ? "" : separator;
        // StringBuilder instead of the synchronized StringBuffer
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < count; i++) {
            if (i > 0) {
                sb.append(sep);
            }
            sb.append(string);
        }
        return sb.toString();
    }
}
public class RestController { /** * Checks if an entity exists . */ @ GetMapping ( value = "/{entityTypeId}/exist" , produces = APPLICATION_JSON_VALUE ) public boolean entityExists ( @ PathVariable ( "entityTypeId" ) String entityTypeId ) { } }
return dataService . hasRepository ( entityTypeId ) ;
public class AmqpMessageHandlerService {
    /**
     * Creates a new target or finds the target if it already exists, based on
     * the THING_ID header of the incoming createThing message.
     *
     * @param message the incoming AMQP message; must carry a THING_ID header
     *        and a replyTo property
     * @param virtualHost the AMQP virtual host used to build the reply URI
     */
    private void registerTarget(final Message message, final String virtualHost) {
        final String thingId = getStringHeaderKey(message, MessageHeaderKey.THING_ID, "ThingId is null");
        final String replyTo = message.getMessageProperties().getReplyTo();
        // Without a replyTo address we cannot answer the device; reject the message
        if (StringUtils.isEmpty(replyTo)) {
            logAndThrowMessageError(message, "No ReplyTo was set for the createThing message.");
        }
        final URI amqpUri = IpUtil.createAmqpUri(virtualHost, replyTo);
        final Target target = controllerManagement.findOrRegisterTargetIfItDoesNotExist(thingId, amqpUri);
        LOG.debug("Target {} reported online state.", thingId);
        // A freshly (re-)registered target may already have a pending update
        lookIfUpdateAvailable(target);
    }
}
public class BureauRegistry {
    /**
     * Called when something goes wrong with launching a bureau. Records the
     * error on each attached agent, destroys the agents, and removes the
     * bureau from the registry.
     *
     * @param bureau the bureau that failed to launch
     * @param error the exception that caused the failure, may be null
     * @param cause a textual cause; derived from {@code error} when null
     */
    protected void handleLaunchError(Bureau bureau, Exception error, String cause) {
        if (cause == null && error != null) {
            cause = error.getMessage();
        }
        log.info("Bureau failed to launch", "bureau", bureau, "cause", cause);
        // clean up any agents attached to this bureau
        for (AgentObject agent : bureau.agentStates.keySet()) {
            // record the error so the agent's owner can see why it died
            agent.getLocal(AgentData.class).launchError = error;
            _omgr.destroyObject(agent.getOid());
        }
        bureau.agentStates.clear();
        _bureaus.remove(bureau.bureauId);
    }
}
public class DefaultGroovyMethods {
    /**
     * Obtains a MetaClass for an object either from the registry or in the case of
     * a GroovyObject from the object itself.
     *
     * @param obj The object in question
     * @return The MetaClass, wrapped in a HandleMetaClass bound to {@code obj}
     * @since 1.5.0
     */
    public static MetaClass getMetaClass(Object obj) {
        MetaClass mc = InvokerHelper.getMetaClass(obj);
        // Wrap so that meta-class mutations are applied to this instance
        return new HandleMetaClass(mc, obj);
    }
}
public class TextToSpeechWebSocketListener {
    /**
     * Dispatches an incoming text frame to the callback. The branch order
     * defines precedence: errors first, then warnings, content type,
     * timings, and marks.
     *
     * @see okhttp3.WebSocketListener#onMessage(okhttp3.WebSocket, java.lang.String)
     */
    @Override
    public void onMessage(WebSocket webSocket, String message) {
        JsonObject json = new JsonParser().parse(message).getAsJsonObject();
        if (json.has(ERROR)) {
            String error = json.get(ERROR).getAsString();
            callback.onError(new RuntimeException(error));
        } else if (json.has(WARNINGS)) {
            String warning = json.get(WARNINGS).getAsString();
            callback.onWarning(new RuntimeException(warning));
        } else if (json.has(BINARY_STREAMS)) {
            // Only the first binary stream's content type is reported
            String contentType = json.get(BINARY_STREAMS).getAsJsonArray().get(0)
                .getAsJsonObject().get(CONTENT_TYPE).getAsString();
            callback.onContentType(contentType);
        } else if (json.has(WORDS)) {
            callback.onTimings(GSON.fromJson(message, Timings.class));
        } else if (json.has(MARKS)) {
            callback.onMarks(GSON.fromJson(message, Marks.class));
        }
    }
}
public class ExceptionUtils { /** * Returns a { @ link Mono } containing an { @ link IllegalStateException } with the configured message * @ param format A < a href = " . . / util / Formatter . html # syntax " > format string < / a > * @ param args Arguments referenced by the format specifiers in the format string . If there are more arguments than format specifiers , the extra arguments are ignored . The number of arguments * is variable and may be zero . The maximum number of arguments is limited by the maximum dimension of a Java array as defined by < cite > The Java & trade ; Virtual Machine * Specification < / cite > . The behaviour on a { @ code null } argument depends on the < a href = " . . / util / Formatter . html # syntax " > conversion < / a > . * @ param < T > the type of the { @ link Mono } being converted * @ return a { @ link Mono } containing the error */ public static < T > Mono < T > illegalState ( String format , Object ... args ) { } }
String message = String . format ( format , args ) ; return Mono . error ( new IllegalStateException ( message ) ) ;
public class ValidationHelper { /** * Checks whether the value of the given property key of the given node * if not null and matches the given regular expression . * @ param node * @ param key * @ param expression * @ param errorBuffer * @ return true if string matches expression */ public static boolean isValidStringMatchingRegex ( final String value , final String expression ) { } }
Pattern pattern = patterns . get ( expression ) ; if ( pattern == null ) { pattern = Pattern . compile ( expression ) ; patterns . put ( expression , pattern ) ; } return ( value != null && pattern . matcher ( value ) . matches ( ) ) ;
public class RowColumnOps { /** * Updates the values of row < tt > i < / tt > in the given matrix to be A [ i , : ] = A [ i , : ] + c * @ param A the matrix to perform he update on * @ param i the row to update * @ param start the first index of the row to update from ( inclusive ) * @ param to the last index of the row to update ( exclusive ) * @ param c the constant to add to each element */ public static void addRow ( Matrix A , int i , int start , int to , double c ) { } }
for ( int j = start ; j < to ; j ++ ) A . increment ( i , j , c ) ;
public class LocalCachedMapOptions { /** * Sets eviction policy . * @ param evictionPolicy * < p > < code > LRU < / code > - uses local cache with LRU ( least recently used ) eviction policy . * < p > < code > LFU < / code > - uses local cache with LFU ( least frequently used ) eviction policy . * < p > < code > SOFT < / code > - uses local cache with soft references . The garbage collector will evict items from the local cache when the JVM is running out of memory . * < p > < code > NONE < / code > - doesn ' t use eviction policy , but timeToLive and maxIdleTime params are still working . * @ return LocalCachedMapOptions instance */ public LocalCachedMapOptions < K , V > evictionPolicy ( EvictionPolicy evictionPolicy ) { } }
if ( evictionPolicy == null ) { throw new NullPointerException ( "evictionPolicy can't be null" ) ; } this . evictionPolicy = evictionPolicy ; return this ;
public class GetDocumentAnalysisRequestMarshaller {
    /**
     * Marshalls the given request object's fields (job id, max results,
     * next token) into the protocol marshaller.
     *
     * @param getDocumentAnalysisRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives the fields
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetDocumentAnalysisRequest getDocumentAnalysisRequest, ProtocolMarshaller protocolMarshaller) {
        if (getDocumentAnalysisRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getDocumentAnalysisRequest.getJobId(), JOBID_BINDING);
            protocolMarshaller.marshall(getDocumentAnalysisRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(getDocumentAnalysisRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap everything in the SDK's client exception, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Base64Slow { /** * Decode a Base64 encoded String . Characters that are not part of the Base64 alphabet are ignored * in the input . * @ param string The data to decode . * @ param enc Character encoding to use when converting from bytes . * @ return decoded data . * @ throws UnsupportedEncodingException if the character encoding specified is not supported . * @ since ostermillerutils 1.02.16 */ public static byte [ ] decodeToBytes ( String string , String enc ) throws UnsupportedEncodingException { } }
return decode ( string . getBytes ( enc ) ) ;
public class PropertyReflector { /** * Checks if object has a property with specified name . . * @ param obj an object to introspect . * @ param name a name of the property to check . * @ return true if the object has the property and false if it doesn ' t . */ public static boolean hasProperty ( Object obj , String name ) { } }
if ( obj == null ) throw new NullPointerException ( "Object cannot be null" ) ; if ( name == null ) throw new NullPointerException ( "Property name cannot be null" ) ; Class < ? > objClass = obj . getClass ( ) ; // Search in fields for ( Field field : objClass . getFields ( ) ) { if ( matchField ( field , name ) ) return true ; } // Search in properties name = "get" + name ; for ( Method method : objClass . getMethods ( ) ) { if ( matchPropertyGetter ( method , name ) ) return true ; } return false ;
public class Matrix4d {
    /**
     * Pre-multiplies uniform scaling by <code>factor</code> to this matrix,
     * using <code>(ox, oy, oz)</code> as the scaling origin.
     * If <code>M</code> is <code>this</code> matrix and <code>S</code> the scaling matrix,
     * the result is <code>S * M</code>, so the scaling is applied last when
     * transforming a vector.
     * Equivalent to
     * <code>new Matrix4d().translate(ox, oy, oz).scale(factor).translate(-ox, -oy, -oz).mul(this, this)</code>.
     *
     * @param factor the scaling factor for all three axes
     * @param ox the x coordinate of the scaling origin
     * @param oy the y coordinate of the scaling origin
     * @param oz the z coordinate of the scaling origin
     * @return this
     */
    public Matrix4d scaleAroundLocal(double factor, double ox, double oy, double oz) {
        // Delegate to the per-axis overload with a uniform factor, storing into this
        return scaleAroundLocal(factor, factor, factor, ox, oy, oz, this);
    }
}
public class TokenizerAdapter {
    /**
     * Internally fetches the next token from the underlying StreamTokenizer.
     *
     * @return the next token in the token stream, or null if none exists
     *         (end of stream, or an I/O error treated as end of stream).
     */
    @Override
    public String getNext() {
        try {
            int nextTok = st.nextToken();
            switch (nextTok) {
                case java.io.StreamTokenizer.TT_EOL:
                    return eolString;
                case java.io.StreamTokenizer.TT_EOF:
                    return null;
                case java.io.StreamTokenizer.TT_WORD:
                    return st.sval;
                case java.io.StreamTokenizer.TT_NUMBER:
                    // numbers are surfaced as their decimal string form
                    return Double.toString(st.nval);
                default:
                    // ordinary character token: return it as a one-char string
                    char[] t = { (char) nextTok }; // (array initialization)
                    return new String(t);
            }
        } catch (IOException ioe) {
            // do nothing, return null — I/O failure is treated as exhaustion
            return null;
        }
    }
}
public class LocalTransactionCurrentService {
    /**
     * Ends the current local transaction containment, if one is active.
     * {@inheritDoc}
     */
    @Override
    public void end(int arg0) throws InconsistentLocalTranException, RolledbackException, IllegalStateException {
        // No-op when there is no active local transaction containment
        if (ltc != null) {
            ltc.end(arg0);
        }
    }
}
public class AbstrCFMLScriptTransformer {
    /**
     * Reads a case statement of a switch ("case &lt;expr&gt;: &lt;body&gt;")
     * from the source cursor and adds it to the given switch.
     *
     * @param data transformer state, including the source cursor
     * @param swit the switch statement receiving the parsed case
     * @return true if a case statement was consumed, false if the cursor was
     *         not positioned at a "case" keyword
     * @throws TemplateException if the case body does not start with [:]
     */
    private final boolean caseStatement(Data data, Switch swit) throws TemplateException {
        // Only advance if the keyword "case" is present as a whole word
        if (!data.srcCode.forwardIfCurrentAndNoWordAfter("case")) return false;
        // int line = data.srcCode.getLine();
        comments(data);
        Expression expr = super.expression(data);
        comments(data);
        if (!data.srcCode.forwardIfCurrent(':'))
            throw new TemplateException(data.srcCode, "case body must start with [:]");
        Body body = new BodyBase(data.factory);
        // Consume everything up to the next case/default/closing brace
        switchBlock(data, body);
        swit.addCase(expr, body);
        return true;
    }
}
public class DefaultWhenFileSystem {
    /**
     * Unlinks the link on the file system represented by the path {@code link}, asynchronously.
     *
     * @param link the link
     * @return a promise for completion
     */
    @Override
    public Promise<Void> unlink(String link) {
        // Bridge the Vert.x callback-style API into a promise
        return adapter.toPromise(handler -> vertx.fileSystem().unlink(link, handler));
    }
}
import java.util.*;

class PerformXOR {
    /**
     * Given two strings of binary '0's and '1's, performs a bitwise XOR on
     * them, position by position.
     *
     * Args:
     *   firstString: a binary string
     *   secondString: a binary string (same length as firstString)
     * Returns:
     *   the binary XOR of firstString and secondString as a string.
     * Example:
     *   performXOR("010", "110") -> "100"
     */
    public static String performXOR(String firstString, String secondString) {
        int length = firstString.length();
        StringBuilder xor = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            // equal characters XOR to 0, differing characters XOR to 1
            xor.append(firstString.charAt(i) == secondString.charAt(i) ? '0' : '1');
        }
        return xor.toString();
    }
}
public class HBaseRequestAdapter {
    /**
     * Adapts an HBase Delete into Bigtable mutations.
     *
     * @param delete a {@link org.apache.hadoop.hbase.client.Delete} object.
     * @param mutationApi a {@link com.google.cloud.bigtable.data.v2.models.MutationApi} object
     *        that receives the translated mutations.
     */
    @InternalApi
    public void adapt(Delete delete, MutationApi<?> mutationApi) {
        // Delegate to the shared, stateless delete adapter
        Adapters.DELETE_ADAPTER.adapt(delete, mutationApi);
    }
}
public class ProtocolConnectionManager {
    /**
     * Notification that a connection was closed. If it is the managed
     * connection, either marks the manager as disconnected (when shutting
     * down) or advances the reconnect task state machine.
     *
     * @param closed the closed connection
     */
    private void onConnectionClose(final Connection closed) {
        synchronized (this) {
            // Ignore close events for stale/replaced connections
            if (connection == closed) {
                connection = null;
                if (shutdown) {
                    connectTask = DISCONNECTED;
                    return;
                }
                // Let the current task decide the follow-up (e.g. reconnect)
                final ConnectTask previous = connectTask;
                connectTask = previous.connectionClosed();
            }
        }
    }
}
public class IncorrectInternalClassUse { /** * determines if the class in question is an internal class by looking at package prefixes * @ param clsName * the name of the class to check * @ return whether the class is internal */ private static boolean isInternal ( String clsName ) { } }
boolean internal = false ; for ( String internalPackage : internalPackages ) { if ( clsName . startsWith ( internalPackage ) ) { internal = true ; break ; } } if ( internal ) { for ( String externalPackage : externalPackages ) { if ( clsName . startsWith ( externalPackage ) ) { internal = false ; break ; } } } return internal ;
public class YearQuarter {
    /**
     * Returns a copy of this {@code YearQuarter} with the quarter-of-year altered.
     * This instance is immutable and unaffected by this method call.
     *
     * @param quarter the quarter-of-year to set in the returned year-quarter, from 1 to 4
     * @return a {@code YearQuarter} based on this year-quarter with the requested quarter, not null
     * @throws DateTimeException if the quarter-of-year value is invalid
     */
    public YearQuarter withQuarter(int quarter) {
        // Validate 1..4 before constructing the new value
        QUARTER_OF_YEAR.range().checkValidValue(quarter, QUARTER_OF_YEAR);
        return with(year, Quarter.of(quarter));
    }
}
public class DFSContentProvider { /** * / * @ inheritDoc */ public Object getParent ( Object element ) { } }
if ( element instanceof DFSPath ) { return ( ( DFSPath ) element ) . getParent ( ) ; } else if ( element instanceof HadoopServer ) { return locationsRoot ; } return null ;
public class RtfRow { /** * Performs a second pass over all cells to handle cell row / column spanning . */ protected void handleCellSpanning ( ) { } }
RtfCell deletedCell = new RtfCell ( true ) ; for ( int i = 0 ; i < this . cells . size ( ) ; i ++ ) { RtfCell rtfCell = ( RtfCell ) this . cells . get ( i ) ; if ( rtfCell . getColspan ( ) > 1 ) { int cSpan = rtfCell . getColspan ( ) ; for ( int j = i + 1 ; j < i + cSpan ; j ++ ) { if ( j < this . cells . size ( ) ) { RtfCell rtfCellMerge = ( RtfCell ) this . cells . get ( j ) ; rtfCell . setCellRight ( rtfCell . getCellRight ( ) + rtfCellMerge . getCellWidth ( ) ) ; rtfCell . setCellWidth ( rtfCell . getCellWidth ( ) + rtfCellMerge . getCellWidth ( ) ) ; this . cells . set ( j , deletedCell ) ; } } } if ( rtfCell . getRowspan ( ) > 1 ) { ArrayList rows = this . parentTable . getRows ( ) ; for ( int j = 1 ; j < rtfCell . getRowspan ( ) ; j ++ ) { RtfRow mergeRow = ( RtfRow ) rows . get ( this . rowNumber + j ) ; if ( this . rowNumber + j < rows . size ( ) ) { RtfCell rtfCellMerge = ( RtfCell ) mergeRow . getCells ( ) . get ( i ) ; rtfCellMerge . setCellMergeChild ( rtfCell ) ; } if ( rtfCell . getColspan ( ) > 1 ) { int cSpan = rtfCell . getColspan ( ) ; for ( int k = i + 1 ; k < i + cSpan ; k ++ ) { if ( k < mergeRow . getCells ( ) . size ( ) ) { mergeRow . getCells ( ) . set ( k , deletedCell ) ; } } } } } }
public class ContinuableFuture {
    /**
     * Executes the given command asynchronously with no dependent "other"
     * future to wait on.
     *
     * @param command the task to run
     * @param <R> the result type of the task
     * @return a future for the task's result
     */
    private <R> ContinuableFuture<R> execute(final Callable<R> command) {
        // Delegate to the two-arg overload with no upstream future
        return execute(command, null);
    }
}
public class CounterGroup { /** * Looks up key in the ResourceBundle and returns the corresponding value . * If the bundle or the key doesn ' t exist , returns the default value . */ private String localize ( String key , String defaultValue ) { } }
String result = defaultValue ; if ( bundle != null ) { try { result = bundle . getString ( key ) ; } catch ( MissingResourceException mre ) { } } return result ;
public class BookmarksApi {
    /**
     * List corporation bookmark folders. A list of your corporation&#39;s
     * bookmark folders --- This route is cached for up to 3600 seconds. SSO
     * Scope: esi-bookmarks.read_corporation_bookmarks.v1
     *
     * @param corporationId an EVE corporation ID (required)
     * @param datasource the server name you would like data from (optional,
     *        default to tranquility)
     * @param ifNoneMatch ETag from a previous request; a 304 will be returned
     *        if this matches the current ETag (optional)
     * @param page which page of results to return (optional, default to 1)
     * @param token access token to use if unable to set a header (optional)
     * @return ApiResponse&lt;List&lt;CorporationBookmarkFoldersResponse&gt;&gt;
     * @throws ApiException if the API call fails, e.g. server error or the
     *         response body cannot be deserialized
     */
    public ApiResponse<List<CorporationBookmarkFoldersResponse>> getCorporationsCorporationIdBookmarksFoldersWithHttpInfo(
            Integer corporationId, String datasource, String ifNoneMatch, Integer page, String token) throws ApiException {
        // Validate parameters and build the HTTP call (null = no progress listener)
        com.squareup.okhttp.Call call = getCorporationsCorporationIdBookmarksFoldersValidateBeforeCall(
            corporationId, datasource, ifNoneMatch, page, token, null);
        Type localVarReturnType = new TypeToken<List<CorporationBookmarkFoldersResponse>>() { }.getType();
        return apiClient.execute(call, localVarReturnType);
    }
}
public class AbstractSorter { /** * Configures the Comparator to use during the sort operation by the calling Thread . * @ param < T > the Class type of the elements in the list . * @ param sortable the Sortable implementing object specifying the Comparator to use to order elements in the list * during the sort operation . * @ return the Sortable implementing object to method chaining purposes . * @ see # configureComparator ( org . cp . elements . util . sort . annotation . Sortable ) * @ see # isCustomComparatorAllowed ( ) * @ see ComparatorHolder # set ( java . util . Comparator ) * @ see org . cp . elements . util . sort . Sortable # getOrderBy ( ) * @ see java . util . Comparator */ protected < T > Sortable < T > configureComparator ( final Sortable < T > sortable ) { } }
if ( isCustomComparatorAllowed ( ) ) { Comparator < ? > comparator = sortable . getOrderBy ( ) ; if ( comparator != null ) { ComparatorHolder . set ( comparator ) ; } } return sortable ;
public class HowlLogger { /** * Sub - classes call this method to write log records with * a specific record type . * @ param type a record type defined in LogRecordType . * @ param data record data to be logged . * @ return a log key that can be used to reference * the record . */ public long write ( short type , byte [ ] [ ] data ) throws InterruptedException , IOException { } }
try { return super . put ( type , data , true ) ; } catch ( InterruptedException e ) { throw e ; } catch ( IOException e ) { throw e ; } catch ( Throwable e ) { throw new DelegatedRuntimeException ( e ) ; }
public class AptControlInterface { /** * Does the method have a Java Beans getter method signature ( is varient ) . * @ param method AptMethod instance . * @ return true if ' is ' getter . */ private boolean isIsGetter ( AptMethod method ) { } }
String methodName = method . getName ( ) ; if ( methodName . length ( ) < 3 ) return false ; if ( ! methodName . startsWith ( "is" ) ) return false ; if ( method . getArgList ( ) . length ( ) > 0 ) return false ; if ( ! "boolean" . equals ( method . getReturnType ( ) ) ) return false ; return true ;
public class QueueUtil {
    /**
     * Drains the provided queue by the size of the queue as it is known
     * upfront draining.
     * The rationale for using this method is the same as for using
     * {@link Queue#clear()}: to remove all the elements from the queue.
     * There are two major differences:
     * <ol>
     * <li>This method doesn't guarantee that the queue is empty on return if
     * it is written concurrently.</li>
     * <li>This method returns the number of drained elements, while
     * {@link Queue#clear()} doesn't.</li>
     * </ol>
     * A predicate may be provided to allow some elements to be drained but
     * not counted against the returned number of drained elements.
     *
     * @param queue the queue to be drained
     * @param drainedCountFilter filter which determines if a drained element
     *        is counted; may be {@code null}, in which case all elements count
     * @param <E> the type of the elements in the queue
     * @return the number of drained elements which pass the given predicate
     */
    public static <E> int drainQueue(Queue<E> queue, Predicate<E> drainedCountFilter) {
        // Snapshot the size now; concurrent writers may add more afterwards
        return drainQueue(queue, queue.size(), drainedCountFilter);
    }
}
public class DateFormatException { /** * / * ( non - Javadoc ) * @ see java . lang . Throwable # getMessage ( ) */ @ Override public String getMessage ( ) { } }
StringBuilder sb = new StringBuilder ( ) ; sb . append ( "Illegal xsd:dateTime format: " ) ; sb . append ( super . getMessage ( ) ) ; sb . append ( " at index " ) ; sb . append ( index ) ; sb . append ( " of '" ) ; sb . append ( input ) ; sb . append ( "'" ) ; return sb . toString ( ) ;
public class DatabasesInner {
    /**
     * Gets a database inside of a recommended elastic pool.
     *
     * @param resourceGroupName the name of the resource group that contains the resource;
     *        you can obtain this value from the Azure Resource Manager API or the portal
     * @param serverName the name of the server
     * @param recommendedElasticPoolName the name of the elastic pool to be retrieved
     * @param databaseName the name of the database to be retrieved
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by the server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the DatabaseInner object if successful
     */
    public DatabaseInner getByRecommendedElasticPool(String resourceGroupName, String serverName, String recommendedElasticPoolName, String databaseName) {
        // Synchronous wrapper: block on the async call and unwrap the body
        return getByRecommendedElasticPoolWithServiceResponseAsync(resourceGroupName, serverName, recommendedElasticPoolName, databaseName).toBlocking().single().body();
    }
}
public class SparkLauncher {
    /**
     * Sets a custom Spark installation location for the application.
     *
     * @param sparkHome path to the Spark installation to use; must not be null
     * @return this launcher, for chaining
     */
    public SparkLauncher setSparkHome(String sparkHome) {
        checkNotNull(sparkHome, "sparkHome");
        // The child process reads SPARK_HOME from its environment
        builder.childEnv.put(ENV_SPARK_HOME, sparkHome);
        return this;
    }
}
public class PeerNodeInt {
    /**
     * Forwards a newly received ICE candidate to the peer as an RTCCandidate
     * message.
     *
     * @param sessionId the WebRTC session the candidate belongs to
     * @param index candidate (m-line) index
     * @param id candidate id
     * @param sdp candidate sdp
     */
    public void onCandidate(long sessionId, int index, String id, String sdp) {
        send(new RTCCandidate(deviceId, sessionId, index, id, sdp));
    }
}
public class Mappings {
    /**
     * (mapping) Converts to a map of values, with plain text keys.
     *
     * @param vBase base mapping for map values
     * @param constraints constraints applied to the map mapping
     * @param <V> base value type
     * @return newly created mapping
     */
    public static <V> Mapping<Map<String, V>> map(Mapping<V> vBase, Constraint... constraints) {
        // Delegate to the keyed overload using the default text key mapping
        return map(text(), vBase, constraints);
    }
}
public class RestClient { /** * Creates a head request connection to the specified path * @ param path The relative URL path * @ return An HttpURLConnection to the specified path * @ throws IOException If there was a problem creating the connection or URL */ private HttpURLConnection createHeadConnection ( String path ) throws IOException { } }
HttpURLConnection connection = createHttpURLConnectionToMassive ( path ) ; connection . setRequestMethod ( "HEAD" ) ; return connection ;
public class TableForm { /** * Add a Submit Button . * @ param tag The form name of the element * @ param label The label for the Button */ public Input addButton ( String tag , String label ) { } }
if ( buttons == null ) buttonsAtBottom ( ) ; Input e = new Input ( Input . Submit , tag , label ) ; if ( extendRow ) addField ( null , e ) ; else buttons . add ( e ) ; return e ;
public class GetCheckerIpRangesResult {
    /**
     * A complex type that contains a sorted list of IP ranges in CIDR format
     * for Amazon Route 53 health checkers.
     *
     * @return the sorted list of CIDR IP ranges; never null (lazily
     *         initialized to an empty list)
     */
    public java.util.List<String> getCheckerIpRanges() {
        // Lazy init keeps the getter null-safe for generated-model callers
        if (checkerIpRanges == null) {
            checkerIpRanges = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return checkerIpRanges;
    }
}
public class HttpChain {
    /**
     * Initializes this chain manager. Channel and chain names shouldn't
     * fluctuate as config changes, so derive stable names from the endpoint
     * id that are reused regardless of start/stop/enable/disable/modify.
     *
     * @param endpointId the id of the httpEndpoint
     * @param componentId the DS component id
     * @param cfBundle channel framework bundle
     */
    public void init(String endpointId, Object componentId, CHFWBundle cfBundle) {
        // All channel/chain names derive from this stable root
        final String root = endpointId + (isHttps ? "-ssl" : "");
        cfw = cfBundle.getFramework();
        endpointMgr = cfBundle.getEndpointManager();
        endpointName = root;
        tcpName = root;
        sslName = "SSL-" + root;
        httpName = "HTTP-" + root;
        dispatcherName = "HTTPD-" + root;
        chainName = "CHAIN-" + root;
        // If there is a chain that is in the CFW with this name, it was potentially
        // left over from a previous instance of the endpoint. There is no way to get
        // the state of the existing (old) CFW chain to set our chainState accordingly...
        // (in addition to the old chain pointing to old services and things..)
        // *IF* there is an old chain: stop, destroy, and remove it.
        try {
            ChainData cd = cfw.getChain(chainName);
            if (cd != null) {
                cfw.stopChain(cd, 0L); // no timeout: FORCE the stop.
                cfw.destroyChain(cd);
                cfw.removeChain(cd);
            }
        } catch (ChannelException e) {
            // Best-effort cleanup: log at debug and continue initialization
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(this, tc, "Error stopping chain " + chainName, this, e);
            }
        } catch (ChainException e) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(this, tc, "Error stopping chain " + chainName, this, e);
            }
        }
    }
}
public class MongoServerSelector { /** * Returns an SSLSocketFactory needed to connect to an SSL mongo server . The socket factory is created using the testTruststore . jks . * @ return * @ throws KeyStoreException * @ throws FileNotFoundException * @ throws IOException * @ throws NoSuchAlgorithmException * @ throws CertificateException * @ throws KeyManagementException */ private static SSLSocketFactory getSocketFactory ( File trustStore ) throws KeyStoreException , FileNotFoundException , IOException , NoSuchAlgorithmException , CertificateException , KeyManagementException { } }
KeyStore keystore = KeyStore . getInstance ( "JKS" ) ; InputStream truststoreInputStream = null ; try { truststoreInputStream = new FileInputStream ( trustStore ) ; keystore . load ( truststoreInputStream , KEYSTORE_PW ) ; } finally { if ( truststoreInputStream != null ) { truststoreInputStream . close ( ) ; } } TrustManagerFactory trustManagerFactory = TrustManagerFactory . getInstance ( "PKIX" ) ; trustManagerFactory . init ( keystore ) ; TrustManager [ ] trustMangers = trustManagerFactory . getTrustManagers ( ) ; SSLContext sslContext = SSLContext . getInstance ( "TLS" ) ; sslContext . init ( null , trustMangers , null ) ; SSLSocketFactory sslSocketFactory = sslContext . getSocketFactory ( ) ; return sslSocketFactory ;
public class Paging {
    /**
     * 解析page对象, 若参数错误, 默认desc — parses the page/sort request and hands the
     * resolved (page, size, sorts) triple to the given function. A blank sort
     * parameter yields an empty sort list; a malformed direction defaults to DESC.
     *
     * @param thirdFunction parsing function receiving (page, size, sorts)
     * @param <R> result type produced by the function
     * @param <F> function type
     * @return the result produced by thirdFunction
     */
    public <R, F extends ThirdFunction<R, Integer, Integer, List<Sort>>> R get(F thirdFunction) {
        if (StringUtils.isBlank(sort)) {
            // No sort requested: pass an empty, mutable sort list.
            return thirdFunction.apply(PageUtils.page(page), PageUtils.size(size), Lists.newArrayList());
        }
        // Sort syntax: "field-direction,field-direction,..."; a missing direction means DESC.
        List<Sort> sorts = Arrays.stream(sort.split(",")).map(s -> {
            try {
                String[] split = s.split("-");
                if (split.length == 1) {
                    // only a field name was given — default direction
                    return new Sort(this.apply(split[0]), DESC);
                }
                String direction = split[1];
                // anything that is neither ASC nor DESC falls back to DESC
                direction = !DESC.equalsIgnoreCase(direction) && !ASC.equalsIgnoreCase(direction) ? DESC : direction;
                return new Sort(this.apply(split[0]), direction);
            } catch (Exception e) {
                // NOTE(review): a parse failure produces a null element in the resulting
                // list — confirm downstream consumers tolerate null sorts.
                log.error(e.getMessage(), e);
                return null;
            }
        }).collect(Collectors.toList());
        return thirdFunction.apply(PageUtils.page(page), PageUtils.size(size), sorts);
    }
}
public class Rectangle { /** * Returns the intersection with the other rectangle or null if the two rectangles do not intersect . * @ param other the other rectangle . * @ return intersection rectangle or null . */ public Rectangle intersection ( Rectangle other ) { } }
int left = Math . max ( this . left , other . left ) ; int top = Math . max ( this . top , other . top ) ; int right = Math . min ( this . right , other . right ) ; int bottom = Math . min ( this . bottom , other . bottom ) ; if ( right >= left && bottom >= top ) { int height = bottom - top ; int width = right - left ; return new Rectangle ( top , left , bottom , right , width , height ) ; } else { return null ; }
public class SynchronizeFXWebsocketChannel {
    /**
     * Inform this channel that one of its clients has sent a message: the raw bytes
     * are deserialized and forwarded to the callback together with the session.
     *
     * @param message the message that was sent
     * @param session the client that sent the message
     */
    void newMessage(final byte[] message, final Session session) {
        // "recive" is the (misspelled) method name of the callback interface.
        callback.recive(serializer.deserialize(message), session);
    }
}
public class Helper {
    /**
     * Retrieve the meta data for a given document of a corpus. REST failures are
     * logged and surfaced as a UI notification; in that case an empty list is returned.
     *
     * @param toplevelCorpusName specifies the toplevel corpus
     * @param documentName specifies the document
     * @return the meta data for a single document, or an empty list on error
     */
    public static List<Annotation> getMetaDataDoc(String toplevelCorpusName, String documentName) {
        List<Annotation> result = new ArrayList<Annotation>();
        WebResource res = Helper.getAnnisWebResource();
        try {
            // REST path: meta/doc/{corpus}/{document}, both segments URL-escaped
            res = res.path("meta").path("doc").path(urlPathEscape.escape(toplevelCorpusName));
            res = res.path(urlPathEscape.escape(documentName));
            result = res.get(new GenericType<List<Annotation>>() {
            });
        } catch (UniformInterfaceException | ClientHandlerException ex) {
            log.error(null, ex);
            // only show a raw notification when the shared handler did not cover the error
            if (!AnnisBaseUI.handleCommonError(ex, "retrieve metadata")) {
                Notification.show("Remote exception: " + ex.getLocalizedMessage(), Notification.Type.WARNING_MESSAGE);
            }
        }
        return result;
    }
}
public class CmsListMetadata {
    /**
     * Sets the search action.<p>
     *
     * @param searchAction the search action to set (may be null to clear it)
     */
    public void setSearchAction(CmsListSearchAction searchAction) {
        m_searchAction = searchAction;
        if (m_searchAction != null) {
            // keep the action bound to this list's id
            m_searchAction.setListId(getListId());
        }
    }
}
public class SQLMergeClause {
    /**
     * Execute the clause and return the generated keys as a ResultSet.
     * If the dialect supports native MERGE, one (possibly batched) statement is
     * executed and its generated keys are returned wrapped so that closing the
     * ResultSet also closes the statement and ends the listener context.
     * Otherwise the merge is emulated: an UPDATE when a matching row exists
     * (returns an empty ResultSet, no generated keys) or a delegated INSERT.
     *
     * @return result set with generated keys
     */
    public ResultSet executeWithKeys() {
        context = startContext(connection(), metadata, entity);
        try {
            if (configuration.getTemplates().isNativeMerge()) {
                PreparedStatement stmt = null;
                if (batches.isEmpty()) {
                    // single-statement execution path
                    stmt = createStatement(true);
                    listeners.notifyMerge(entity, metadata, keys, columns, values, subQuery);
                    listeners.preExecute(context);
                    stmt.executeUpdate();
                    listeners.executed(context);
                } else {
                    // batched path: generated keys only make sense for one SQL string
                    Collection<PreparedStatement> stmts = createStatements(true);
                    if (stmts != null && stmts.size() > 1) {
                        throw new IllegalStateException("executeWithKeys called with batch statement and multiple SQL strings");
                    }
                    stmt = stmts.iterator().next();
                    listeners.notifyMerges(entity, metadata, batches);
                    listeners.preExecute(context);
                    stmt.executeBatch();
                    listeners.executed(context);
                }
                final Statement stmt2 = stmt;
                ResultSet rs = stmt.getGeneratedKeys();
                // Closing the returned ResultSet closes the statement and ends the context.
                return new ResultSetAdapter(rs) {
                    @Override
                    public void close() throws SQLException {
                        try {
                            super.close();
                        } finally {
                            stmt2.close();
                            reset();
                            endContext(context);
                        }
                    }
                };
            } else {
                if (hasRow()) {
                    // update
                    // NOTE(review): this branch builds and populates the update clause but
                    // never appears to execute it before returning — confirm intended.
                    SQLUpdateClause update = new SQLUpdateClause(connection(), configuration, entity);
                    update.addListener(listeners);
                    populate(update);
                    addKeyConditions(update);
                    reset();
                    endContext(context);
                    return EmptyResultSet.DEFAULT;
                } else {
                    // insert: delegate; the insert clause manages its own cleanup
                    SQLInsertClause insert = new SQLInsertClause(connection(), configuration, entity);
                    insert.addListener(listeners);
                    populate(insert);
                    return insert.executeWithKeys();
                }
            }
        } catch (SQLException e) {
            onException(context, e);
            reset();
            endContext(context);
            throw configuration.translate(queryString, constants, e);
        }
    }
}
public class Serial {
    /**
     * Compares two numbers using serial arithmetic. The numbers are assumed
     * to be 32 bit unsigned integers stored in longs.
     *
     * @param serial1 The first integer
     * @param serial2 The second integer
     * @return 0 if the 2 numbers are equal, a positive number if serial1 is greater
     *         than serial2, and a negative number if serial2 is greater than serial1.
     * @throws IllegalArgumentException serial1 or serial2 is out of range
     */
    public static int compare(long serial1, long serial2) {
        if (serial1 < 0 || serial1 > MAX32)
            throw new IllegalArgumentException(serial1 + " out of range");
        if (serial2 < 0 || serial2 > MAX32)
            throw new IllegalArgumentException(serial2 + " out of range");
        long diff = serial1 - serial2;
        // Fold differences of more than half the 32-bit space back around, so that a
        // value that has "wrapped" past the other still compares as greater/smaller
        // per serial arithmetic.
        if (diff >= MAX32)
            diff -= (MAX32 + 1);
        else if (diff < -MAX32)
            diff += (MAX32 + 1);
        return (int) diff;
    }
}
public class SpiUtil { /** * This function returns the file parameter value based on the file * content . * @ param faxJob * The fax job object * @ return The file parameter value */ private static String getFileParameterValue ( FaxJob faxJob ) { } }
String value = null ; File file = faxJob . getFile ( ) ; if ( file != null ) { try { // read file ( only text files supported ) value = IOHelper . readTextFile ( faxJob . getFile ( ) ) ; } catch ( IOException exception ) { throw new FaxException ( "Error while reading file." , exception ) ; } } return value ;
public class AddTagAction {
    /**
     * {@inheritDoc}
     * Builds a new custom tag from the request parameters (name plus
     * "source.attribute"), appends it to the tagging configuration's tag array,
     * and redirects back.
     */
    @Override
    public ActionCommand execute(ActionMapping mapping, FormBean formBean, HttpServletRequest request, HttpServletResponse response) throws Exception {
        // Getting parameters for new tag
        String tagName = request.getParameter(PARAM_TAG_NAME);
        String attributeSource = request.getParameter(PARAM_ATTRIBUTE_SOURCE);
        String attributeName = request.getParameter(PARAM_ATTRIBUTE_NAME);
        CustomTag tag = new CustomTag();
        tag.setName(tagName);
        // the tag attribute is stored as "<source>.<name>"
        tag.setAttribute(attributeSource + '.' + attributeName);
        TaggingConfig taggingConfig = MoskitoConfigurationHolder.getConfiguration().getTaggingConfig();
        CustomTag[] tags = taggingConfig.getCustomTags();
        // Enlarging tags array by one and placing the new tag at the end
        int tagsArraySize = tags.length;
        tags = Arrays.copyOf(tags, tagsArraySize + 1);
        tags[tagsArraySize] = tag;
        taggingConfig.setCustomTags(tags);
        return mapping.redirect();
    }
}
public class UnitFactorMap { /** * Return an array of factors to convert the array of units to the given base . */ public List < UnitValue > getFactors ( Unit base , RoundingMode mode , Unit ... units ) { } }
List < UnitValue > result = new ArrayList < > ( ) ; for ( int i = 0 ; i < units . length ; i ++ ) { UnitFactor factor = resolve ( units [ i ] , base ) ; if ( factor != null ) { BigDecimal n = factor . rational ( ) . compute ( mode ) ; result . add ( new UnitValue ( n , units [ i ] ) ) ; } } return result ;
public class JDefaultAddress { /** * random 2 letter state * @ param allStates include all states * @ return state code string */ public static String stateAbbr ( boolean allStates ) { } }
String state = fetchString ( "address.state_abbr" ) ; if ( allStates == true ) { return state ; } else { while ( state . equalsIgnoreCase ( "FM" ) || state . equalsIgnoreCase ( "FL" ) || state . equalsIgnoreCase ( "GU" ) || state . equalsIgnoreCase ( "PW" ) || state . equalsIgnoreCase ( "PA" ) || state . equalsIgnoreCase ( "PR" ) || state . equalsIgnoreCase ( "AE" ) || state . equalsIgnoreCase ( "AA" ) || state . equalsIgnoreCase ( "AP" ) || state . equalsIgnoreCase ( "MP" ) || state . equalsIgnoreCase ( "VI" ) || state . equalsIgnoreCase ( "AS" ) || state . equalsIgnoreCase ( "MH" ) ) { state = stateAbbr ( true ) ; } } return state ;
public class Transformers {
    /**
     * Transform to ticket: only the id is carried over from the acknowledgement.
     *
     * @param ticketAck the ticket ack
     * @return a new Ticket carrying the ack's id
     */
    @Transformer
    public Ticket transformToTicket(TicketAck ticketAck) {
        Ticket ticket = new Ticket();
        ticket.setId(ticketAck.getId());
        return ticket;
    }
}
public class AdministrationDao { /** * Get the real schema name and version as used by the database . * @ return */ @ Transactional ( readOnly = true , propagation = Propagation . REQUIRED ) public String getDatabaseSchemaVersion ( ) { } }
try { List < Map < String , Object > > result = getJdbcTemplate ( ) . queryForList ( "SELECT \"value\" FROM repository_metadata WHERE \"name\"='schema-version'" ) ; String schema = result . size ( ) > 0 ? ( String ) result . get ( 0 ) . get ( "value" ) : "" ; return schema ; } catch ( DataAccessException ex ) { String error = "Wrong database schema (too old to get the exact number), " + "please initialize the database." ; log . error ( error ) ; } return "" ;
public class Connection {
    /**
     * {@inheritDoc}
     * Only {@code ResultSet.CLOSE_CURSORS_AT_COMMIT} holdability is accepted; the
     * call is then delegated to the three-argument overload.
     *
     * @throws java.sql.SQLFeatureNotSupportedException if |resultSetHoldability| is not
     *         ResultSet.CLOSE_CURSORS_AT_COMMIT
     */
    public PreparedStatement prepareStatement(final String sql, final int resultSetType, final int resultSetConcurrency, final int resultSetHoldability) throws SQLException {
        if (resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT) {
            throw new SQLFeatureNotSupportedException("Unsupported result set holdability");
        } // end of if
        return prepareStatement(sql, resultSetType, resultSetConcurrency);
    }
}
public class CmsObject {
    /**
     * Returns the first ancestor folder matching the filter criteria.<p>
     * If no folder matching the filter criteria is found, null is returned.<p>
     *
     * @param resourcename the name of the resource to start (full current site relative path)
     * @param filter the resource filter to match while reading the ancestors
     * @return the first ancestor folder matching the filter criteria or null if no folder was found
     * @throws CmsException if something goes wrong
     */
    public CmsFolder readAncestor(String resourcename, CmsResourceFilter filter) throws CmsException {
        // read the start resource with ALL so the walk starts even from resources
        // the given filter itself would reject
        CmsResource resource = readResource(resourcename, CmsResourceFilter.ALL);
        return m_securityManager.readAncestor(m_context, resource, filter);
    }
}
public class SystemInputJson {
    /**
     * Returns the between condition represented by the given JSON object or an empty
     * value if no such condition is found.
     * The lower bound is exclusive (AssertMore) when EXCLUSIVE_MIN_KEY is present and
     * inclusive (AssertNotLess, using MIN_KEY) otherwise; the upper bound mirrors this
     * with EXCLUSIVE_MAX_KEY / MAX_KEY.
     */
    private static Optional<ICondition> asBetween(JsonObject json) {
        return Optional.of(json)
                // only objects carrying the between key qualify
                .filter(j -> j.containsKey(BETWEEN_KEY))
                .map(j -> j.getJsonObject(BETWEEN_KEY))
                .map(b -> new Between(
                        // lower bound: exclusive when the exclusive key is present
                        b.containsKey(EXCLUSIVE_MIN_KEY)
                                ? new AssertMore(b.getString(PROPERTY_KEY), b.getInt(EXCLUSIVE_MIN_KEY))
                                : new AssertNotLess(b.getString(PROPERTY_KEY), b.getInt(MIN_KEY)),
                        // upper bound: exclusive when the exclusive key is present
                        b.containsKey(EXCLUSIVE_MAX_KEY)
                                ? new AssertLess(b.getString(PROPERTY_KEY), b.getInt(EXCLUSIVE_MAX_KEY))
                                : new AssertNotMore(b.getString(PROPERTY_KEY), b.getInt(MAX_KEY))));
    }
}
public class JSONValue {
    /**
     * Parse JSON text into java object from the input source, returning null on any
     * failure. Please use parseWithException() if you don't want to ignore the exception.
     *
     * @see org.json.simple.parser.JSONParser#parse(java.io.Reader)
     * @see #parseWithException(java.io.Reader)
     * @param in the reader to parse from
     * @return instance of JSONObject, JSONArray, String, Number, Boolean, or null
     * @deprecated this method may throw an {@code Error} instead of returning
     *             {@code null}; please use parseWithException(Reader) instead
     */
    public static Object parse(Reader in) {
        try {
            JSONParser parser = new JSONParser();
            return parser.parse(in);
        } catch (Exception e) {
            // deliberately swallowed: this deprecated API signals failure with null
            return null;
        }
    }
}
public class RecognizerErrorHandler {
    /**
     * Format an error message as expected by ANTLR. It is basically the same error
     * message that ANTLR BaseRecognizer generates with some additional data.
     * Also used to log debugging information: when debug logging is enabled, a
     * detailed exception-type-specific message (decision numbers, unexpected token,
     * rule invocation stack) is logged; the returned user message is unaffected.
     *
     * @param log the logger to use at debug time
     * @param recognizer the lexer or parser who generated the error
     * @param e the exception that occurred
     * @param superMessage the error message that the super class generated
     * @param tokenNames list of token names
     * @return a formatted error message
     */
    public static String getErrorMessage(final Logger log, final BaseRecognizer recognizer, final RecognitionException e, final String superMessage, final String[] tokenNames) {
        if (log.isDebugEnabled()) {
            // the superclass name identifies the generated recognizer for the stack trace
            List<?> stack = BaseRecognizer.getRuleInvocationStack(e, recognizer.getClass().getSuperclass().getName());
            String debugMsg = recognizer.getErrorHeader(e) + " " + e.getClass().getSimpleName() + ": " + superMessage + ":";
            if (e instanceof NoViableAltException) {
                NoViableAltException nvae = (NoViableAltException) e;
                debugMsg += " (decision=" + nvae.decisionNumber + " state=" + nvae.stateNumber + ")" + " decision=<<" + nvae.grammarDecisionDescription + ">>";
            } else if (e instanceof UnwantedTokenException) {
                UnwantedTokenException ute = (UnwantedTokenException) e;
                debugMsg += " (unexpected token=" + toString(ute.getUnexpectedToken(), tokenNames) + ")";
            } else if (e instanceof EarlyExitException) {
                EarlyExitException eea = (EarlyExitException) e;
                debugMsg += " (decision=" + eea.decisionNumber + ")";
            }
            debugMsg += " ruleStack=" + stack.toString();
            log.debug(debugMsg);
        }
        // the user-facing message is built independently of the debug logging above
        return makeUserMsg(e, superMessage);
    }
}
public class DeviceAttribute_3DAODefaultImpl {
    /**
     * Returns the number of read elements: the read dimension's x size times
     * DIM_MINI of its y size.
     * NOTE(review): DIM_MINI presumably clamps a zero y dimension to 1 for
     * scalar/1-D attributes — confirm against its definition.
     *
     * @return the number of read values
     * @throws DevFailed if the attribute carries an error (raised by manageExceptions)
     */
    public int getNbRead() throws DevFailed {
        manageExceptions("getNbRead");
        return attrval.r_dim.dim_x * DIM_MINI(attrval.r_dim.dim_y);
    }
}
public class PackageIndexWriter { /** * Depending upon the grouping information and their titles , add * separate table indices for each package group . * @ param body the documentation tree to which the index will be added */ protected void addIndex ( Content body ) { } }
for ( int i = 0 ; i < groupList . size ( ) ; i ++ ) { String groupname = groupList . get ( i ) ; List < PackageDoc > list = groupPackageMap . get ( groupname ) ; if ( list != null && list . size ( ) > 0 ) { addIndexContents ( list . toArray ( new PackageDoc [ list . size ( ) ] ) , groupname , configuration . getText ( "doclet.Member_Table_Summary" , groupname , configuration . getText ( "doclet.packages" ) ) , body ) ; } }
public class BatchScheduleActionDeleteRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @param batchScheduleActionDeleteRequest the request to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(BatchScheduleActionDeleteRequest batchScheduleActionDeleteRequest, ProtocolMarshaller protocolMarshaller) {
        if (batchScheduleActionDeleteRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // only field of this request: the list of action names
            protocolMarshaller.marshall(batchScheduleActionDeleteRequest.getActionNames(), ACTIONNAMES_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ContextUtil { /** * Returns a map of resource - id , result based on an XACML response . * @ param resCtx * the XACML response * @ return the Map of resource - id and result */ public Map < String , Result > makeResultMap ( ResponseCtx resCtx ) { } }
@ SuppressWarnings ( "unchecked" ) Iterator < Result > i = resCtx . getResults ( ) . iterator ( ) ; Map < String , Result > resultMap = new HashMap < String , Result > ( ) ; while ( i . hasNext ( ) ) { Result r = i . next ( ) ; resultMap . put ( r . getResource ( ) , r ) ; } return resultMap ;
public class Bruch { /** * Multiplikation zweier Brueche . * @ param operand der zweite Bruch , mit dem multipliziert wird . * @ return mulitiplizierter Bruch , evtl . gekuerzt */ public AbstractNumber multiply ( Bruch operand ) { } }
BigInteger z = getZaehler ( ) . multiply ( operand . getZaehler ( ) ) ; BigInteger n = getNenner ( ) . multiply ( operand . getNenner ( ) ) ; return Bruch . of ( z , n ) . kuerzen ( ) ;
public class CacheManagerJmxRegistration { /** * On stop , the mbeans are unregistered . */ public void stop ( ) { } }
// This method might get called several times . if ( stopped ) return ; if ( needToUnregister ) { try { unregisterMBeans ( resourceDMBeans ) ; needToUnregister = false ; } catch ( Exception e ) { log . problemsUnregisteringMBeans ( e ) ; } } stopped = true ;
public class JobApi { /** * Download a single artifact file from within the job ' s artifacts archive . * Only a single file is going to be extracted from the archive and streamed to a client . * < pre > < code > GitLab Endpoint : GET / projects / : id / jobs / : job _ id / artifacts / * artifact _ path < / code > < / pre > * @ param projectIdOrPath id , path of the project , or a Project instance holding the project ID or path * @ param jobId the unique job identifier * @ param artifactPath the Path to a file inside the artifacts archive * @ param directory the File instance of the directory to save the file to , if null will use " java . io . tmpdir " * @ return a File instance pointing to the download of the specified artifacts file * @ throws GitLabApiException if any exception occurs */ public File downloadSingleArtifactsFile ( Object projectIdOrPath , Integer jobId , Path artifactPath , File directory ) throws GitLabApiException { } }
String path = artifactPath . toString ( ) . replace ( "\\" , "/" ) ; Response response = get ( Response . Status . OK , getDefaultPerPageParam ( ) , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "jobs" , jobId , "artifacts" , path ) ; try { if ( directory == null ) directory = new File ( System . getProperty ( "java.io.tmpdir" ) ) ; String filename = artifactPath . getFileName ( ) . toString ( ) ; File file = new File ( directory , filename ) ; InputStream in = response . readEntity ( InputStream . class ) ; Files . copy ( in , file . toPath ( ) , StandardCopyOption . REPLACE_EXISTING ) ; return ( file ) ; } catch ( IOException ioe ) { throw new GitLabApiException ( ioe ) ; }
public class Spinner {
    /**
     * Call the OnItemClickListener, if it is defined. Performs all normal actions
     * associated with clicking: reporting accessibility event, playing a sound, etc.
     * Delegates directly to the wrapped view.
     *
     * @param view the view within the AdapterView that was clicked
     * @param position the position of the view in the adapter
     * @param id the row id of the item that was clicked
     * @return true if there was an assigned OnItemClickListener that was called,
     *         false otherwise
     */
    public final boolean performItemClick(final View view, final int position, final long id) {
        return getView().performItemClick(view, position, id);
    }
}
public class Symmetry454Date {
    /**
     * Obtains the current {@code Symmetry454Date} from the specified clock.
     * This will query the specified clock to obtain the current date - today.
     * Using this method allows the use of an alternate clock for testing.
     * The alternate clock may be introduced using {@linkplain Clock dependency injection}.
     *
     * @param clock the clock to use, not null
     * @return the current date, not null
     * @throws DateTimeException if the current date cannot be obtained
     */
    public static Symmetry454Date now(Clock clock) {
        // take "today" from the ISO calendar and convert via the shared epoch-day count
        LocalDate now = LocalDate.now(clock);
        return Symmetry454Date.ofEpochDay(now.toEpochDay());
    }
}
public class RamResourceProviderOld {
    /**
     * Initialize the ram resource provider: reads the optional "case-sensitive" and
     * "lock-timeout" arguments, configures the lock accordingly and creates an empty
     * in-memory root directory.
     *
     * @param scheme the URL scheme; kept unless blank
     * @param arguments provider arguments, may be null
     * @return this provider (RamResource)
     */
    @Override
    public ResourceProvider init(String scheme, Map arguments) {
        if (!StringUtil.isEmpty(scheme))
            this.scheme = scheme;
        if (arguments != null) {
            this.arguments = arguments;
            // case-sensitive flag (defaults to true when present but unparsable)
            Object oCaseSensitive = arguments.get("case-sensitive");
            if (oCaseSensitive != null) {
                caseSensitive = Caster.toBooleanValue(oCaseSensitive, true);
            }
            // lock-timeout (keeps the current value when unparsable)
            Object oTimeout = arguments.get("lock-timeout");
            if (oTimeout != null) {
                lockTimeout = Caster.toLongValue(oTimeout, lockTimeout);
            }
        }
        lock.setLockTimeout(lockTimeout);
        lock.setCaseSensitive(caseSensitive);
        // fresh, empty in-memory root directory
        root = new RamResourceCore(null, RamResourceCore.TYPE_DIRECTORY, "");
        return this;
    }
}
public class SingletonSelectableVirtualCircuit {
    /**
     * Join channel pair to this virtual circuit by delegating to the
     * selector/byte-channel overload.
     * NOTE(review): assumes both arguments also implement ByteChannel — the casts
     * throw ClassCastException otherwise; confirm callers guarantee this.
     *
     * @param ch1 first channel
     * @param ch2 second channel
     * @throws IOException on I/O failure while joining
     */
    public void join(SelectableBySelector ch1, SelectableBySelector ch2) throws IOException {
        join(ch1.getSelector(), ch2.getSelector(), (ByteChannel) ch1, (ByteChannel) ch2);
    }
}
public class UnicodeSetSpanner {
    /**
     * Returns the number of matching characters found in a character sequence, using
     * SpanCondition.SIMPLE. The code alternates spans; see the class doc for
     * {@link UnicodeSetSpanner} for a note about boundary conditions.
     *
     * @param sequence the sequence to count characters in
     * @param countMethod whether to treat an entire span as a match, or individual
     *        elements as matches
     * @return the count; zero if there are none
     */
    public int countIn(CharSequence sequence, CountMethod countMethod) {
        // convenience overload fixing the span condition to SIMPLE
        return countIn(sequence, countMethod, SpanCondition.SIMPLE);
    }
}
public class SipServletMessageImpl {
    /**
     * Sets a header on the underlying SIP message, replacing any existing value.
     * System headers are rejected. Headers registered as list headers (e.g. Allow)
     * may carry multiple comma-separated values and are removed and re-added one
     * header at a time.
     *
     * @see javax.servlet.sip.SipServletMessage#setHeader(java.lang.String, java.lang.String)
     * @throws NullPointerException if name or value is null
     * @throws IllegalArgumentException if the header is a system header or cannot be created
     */
    public void setHeader(String name, String value) {
        if (logger.isDebugEnabled()) {
            logger.debug("setHeader - name=" + name + ", value=" + value);
        }
        if (name == null) {
            throw new NullPointerException("name parameter is null");
        }
        if (value == null) {
            throw new NullPointerException("value parameter is null");
        }
        if (isSystemHeader(getModifiableRule(name))) {
            throw new IllegalArgumentException(name + " is a system header !");
        }
        // committed messages must not be modified
        checkCommitted();
        try {
            // Dealing with Allow: INVITE, ACK, CANCEL, OPTIONS, BYE kind of headers
            if (JainSipUtils.LIST_HEADER_NAMES.contains(name)) {
                this.message.removeHeader(name);
                List<Header> headers = SipFactory.getInstance().createHeaderFactory().createHeaders(name + ":" + value);
                for (Header header : headers) {
                    this.message.addHeader(header);
                }
            } else {
                // dealing with non list headers and extension header
                Header header = SipFactory.getInstance().createHeaderFactory().createHeader(name, value);
                this.message.setHeader(header);
            }
        } catch (Exception e) {
            throw new IllegalArgumentException("Error creating header!", e);
        }
    }
}
public class NullConfigurationPersister {
    /**
     * {@inheritDoc}
     * No-op persister: the model and affected addresses are ignored and the shared
     * null persistence resource is returned.
     */
    @Override
    public PersistenceResource store(final ModelNode model, Set<PathAddress> affectedAddresses) {
        return NullPersistenceResource.INSTANCE;
    }
}
public class BigQueryOutputConfiguration { /** * A helper function to set the required output keys in the given configuration . * @ param conf the configuration to set the keys on . * @ param qualifiedOutputTableId the qualified id of the output table in the form : < code > ( Optional * ProjectId ) : [ DatasetId ] . [ TableId ] < / code > . If the project id is missing , the default project * id is attempted { @ link BigQueryConfiguration # PROJECT _ ID _ KEY } . * @ param outputTableSchema the schema of the BigQuery output table . If the schema is null , * BigQuery will attempt to auto detect the schema . When using avro formatted data , a schema * is not required as avro stores the schema in the file . * @ param outputGcsPath the path in GCS to stage data in . Example : ' gs : / / bucket / job ' . * @ param outputFileFormat the formatting of the data being written by the output format class . * @ param outputFormatClass the file output format that will write files to GCS . * @ throws IOException */ @ SuppressWarnings ( "rawtypes" ) public static void configure ( Configuration conf , String qualifiedOutputTableId , BigQueryTableSchema outputTableSchema , String outputGcsPath , BigQueryFileFormat outputFileFormat , Class < ? extends FileOutputFormat > outputFormatClass ) throws IOException { } }
configure ( conf , qualifiedOutputTableId , BigQueryTableHelper . getTableSchemaJson ( outputTableSchema . get ( ) ) , outputGcsPath , outputFileFormat , outputFormatClass ) ;
public class FLVReader { /** * { @ inheritDoc } */ @ Override public boolean hasVideo ( ) { } }
KeyFrameMeta meta = analyzeKeyFrames ( ) ; if ( meta == null ) { return false ; } return ( ! meta . audioOnly && meta . positions . length > 0 ) ;
public class CompareTwoImagePanel { /** * Compute individually how each image will be scaled */ private void computeScales ( ) { } }
int width = getWidth ( ) ; int height = getHeight ( ) ; width = ( width - borderSize ) / 2 ; // compute the scale factor for each image scaleLeft = scaleRight = 1 ; if ( leftImage . getWidth ( ) > width || leftImage . getHeight ( ) > height ) { double scaleX = ( double ) width / ( double ) leftImage . getWidth ( ) ; double scaleY = ( double ) height / ( double ) leftImage . getHeight ( ) ; scaleLeft = Math . min ( scaleX , scaleY ) ; } if ( rightImage . getWidth ( ) > width || rightImage . getHeight ( ) > height ) { double scaleX = ( double ) width / ( double ) rightImage . getWidth ( ) ; double scaleY = ( double ) height / ( double ) rightImage . getHeight ( ) ; scaleRight = Math . min ( scaleX , scaleY ) ; }
public class SimpleAttachable {
    /**
     * {@inheritDoc}
     * Stores the value under the given key and returns the previous value (or null).
     * Both the stored and the returned values are cast through the key for type safety.
     */
    public synchronized <T> T putAttachment(final AttachmentKey<T> key, final T value) {
        Assert.checkNotNullParam("key", key);
        return key.cast(attachments.put(key, key.cast(value)));
    }
}
public class StateMachineDefinition {
    /**
     * Add a transition to the state machine. Transition from a state to itself is allowed.
     * Each (fromState, transition) pair may only be defined once.
     *
     * @param newTransition the transition to be added, must not be null
     * @param fromState from state of the transition, must have been defined already
     * @param toState to state of the transition, must have been defined already
     * @return this definition, for chaining
     * @throws IllegalArgumentException if the transition is null or already defined for fromState
     */
    public StateMachineDefinition<S, T> addTransition(T newTransition, S fromState, S toState) {
        Preconditions.checkArgument(newTransition != null, "Transition name cannot be empty");
        // look up the numeric ids of both states
        Integer fromStateId = getStateId(fromState);
        Integer toStateId = getStateId(toState);
        Transition<S> transition = new Transition<S>();
        transition.fromStateId = fromStateId;
        transition.toStateId = toStateId;
        transition.fromState = fromState;
        transition.toState = toState;
        // reject duplicate definitions of the same (fromState, transition) pair
        Transition<S> previousValue = transitions.put(new DoubleValueBean<S, T>(fromState, newTransition), transition);
        if (previousValue != null) {
            throw new IllegalArgumentException("Transition '" + transition + "' from state '" + fromState + "' has already been defined.");
        }
        validTransitions.get(fromState).add(newTransition);
        return this;
    }
}
public class CmsToolManager {
    /**
     * Returns the <code>{@link CmsToolUserData}</code> object for a given user.<p>
     * Lazily creates and stores default user data (default root key plus default
     * tool paths for every configured tool root) on first access.
     *
     * @param wp the workplace object
     * @return the current user data
     */
    public CmsToolUserData getUserData(CmsWorkplace wp) {
        CmsToolUserData userData = wp.getSettings().getToolUserData();
        if (userData == null) {
            // first access: build defaults for every configured tool root
            userData = new CmsToolUserData();
            userData.setRootKey(ROOTKEY_DEFAULT);
            Iterator<CmsToolRootHandler> it = getToolRoots().iterator();
            while (it.hasNext()) {
                CmsToolRootHandler root = it.next();
                userData.setCurrentToolPath(root.getKey(), TOOLPATH_SEPARATOR);
                userData.setBaseTool(root.getKey(), TOOLPATH_SEPARATOR);
            }
            // persist the defaults back into the workplace settings
            wp.getSettings().setToolUserData(userData);
        }
        return userData;
    }
}
public class GroupsApi {
    /**
     * Leave a group.
     * <br>
     * If the user is the only administrator left, and there are other members,
     * the oldest member will be promoted to administrator.
     * <br>
     * If the user is the last person in the group, the group will be deleted.
     * <br>
     * This method requires authentication with 'delete' permission.
     * <br>
     *
     * @param groupId (Required) The NSID of the Group to leave.
     * @param deletePhotos Delete all photos by this user from the group.
     * @return object with response from Flickr indicating ok or fail.
     * @throws JinxException if required parameters are missing, or if there are any errors.
     * @see <a href="https://www.flickr.com/services/api/flickr.groups.leave.html">flickr.groups.leave</a>
     */
    public Response leave(String groupId, boolean deletePhotos) throws JinxException {
        JinxUtils.validateParams(groupId);
        Map<String, String> params = new TreeMap<>();
        params.put("method", "flickr.groups.leave");
        params.put("group_id", groupId);
        // optional flag: only sent when the caller asked to delete their photos
        if (deletePhotos) {
            params.put("delete_photos", "true");
        }
        return jinx.flickrPost(params, Response.class);
    }
}
public class BundlePackagerMojo { /** * The bundle packaging has to be triggered when : a Java source file is modified , * an internal resource is modified or the ` osgi . bnd ` file ( containing BND instructions ) is modified . * @ param file the file * @ return { @ literal true } if an event on the given file should trigger the recreation of the bundle . */ @ Override public boolean accept ( File file ) { } }
return WatcherUtils . isInDirectory ( file , WatcherUtils . getJavaSource ( basedir ) ) || WatcherUtils . isInDirectory ( file , WatcherUtils . getResources ( basedir ) ) || file . getAbsolutePath ( ) . equals ( new File ( basedir , INSTRUCTIONS_FILE ) . getAbsolutePath ( ) ) ;
public class BooleanExpressionParser { /** * Evaluates a sub expression within a pair of parentheses and pushes its result onto the stack of values * @ param operators Stack of operators * @ param values Stack of values */ private static void evaluateSubexpression ( final Deque < String > operators , final Deque < String > values ) { } }
String operator = operators . pop ( ) ; while ( ! ( operator ) . equals ( SeparatorToken . OPEN_PARENTHESIS . toString ( ) ) ) { values . push ( getBooleanResultAsString ( operator , values . pop ( ) , values . pop ( ) ) ) ; operator = operators . pop ( ) ; }
public class MultiLayerNetwork { /** * Fit the model for one iteration on the provided data * @ param features the examples to classify ( one example in each row ) * @ param labels the example labels ( a binary outcome matrix ) * @ param featuresMask The mask array for the features ( used for variable length time series , etc ) . May be null . * @ param labelsMask The mask array for the labels ( used for variable length time series , etc ) . May be null . */ public synchronized void fit ( INDArray features , INDArray labels , INDArray featuresMask , INDArray labelsMask ) { } }
try { fitHelper ( features , labels , featuresMask , labelsMask ) ; } catch ( OutOfMemoryError e ) { CrashReportingUtil . writeMemoryCrashDump ( this , e ) ; throw e ; }
public class BlockBasedDataStore { /** * / * ( non - Javadoc ) * @ see net . timewalker . ffmq4 . storage . data . impl . AbstractBlockBasedDataStore # extendStoreFiles ( int , int ) */ @ Override protected void extendStoreFiles ( int oldBlockCount , int newBlockCount ) throws DataStoreException { } }
try { // Update allocation table allocationTableRandomAccessFile . setLength ( AT_HEADER_SIZE + ( long ) newBlockCount * AT_BLOCK_SIZE ) ; for ( int n = oldBlockCount ; n < newBlockCount ; n ++ ) writeAllocationBlock ( n ) ; allocationTableRandomAccessFile . seek ( AT_HEADER_BLOCKCOUNT_OFFSET ) ; allocationTableRandomAccessFile . writeInt ( newBlockCount ) ; // Update data file dataRandomAccessFile . setLength ( ( long ) blockSize * newBlockCount ) ; } catch ( IOException e ) { throw new DataStoreException ( "Could not extends store to " + newBlockCount + " blocks : " + dataFile . getAbsolutePath ( ) , e ) ; }
public class logical_disk { /** * < pre > * Converts API response of bulk operation into object and returns the object array in case of get request . * < / pre > */ protected base_resource [ ] get_nitro_bulk_response ( nitro_service service , String response ) throws Exception { } }
logical_disk_responses result = ( logical_disk_responses ) service . get_payload_formatter ( ) . string_to_resource ( logical_disk_responses . class , response ) ; if ( result . errorcode != 0 ) { if ( result . errorcode == SESSION_NOT_EXISTS ) service . clear_session ( ) ; throw new nitro_exception ( result . message , result . errorcode , ( base_response [ ] ) result . logical_disk_response_array ) ; } logical_disk [ ] result_logical_disk = new logical_disk [ result . logical_disk_response_array . length ] ; for ( int i = 0 ; i < result . logical_disk_response_array . length ; i ++ ) { result_logical_disk [ i ] = result . logical_disk_response_array [ i ] . logical_disk [ 0 ] ; } return result_logical_disk ;
public class NodeID { /** * Tests whether supplied IRI was generated by this parser in order to label * an anonymous node . * @ param uri * the IRI * @ return { @ code true } if the IRI was generated by this parser to label an * anonymous node */ public static boolean isAnonymousNodeIRI ( String uri ) { } }
return uri != null && uri . startsWith ( PREFIX ) && uri . contains ( NODE_ID_PREFIX ) ;
public class RegionInstanceGroupManagerClient { /** * Flags the specified instances in the managed instance group to be immediately recreated . The * instances are deleted and recreated using the current instance template for the managed * instance group . This operation is marked as DONE when the flag is set even if the instances * have not yet been recreated . You must separately verify the status of the recreating action * with the listmanagedinstances method . * < p > If the group is part of a backend service that has enabled connection draining , it can take * up to 60 seconds after the connection draining duration has elapsed before the VM instance is * removed or deleted . * < p > You can specify a maximum of 1000 instances with this method per request . * < p > Sample code : * < pre > < code > * try ( RegionInstanceGroupManagerClient regionInstanceGroupManagerClient = RegionInstanceGroupManagerClient . create ( ) ) { * ProjectRegionInstanceGroupManagerName instanceGroupManager = ProjectRegionInstanceGroupManagerName . of ( " [ PROJECT ] " , " [ REGION ] " , " [ INSTANCE _ GROUP _ MANAGER ] " ) ; * RegionInstanceGroupManagersRecreateRequest regionInstanceGroupManagersRecreateRequestResource = RegionInstanceGroupManagersRecreateRequest . newBuilder ( ) . build ( ) ; * Operation response = regionInstanceGroupManagerClient . recreateInstancesRegionInstanceGroupManager ( instanceGroupManager . toString ( ) , regionInstanceGroupManagersRecreateRequestResource ) ; * < / code > < / pre > * @ param instanceGroupManager Name of the managed instance group . * @ param regionInstanceGroupManagersRecreateRequestResource * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation recreateInstancesRegionInstanceGroupManager ( String instanceGroupManager , RegionInstanceGroupManagersRecreateRequest regionInstanceGroupManagersRecreateRequestResource ) { } }
RecreateInstancesRegionInstanceGroupManagerHttpRequest request = RecreateInstancesRegionInstanceGroupManagerHttpRequest . newBuilder ( ) . setInstanceGroupManager ( instanceGroupManager ) . setRegionInstanceGroupManagersRecreateRequestResource ( regionInstanceGroupManagersRecreateRequestResource ) . build ( ) ; return recreateInstancesRegionInstanceGroupManager ( request ) ;
public class AWSSimpleSystemsManagementClient { /** * Retrieves the high - level patch state for the instances in the specified patch group . * @ param describeInstancePatchStatesForPatchGroupRequest * @ return Result of the DescribeInstancePatchStatesForPatchGroup operation returned by the service . * @ throws InternalServerErrorException * An error occurred on the server side . * @ throws InvalidFilterException * The filter name is not valid . Verify the you entered the correct name and try again . * @ throws InvalidNextTokenException * The specified token is not valid . * @ sample AWSSimpleSystemsManagement . DescribeInstancePatchStatesForPatchGroup * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ssm - 2014-11-06 / DescribeInstancePatchStatesForPatchGroup " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeInstancePatchStatesForPatchGroupResult describeInstancePatchStatesForPatchGroup ( DescribeInstancePatchStatesForPatchGroupRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeInstancePatchStatesForPatchGroup ( request ) ;
public class CrossValidationSplitter { /** * { @ inheritDoc } */ @ Override public TemporalDataModelIF < U , I > [ ] split ( final TemporalDataModelIF < U , I > data ) { } }
@ SuppressWarnings ( "unchecked" ) final TemporalDataModelIF < U , I > [ ] splits = new TemporalDataModelIF [ 2 * nFolds ] ; for ( int i = 0 ; i < nFolds ; i ++ ) { splits [ 2 * i ] = DataModelFactory . getDefaultTemporalModel ( ) ; // training splits [ 2 * i + 1 ] = DataModelFactory . getDefaultTemporalModel ( ) ; // test } if ( perUser ) { int n = 0 ; for ( U user : data . getUsers ( ) ) { List < I > items = new ArrayList < > ( ) ; for ( I i : data . getUserItems ( user ) ) { items . add ( i ) ; } Collections . shuffle ( items , rnd ) ; for ( I item : items ) { Double pref = data . getUserItemPreference ( user , item ) ; Iterable < Long > time = data . getUserItemTimestamps ( user , item ) ; int curFold = n % nFolds ; for ( int i = 0 ; i < nFolds ; i ++ ) { TemporalDataModelIF < U , I > datamodel = splits [ 2 * i ] ; // training if ( i == curFold ) { datamodel = splits [ 2 * i + 1 ] ; // test } if ( pref != null ) { datamodel . addPreference ( user , item , pref ) ; } if ( time != null ) { for ( Long t : time ) { datamodel . addTimestamp ( user , item , t ) ; } } } n ++ ; } } } else { List < U > users = new ArrayList < > ( ) ; for ( U u : data . getUsers ( ) ) { users . add ( u ) ; } Collections . shuffle ( users , rnd ) ; int n = 0 ; for ( U user : users ) { List < I > items = new ArrayList < > ( ) ; for ( I i : data . getUserItems ( user ) ) { items . add ( i ) ; } Collections . shuffle ( items , rnd ) ; for ( I item : items ) { Double pref = data . getUserItemPreference ( user , item ) ; Iterable < Long > time = data . getUserItemTimestamps ( user , item ) ; int curFold = n % nFolds ; for ( int i = 0 ; i < nFolds ; i ++ ) { TemporalDataModelIF < U , I > datamodel = splits [ 2 * i ] ; // training if ( i == curFold ) { datamodel = splits [ 2 * i + 1 ] ; // test } if ( pref != null ) { datamodel . addPreference ( user , item , pref ) ; } if ( time != null ) { for ( Long t : time ) { datamodel . addTimestamp ( user , item , t ) ; } } } n ++ ; } } } return splits ;
public class UdpClient { /** * Run IO loops on the given { @ link EventLoopGroup } . * @ param eventLoopGroup an eventLoopGroup to share * @ return a new { @ link UdpClient } */ public final UdpClient runOn ( EventLoopGroup eventLoopGroup ) { } }
Objects . requireNonNull ( eventLoopGroup , "eventLoopGroup" ) ; return runOn ( preferNative -> eventLoopGroup ) ;
public class CommerceUserSegmentEntryPersistenceImpl { /** * Returns the commerce user segment entries before and after the current commerce user segment entry in the ordered set where groupId = & # 63 ; . * @ param commerceUserSegmentEntryId the primary key of the current commerce user segment entry * @ param groupId the group ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the previous , current , and next commerce user segment entry * @ throws NoSuchUserSegmentEntryException if a commerce user segment entry with the primary key could not be found */ @ Override public CommerceUserSegmentEntry [ ] findByGroupId_PrevAndNext ( long commerceUserSegmentEntryId , long groupId , OrderByComparator < CommerceUserSegmentEntry > orderByComparator ) throws NoSuchUserSegmentEntryException { } }
CommerceUserSegmentEntry commerceUserSegmentEntry = findByPrimaryKey ( commerceUserSegmentEntryId ) ; Session session = null ; try { session = openSession ( ) ; CommerceUserSegmentEntry [ ] array = new CommerceUserSegmentEntryImpl [ 3 ] ; array [ 0 ] = getByGroupId_PrevAndNext ( session , commerceUserSegmentEntry , groupId , orderByComparator , true ) ; array [ 1 ] = commerceUserSegmentEntry ; array [ 2 ] = getByGroupId_PrevAndNext ( session , commerceUserSegmentEntry , groupId , orderByComparator , false ) ; return array ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; }
public class ChronoLocalDateTimeImpl { /** * Returns a copy of this date - time with the new date and time , checking * to see if a new object is in fact required . * @ param newDate the date of the new date - time , not null * @ param newTime the time of the new date - time , not null * @ return the date - time , not null */ private ChronoLocalDateTimeImpl < D > with ( Temporal newDate , LocalTime newTime ) { } }
if ( date == newDate && time == newTime ) { return this ; } // Validate that the new DateTime is a ChronoLocalDate ( and not something else ) D cd = date . getChronology ( ) . ensureChronoLocalDate ( newDate ) ; return new ChronoLocalDateTimeImpl < D > ( cd , newTime ) ;
public class JdbcUrlSanitizer { /** * Deal with this strange URL format : * https : / / docs . microsoft . com / en - us / sql / connect / jdbc / building - the - connection - url * https : / / docs . microsoft . com / en - us / sql / connect / jdbc / setting - the - connection - properties */ private static String sanitizeSqlServerUrl ( String jdbcUrl ) { } }
StringBuilder result = new StringBuilder ( ) ; result . append ( SQLSERVER_PREFIX ) ; String host ; if ( jdbcUrl . contains ( ";" ) ) { host = StringUtils . substringBetween ( jdbcUrl , SQLSERVER_PREFIX , ";" ) ; } else { host = StringUtils . substringAfter ( jdbcUrl , SQLSERVER_PREFIX ) ; } String queryString = StringUtils . substringAfter ( jdbcUrl , ";" ) ; Map < String , String > parameters = KeyValueFormat . parse ( queryString ) ; Optional < String > server = firstValue ( parameters , "serverName" , "servername" , "server" ) ; if ( server . isPresent ( ) ) { result . append ( server . get ( ) ) ; } else { result . append ( StringUtils . substringBefore ( host , ":" ) ) ; } Optional < String > port = firstValue ( parameters , "portNumber" , "port" ) ; if ( port . isPresent ( ) ) { result . append ( ':' ) . append ( port . get ( ) ) ; } else if ( host . contains ( ":" ) ) { result . append ( ':' ) . append ( StringUtils . substringAfter ( host , ":" ) ) ; } Optional < String > database = firstValue ( parameters , "databaseName" , "database" ) ; database . ifPresent ( s -> result . append ( '/' ) . append ( s ) ) ; return result . toString ( ) ;
public class ComQuery { /** * Send directly to socket the sql data . * @ param pos output stream * @ param sqlBytes the query in UTF - 8 bytes * @ throws IOException if connection error occur */ public static void sendMultiDirect ( final PacketOutputStream pos , List < byte [ ] > sqlBytes ) throws IOException { } }
pos . startPacket ( 0 ) ; pos . write ( Packet . COM_QUERY ) ; for ( byte [ ] bytes : sqlBytes ) { pos . write ( bytes ) ; } pos . flush ( ) ;