signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Ordering { /** * Returns the least of the specified values according to this ordering . If there are multiple * least values , the first of those is returned . * < p > < b > Java 8 users : < / b > If { @ code iterable } is a { @ link Collection } , use { @ code * Collections . min ( collection , thisComparator ) } instead . Otherwise , continue to use this method * for now . After the next release of Guava , use { @ code * Streams . stream ( iterable ) . min ( thisComparator ) . get ( ) } instead . Note that these alternatives do * not guarantee which tied minimum element is returned ) * @ param iterable the iterable whose minimum element is to be determined * @ throws NoSuchElementException if { @ code iterable } is empty * @ throws ClassCastException if the parameters are not < i > mutually comparable < / i > under this * ordering . */ @ CanIgnoreReturnValue // TODO ( kak ) : Consider removing this public < E extends T > E min ( Iterable < E > iterable ) { } }
return min ( iterable . iterator ( ) ) ;
public class TableForm { /** * Create a new column in the form . */ public void addColumn ( int spacing ) { } }
table . addCell ( "&nbsp" , "width=" + spacing ) ; column = new Table ( 0 ) ; table . addCell ( column ) ; table . cell ( ) . top ( ) ; columns ++ ;
public class DBReceiverJob { /** * Retrieve the event properties from the logging _ event _ property table . * @ param connection * @ param id * @ param event * @ throws SQLException */ void getProperties ( Connection connection , long id , LoggingEvent event ) throws SQLException { } }
PreparedStatement statement = connection . prepareStatement ( sqlProperties ) ; try { statement . setLong ( 1 , id ) ; ResultSet rs = statement . executeQuery ( ) ; while ( rs . next ( ) ) { String key = rs . getString ( 1 ) ; String value = rs . getString ( 2 ) ; event . setProperty ( key , value ) ; } } finally { statement . close ( ) ; }
public class LabelProcessor { /** * Create a new labelBuilder and register label with the default locale . * @ param label the label to be added * @ return the updated label builder */ public static Label . Builder generateLabelBuilder ( final String label ) { } }
return addLabel ( Label . newBuilder ( ) , Locale . getDefault ( ) , label ) ;
public class Cycles { /** * Internal method to wrap cycle computations which < i > should < / i > be * tractable . That is they currently won ' t throw the exception - if the * method does throw an exception an internal error is triggered as a sanity * check . * @ param finder the cycle finding method * @ param container the molecule to find the cycles of * @ param length maximum size or cycle to find * @ return the cycles of the molecule */ private static Cycles _invoke ( CycleFinder finder , IAtomContainer container , int length ) { } }
try { return finder . find ( container , length ) ; } catch ( Intractable e ) { throw new RuntimeException ( "Cycle computation should not be intractable: " , e ) ; }
public class RandomFileInputStream { /** * Skip bytes in the input file . * @ param bytes The number of bytes to skip * @ return the number of bytes skiped * @ throws IOException on IO error * @ see java . io . InputStream # skip ( long ) */ public long skip ( long bytes ) throws IOException { } }
long pos = randomFile . getFilePointer ( ) ; randomFile . seek ( pos + bytes ) ; return randomFile . getFilePointer ( ) - pos ;
public class AbstractOAuth1TokenExtractor { /** * { @ inheritDoc } */ @ Override public T extract ( Response response ) throws IOException { } }
final String body = response . getBody ( ) ; Preconditions . checkEmptyString ( body , "Response body is incorrect. Can't extract a token from an empty string" ) ; final String token = extract ( body , OAUTH_TOKEN_REGEXP_PATTERN ) ; final String secret = extract ( body , OAUTH_TOKEN_SECRET_REGEXP_PATTERN ) ; return createToken ( token , secret , body ) ;
public class AbstractRemoteClient { /** * { @ inheritDoc } */ @ Override public void removeDataObserver ( final Observer < DataProvider < M > , M > observer ) { } }
dataObservable . removeObserver ( observer ) ;
public class GitlabAPI { /** * Creates a private Project * @ param name The name of the project * @ return The GitLab Project * @ throws IOException on gitlab api call error */ public GitlabProject createProject ( String name ) throws IOException { } }
return createProject ( name , null , null , null , null , null , null , null , null , null , null ) ;
public class CrowdingDistanceComparator { /** * Compare two solutions . * @ param solution1 Object representing the first < code > Solution < / code > . * @ param solution2 Object representing the second < code > Solution < / code > . * @ return - 1 , or 0 , or 1 if solution1 is has greater , equal , or less distance value than solution2, * respectively . */ @ Override public int compare ( S solution1 , S solution2 ) { } }
int result ; if ( solution1 == null ) { if ( solution2 == null ) { result = 0 ; } else { result = 1 ; } } else if ( solution2 == null ) { result = - 1 ; } else { double distance1 = Double . MIN_VALUE ; double distance2 = Double . MIN_VALUE ; if ( crowdingDistance . getAttribute ( solution1 ) != null ) { distance1 = ( double ) crowdingDistance . getAttribute ( solution1 ) ; } if ( crowdingDistance . getAttribute ( solution2 ) != null ) { distance2 = ( double ) crowdingDistance . getAttribute ( solution2 ) ; } if ( distance1 > distance2 ) { result = - 1 ; } else if ( distance1 < distance2 ) { result = 1 ; } else { result = 0 ; } } return result ;
public class OracleDatabase { /** * { @ inheritDoc } * @ throws SQLException */ @ Override public boolean isConnected ( final Connection _connection ) throws SQLException { } }
boolean ret = false ; final Statement stmt = _connection . createStatement ( ) ; try { final ResultSet resultset = stmt . executeQuery ( "select product from product_component_version where product like 'Oracle%'" ) ; ret = resultset . next ( ) ; resultset . close ( ) ; } finally { stmt . close ( ) ; } return ret ;
public class DateParser { /** * Returns the sort version of the date . Does not have * estimation rules applied . * @ return return the sort string for this date */ public Calendar getSortCalendar ( ) { } }
final String dateString = stripApproximationKeywords ( ) ; if ( dateString . isEmpty ( ) ) { return null ; } return parseCalendar ( dateString ) ;
public class ListJobsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListJobsRequest listJobsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( listJobsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listJobsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listJobsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listJobsRequest . getTags ( ) , TAGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class XmlEntities { /** * Unescapes the entities in a < code > String < / code > . * For example , if you have called addEntity ( & quot ; foo & quot ; , 0xA1 ) , unescape ( & quot ; & amp ; foo ; & quot ; ) will return * & quot ; \ u00A1 & quot ; * @ param str * The < code > String < / code > to escape . * @ return A new escaped < code > String < / code > . */ public String unescape ( String str ) { } }
int firstAmp = str . indexOf ( '&' ) ; if ( firstAmp < 0 ) { return str ; } else { StringBuilder sb = createStringBuilder ( str ) ; doUnescape ( sb , str , firstAmp ) ; return sb . toString ( ) ; }
public class EventService { /** * Dispatch the event to the handler registered for the category . The method * corresponding to the event type is invoked . * @ param event The event object * @ param apiContext The application context * @ throws ClassNotFoundException * @ throws SecurityException * @ throws NoSuchMethodException * @ throws InstantiationException * @ throws InvocationTargetException * @ throws IllegalArgumentException * @ throws IllegalAccessException */ protected void invokeHandler ( Event event , ApiContext apiContext ) throws ClassNotFoundException , NoSuchMethodException , SecurityException , IllegalAccessException , IllegalArgumentException , InvocationTargetException , InstantiationException { } }
String topic [ ] = event . getTopic ( ) . split ( "\\." ) ; String eventCategory = topic [ 0 ] . substring ( 0 , 1 ) . toUpperCase ( ) + topic [ 0 ] . substring ( 1 ) ; String eventAction = topic [ 1 ] ; // get list of registered handlers Object handler = EventManager . getInstance ( ) . getRegisteredClassHandlers ( eventCategory ) ; if ( handler != null ) { String methodName = eventAction ; String className = handler . getClass ( ) . getName ( ) ; try { Method m ; m = handler . getClass ( ) . getMethod ( methodName , ApiContext . class , Event . class ) ; m . invoke ( handler , apiContext , event ) ; } catch ( NoSuchMethodException e ) { logger . warn ( "No " + eventAction + " method on class " + className ) ; throw e ; } catch ( SecurityException e ) { logger . warn ( "Security exception: " + e . getMessage ( ) ) ; throw e ; } catch ( IllegalAccessException e ) { logger . warn ( "Illegal access for method " + eventAction + " on class " + className ) ; throw e ; } catch ( IllegalArgumentException e ) { logger . warn ( "Illegal argument exception for method " + eventAction + " on class " + className ) ; throw e ; } catch ( InvocationTargetException e ) { logger . warn ( "Invocation target exception for method " + eventAction + " on class " + className ) ; throw e ; } }
public class EurekaClinicalClient { /** * Gets the resource specified by the path . Sends to the server an Accepts * header for JSON . * @ param < T > the type of the resource . * @ param path the path to the resource . Cannot be < code > null < / code > . * @ param cls the type of the resource . Cannot be < code > null < / code > . * @ return the resource . * @ throws ClientException if a status code other than 200 ( OK ) is returned . */ protected < T > T doGet ( String path , Class < T > cls ) throws ClientException { } }
return doGet ( path , cls , null ) ;
public class CodedOutputStream { /** * Write a little - endian 64 - bit integer . */ public void writeRawLittleEndian64 ( final long value ) throws IOException { } }
writeRawByte ( ( int ) ( value ) & 0xFF ) ; writeRawByte ( ( int ) ( value >> 8 ) & 0xFF ) ; writeRawByte ( ( int ) ( value >> 16 ) & 0xFF ) ; writeRawByte ( ( int ) ( value >> 24 ) & 0xFF ) ; writeRawByte ( ( int ) ( value >> 32 ) & 0xFF ) ; writeRawByte ( ( int ) ( value >> 40 ) & 0xFF ) ; writeRawByte ( ( int ) ( value >> 48 ) & 0xFF ) ; writeRawByte ( ( int ) ( value >> 56 ) & 0xFF ) ;
public class MyBatis { /** * Create a PreparedStatement for SELECT requests with scrolling of results */ public PreparedStatement newScrollingSelectStatement ( DbSession session , String sql ) { } }
int fetchSize = database . getDialect ( ) . getScrollDefaultFetchSize ( ) ; return newScrollingSelectStatement ( session , sql , fetchSize ) ;
public class AbstractClassOption { /** * Sets current object . * @ param obj the object to set as current . */ public void setCurrentObject ( Object obj ) { } }
if ( ( ( obj == null ) && ( this . nullString != null ) ) || this . requiredType . isInstance ( obj ) || ( obj instanceof String ) || ( obj instanceof File ) // | | ( ( obj instanceof Task ) & & this . requiredType . isAssignableFrom ( ( ( Task ) obj ) . getTaskResultType ( ) ) ) ) { this . currentValue = obj ; } else { throw new IllegalArgumentException ( "Object not of required type." ) ; }
public class GeometryTools { /** * Returns the geometric center of all the rings in this ringset . * See comment for center ( IAtomContainer atomCon , Dimension areaDim , HashMap renderingCoordinates ) for details on coordinate sets * @ param ringSet Description of the Parameter * @ return the geometric center of the rings in this ringset */ public static Point2d get2DCenter ( IRingSet ringSet ) { } }
double centerX = 0 ; double centerY = 0 ; for ( int i = 0 ; i < ringSet . getAtomContainerCount ( ) ; i ++ ) { Point2d centerPoint = get2DCenter ( ( IRing ) ringSet . getAtomContainer ( i ) ) ; centerX += centerPoint . x ; centerY += centerPoint . y ; } return new Point2d ( centerX / ( ( double ) ringSet . getAtomContainerCount ( ) ) , centerY / ( ( double ) ringSet . getAtomContainerCount ( ) ) ) ;
public class OgmCollectionPersister { /** * Centralize the RowKey column setting logic as the values settings are slightly different between insert / update and delete */ private RowKeyBuilder initializeRowKeyBuilder ( ) { } }
RowKeyBuilder builder = new RowKeyBuilder ( ) ; if ( hasIdentifier ) { builder . addColumns ( getIdentifierColumnName ( ) ) ; } else { builder . addColumns ( getKeyColumnNames ( ) ) ; // ! isOneToMany ( ) present in delete not in update if ( ! isOneToMany ( ) && hasIndex && ! indexContainsFormula ) { builder . addIndexColumns ( getIndexColumnNames ( ) ) ; } else { builder . addColumns ( getElementColumnNames ( ) ) ; } } return builder ;
public class BindMapHelper { /** * Parse a list . * @ param context the context * @ param parser the parser * @ param list the list * @ param skipRead the skip read * @ return the list */ static List < Object > parseList ( AbstractContext context , JsonParser parser , List < Object > list , boolean skipRead ) { } }
try { if ( ! skipRead ) { parser . nextToken ( ) ; } if ( parser . currentToken ( ) != JsonToken . START_ARRAY ) { throw ( new KriptonRuntimeException ( "Invalid input format" ) ) ; } skipRead = false ; JsonToken token ; do { if ( skipRead ) { token = parser . getCurrentToken ( ) ; } else { token = parser . nextToken ( ) ; skipRead = true ; } switch ( token ) { case VALUE_FALSE : case VALUE_TRUE : case VALUE_NUMBER_FLOAT : case VALUE_NUMBER_INT : case VALUE_STRING : list . add ( parser . getText ( ) ) ; break ; case VALUE_NULL : list . add ( null ) ; break ; case VALUE_EMBEDDED_OBJECT : // parse submap list . add ( parseMap ( context , parser , new LinkedHashMap < String , Object > ( ) , true ) ) ; break ; case START_ARRAY : list . add ( parseList ( context , parser , list , true ) ) ; break ; default : break ; } } while ( parser . nextToken ( ) != JsonToken . END_ARRAY ) ; return list ; } catch ( IOException e ) { e . printStackTrace ( ) ; throw ( new KriptonRuntimeException ( e ) ) ; }
public class SocketChannelController { /** * Confiure socket channel * @ param sc * @ throws IOException */ protected final void configureSocketChannel ( SocketChannel sc ) throws IOException { } }
sc . socket ( ) . setSoTimeout ( this . soTimeout ) ; sc . configureBlocking ( false ) ; if ( this . socketOptions . get ( StandardSocketOption . SO_REUSEADDR ) != null ) { sc . socket ( ) . setReuseAddress ( StandardSocketOption . SO_REUSEADDR . type ( ) . cast ( this . socketOptions . get ( StandardSocketOption . SO_REUSEADDR ) ) ) ; } if ( this . socketOptions . get ( StandardSocketOption . SO_SNDBUF ) != null ) { sc . socket ( ) . setSendBufferSize ( StandardSocketOption . SO_SNDBUF . type ( ) . cast ( this . socketOptions . get ( StandardSocketOption . SO_SNDBUF ) ) ) ; } if ( this . socketOptions . get ( StandardSocketOption . SO_KEEPALIVE ) != null ) { sc . socket ( ) . setKeepAlive ( StandardSocketOption . SO_KEEPALIVE . type ( ) . cast ( this . socketOptions . get ( StandardSocketOption . SO_KEEPALIVE ) ) ) ; } if ( this . socketOptions . get ( StandardSocketOption . SO_LINGER ) != null ) { sc . socket ( ) . setSoLinger ( this . soLingerOn , StandardSocketOption . SO_LINGER . type ( ) . cast ( this . socketOptions . get ( StandardSocketOption . SO_LINGER ) ) ) ; } if ( this . socketOptions . get ( StandardSocketOption . SO_RCVBUF ) != null ) { sc . socket ( ) . setReceiveBufferSize ( StandardSocketOption . SO_RCVBUF . type ( ) . cast ( this . socketOptions . get ( StandardSocketOption . SO_RCVBUF ) ) ) ; } if ( this . socketOptions . get ( StandardSocketOption . TCP_NODELAY ) != null ) { sc . socket ( ) . setTcpNoDelay ( StandardSocketOption . TCP_NODELAY . type ( ) . cast ( this . socketOptions . get ( StandardSocketOption . TCP_NODELAY ) ) ) ; }
public class CompressionUtils { /** * Get the file name without the . gz extension * @ param fname The name of the gzip file * @ return fname without the " . gz " extension * @ throws IAE if fname is not a valid " * . gz " file name */ public static String getGzBaseName ( String fname ) { } }
final String reducedFname = Files . getNameWithoutExtension ( fname ) ; if ( isGz ( fname ) && ! reducedFname . isEmpty ( ) ) { return reducedFname ; } throw new IAE ( "[%s] is not a valid gz file name" , fname ) ;
public class UniprotProxySequenceReader { /** * Open a URL connection . * Follows redirects . * @ param url * @ throws IOException */ private static HttpURLConnection openURLConnection ( URL url ) throws IOException { } }
// This method should be moved to a utility class in BioJava 5.0 final int timeout = 5000 ; final String useragent = "BioJava" ; HttpURLConnection conn = ( HttpURLConnection ) url . openConnection ( ) ; conn . setRequestProperty ( "User-Agent" , useragent ) ; conn . setInstanceFollowRedirects ( true ) ; conn . setConnectTimeout ( timeout ) ; conn . setReadTimeout ( timeout ) ; int status = conn . getResponseCode ( ) ; while ( status == HttpURLConnection . HTTP_MOVED_TEMP || status == HttpURLConnection . HTTP_MOVED_PERM || status == HttpURLConnection . HTTP_SEE_OTHER ) { // Redirect ! String newUrl = conn . getHeaderField ( "Location" ) ; if ( newUrl . equals ( url . toString ( ) ) ) { throw new IOException ( "Cyclic redirect detected at " + newUrl ) ; } // Preserve cookies String cookies = conn . getHeaderField ( "Set-Cookie" ) ; // open the new connection again url = new URL ( newUrl ) ; conn . disconnect ( ) ; conn = ( HttpURLConnection ) url . openConnection ( ) ; if ( cookies != null ) { conn . setRequestProperty ( "Cookie" , cookies ) ; } conn . addRequestProperty ( "User-Agent" , useragent ) ; conn . setInstanceFollowRedirects ( true ) ; conn . setConnectTimeout ( timeout ) ; conn . setReadTimeout ( timeout ) ; conn . connect ( ) ; status = conn . getResponseCode ( ) ; logger . info ( "Redirecting from {} to {}" , url , newUrl ) ; } conn . connect ( ) ; return conn ;
public class HttpClientIntroductionAdvice {
    /**
     * Interceptor that applies headers, cookies, parameters and body arguments to build and
     * execute an HTTP request for the intercepted {@code @Client} method.
     *
     * @param context The invocation context
     * @return the client response, a reactive publisher, or a CompletableFuture, depending on
     *     the intercepted method's declared return type
     */
    @Override
    public Object intercept(MethodInvocationContext<Object, Object> context) {
        // The declaring type must be annotated with @Client.
        AnnotationValue<Client> clientAnnotation = context.findAnnotation(Client.class).orElseThrow(() ->
            new IllegalStateException("Client advice called from type that is not annotated with @Client: " + context)
        );
        HttpClient httpClient = getClient(context, clientAnnotation);
        Class<?> declaringType = context.getDeclaringType();
        // close()/AutoCloseable support: evict the cached client and close it.
        if (Closeable.class == declaringType || AutoCloseable.class == declaringType) {
            String clientId = clientAnnotation.getValue(String.class).orElse(null);
            String path = clientAnnotation.get("path", String.class).orElse(null);
            String clientKey = computeClientKey(clientId, path);
            clients.remove(clientKey);
            httpClient.close();
            return null;
        }
        Optional<Class<? extends Annotation>> httpMethodMapping = context.getAnnotationTypeByStereotype(HttpMethodMapping.class);
        if (context.hasStereotype(HttpMethodMapping.class) && httpClient != null) {
            AnnotationValue<HttpMethodMapping> mapping = context.getAnnotation(HttpMethodMapping.class);
            String uri = mapping.getRequiredValue(String.class);
            if (StringUtils.isEmpty(uri)) {
                uri = "/" + context.getMethodName();
            }
            Class<? extends Annotation> annotationType = httpMethodMapping.get();
            // E.g. a @Get mapping annotation becomes HttpMethod.GET.
            HttpMethod httpMethod = HttpMethod.valueOf(annotationType.getSimpleName().toUpperCase());
            ReturnType returnType = context.getReturnType();
            Class<?> javaReturnType = returnType.getType();
            UriMatchTemplate uriTemplate = UriMatchTemplate.of("");
            if (!(uri.length() == 1 && uri.charAt(0) == '/')) {
                uriTemplate = uriTemplate.nest(uri);
            }
            Map<String, Object> paramMap = context.getParameterValueMap();
            Map<String, String> queryParams = new LinkedHashMap<>();
            List<String> uriVariables = uriTemplate.getVariableNames();
            boolean variableSatisfied = uriVariables.isEmpty() || uriVariables.containsAll(paramMap.keySet());
            MutableHttpRequest<Object> request;
            Object body = null;
            Map<String, MutableArgumentValue<?>> parameters = context.getParameters();
            Argument[] arguments = context.getArguments();
            // Headers declared via @Header annotations on the type/method.
            Map<String, String> headers = new LinkedHashMap<>(HEADERS_INITIAL_CAPACITY);
            List<AnnotationValue<Header>> headerAnnotations = context.getAnnotationValuesByType(Header.class);
            for (AnnotationValue<Header> headerAnnotation : headerAnnotations) {
                String headerName = headerAnnotation.get("name", String.class).orElse(null);
                String headerValue = headerAnnotation.getValue(String.class).orElse(null);
                if (StringUtils.isNotEmpty(headerName) && StringUtils.isNotEmpty(headerValue)) {
                    headers.put(headerName, headerValue);
                }
            }
            // @Version contributes configured version headers and/or query parameters.
            context.findAnnotation(Version.class)
                    .flatMap(versionAnnotation -> versionAnnotation.getValue(String.class))
                    .filter(StringUtils::isNotEmpty)
                    .ifPresent(version -> {
                        ClientVersioningConfiguration configuration = getVersioningConfiguration(clientAnnotation);
                        configuration.getHeaders().forEach(header -> headers.put(header, version));
                        configuration.getParameters().forEach(parameter -> queryParams.put(parameter, version));
                    });
            // Request attributes declared via @RequestAttribute annotations on the type/method.
            Map<String, Object> attributes = new LinkedHashMap<>(ATTRIBUTES_INITIAL_CAPACITY);
            List<AnnotationValue<RequestAttribute>> attributeAnnotations = context.getAnnotationValuesByType(RequestAttribute.class);
            for (AnnotationValue<RequestAttribute> attributeAnnotation : attributeAnnotations) {
                String attributeName = attributeAnnotation.get("name", String.class).orElse(null);
                Object attributeValue = attributeAnnotation.getValue(Object.class).orElse(null);
                if (StringUtils.isNotEmpty(attributeName) && attributeValue != null) {
                    attributes.put(attributeName, attributeValue);
                }
            }
            List<NettyCookie> cookies = new ArrayList<>();
            List<Argument> bodyArguments = new ArrayList<>();
            ConversionService<?> conversionService = ConversionService.SHARED;
            // Classify every method argument: body, header, cookie, query value, request
            // attribute, path variable, or (by default) a component of a map-shaped body.
            for (Argument argument : arguments) {
                String argumentName = argument.getName();
                AnnotationMetadata annotationMetadata = argument.getAnnotationMetadata();
                MutableArgumentValue<?> value = parameters.get(argumentName);
                Object definedValue = value.getValue();
                if (paramMap.containsKey(argumentName)) {
                    if (annotationMetadata.hasStereotype(Format.class)) {
                        // Pre-format the template value according to @Format.
                        final Object v = paramMap.get(argumentName);
                        if (v != null) {
                            paramMap.put(argumentName, conversionService.convert(v, ConversionContext.of(String.class).with(argument.getAnnotationMetadata())));
                        }
                    }
                }
                if (definedValue == null) {
                    // Fall back to @Bindable defaultValue when no value was supplied.
                    definedValue = argument.getAnnotationMetadata().getValue(Bindable.class, "defaultValue", String.class).orElse(null);
                }
                if (definedValue == null && !argument.isAnnotationPresent(Nullable.class)) {
                    throw new IllegalArgumentException(String.format("Null values are not allowed to be passed to client methods (%s). Add @javax.validation.Nullable if that is the desired behavior", context.getExecutableMethod().toString()));
                }
                if (argument.isAnnotationPresent(Body.class)) {
                    body = definedValue;
                } else if (annotationMetadata.isAnnotationPresent(Header.class)) {
                    String headerName = annotationMetadata.getValue(Header.class, String.class).orElse(null);
                    if (StringUtils.isEmpty(headerName)) {
                        headerName = NameUtils.hyphenate(argumentName);
                    }
                    String finalHeaderName = headerName;
                    conversionService.convert(definedValue, String.class).ifPresent(o -> headers.put(finalHeaderName, o));
                } else if (annotationMetadata.isAnnotationPresent(CookieValue.class)) {
                    String cookieName = annotationMetadata.getValue(CookieValue.class, String.class).orElse(null);
                    if (StringUtils.isEmpty(cookieName)) {
                        cookieName = argumentName;
                    }
                    String finalCookieName = cookieName;
                    conversionService.convert(definedValue, String.class).ifPresent(o -> cookies.add(new NettyCookie(finalCookieName, o)));
                } else if (annotationMetadata.isAnnotationPresent(QueryValue.class)) {
                    String parameterName = annotationMetadata.getValue(QueryValue.class, String.class).orElse(null);
                    conversionService.convert(definedValue, ConversionContext.of(String.class).with(annotationMetadata)).ifPresent(o -> {
                        if (!StringUtils.isEmpty(parameterName)) {
                            paramMap.put(parameterName, o);
                            queryParams.put(parameterName, o);
                        } else {
                            queryParams.put(argumentName, o);
                        }
                    });
                } else if (annotationMetadata.isAnnotationPresent(RequestAttribute.class)) {
                    // NOTE(review): this reads the value from Annotation.class rather than
                    // RequestAttribute.class, unlike the sibling branches — looks like a
                    // copy/paste slip; confirm against upstream before changing.
                    String attributeName = annotationMetadata.getValue(Annotation.class, String.class).orElse(null);
                    if (StringUtils.isEmpty(attributeName)) {
                        attributeName = NameUtils.hyphenate(argumentName);
                    }
                    String finalAttributeName = attributeName;
                    conversionService.convert(definedValue, Object.class).ifPresent(o -> attributes.put(finalAttributeName, o));
                } else if (annotationMetadata.isAnnotationPresent(PathVariable.class)) {
                    String parameterName = annotationMetadata.getValue(PathVariable.class, String.class).orElse(null);
                    conversionService.convert(definedValue, ConversionContext.of(String.class).with(annotationMetadata)).ifPresent(o -> {
                        if (!StringUtils.isEmpty(o)) {
                            paramMap.put(parameterName, o);
                        }
                    });
                } else if (!uriVariables.contains(argumentName)) {
                    bodyArguments.add(argument);
                }
            }
            if (HttpMethod.permitsRequestBody(httpMethod)) {
                // Unannotated arguments are gathered into a map-shaped body.
                if (body == null && !bodyArguments.isEmpty()) {
                    Map<String, Object> bodyMap = new LinkedHashMap<>();
                    for (Argument bodyArgument : bodyArguments) {
                        String argumentName = bodyArgument.getName();
                        MutableArgumentValue<?> value = parameters.get(argumentName);
                        bodyMap.put(argumentName, value.getValue());
                    }
                    body = bodyMap;
                }
                if (body != null) {
                    if (!variableSatisfied) {
                        // Allow URI template variables to be satisfied from body properties.
                        if (body instanceof Map) {
                            paramMap.putAll((Map) body);
                        } else {
                            BeanMap<Object> beanMap = BeanMap.of(body);
                            for (Map.Entry<String, Object> entry : beanMap.entrySet()) {
                                String k = entry.getKey();
                                Object v = entry.getValue();
                                if (v != null) {
                                    paramMap.put(k, v);
                                }
                            }
                        }
                    }
                }
            }
            uri = uriTemplate.expand(paramMap);
            // Values consumed by the URI template must not be duplicated in the query string.
            uriVariables.forEach(queryParams::remove);
            request = HttpRequest.create(httpMethod, appendQuery(uri, queryParams));
            if (body != null) {
                request.body(body);
                MediaType[] contentTypes = context.getValue(Produces.class, MediaType[].class).orElse(DEFAULT_ACCEPT_TYPES);
                if (ArrayUtils.isNotEmpty(contentTypes)) {
                    request.contentType(contentTypes[0]);
                }
            }
            // Set the URI template used to make the request for tracing purposes
            request.setAttribute(HttpAttributes.URI_TEMPLATE, resolveTemplate(clientAnnotation, uriTemplate.toString()));
            String serviceId = clientAnnotation.getValue(String.class).orElse(null);
            Argument<?> errorType = clientAnnotation.get("errorType", Class.class).map((Function<Class, Argument>) Argument::of).orElse(HttpClient.DEFAULT_ERROR_TYPE);
            request.setAttribute(HttpAttributes.SERVICE_ID, serviceId);
            if (!headers.isEmpty()) {
                for (Map.Entry<String, String> entry : headers.entrySet()) {
                    request.header(entry.getKey(), entry.getValue());
                }
            }
            cookies.forEach(request::cookie);
            if (!attributes.isEmpty()) {
                for (Map.Entry<String, Object> entry : attributes.entrySet()) {
                    request.setAttribute(entry.getKey(), entry.getValue());
                }
            }
            MediaType[] acceptTypes = context.getValue(Consumes.class, MediaType[].class).orElse(DEFAULT_ACCEPT_TYPES);
            boolean isFuture = CompletableFuture.class.isAssignableFrom(javaReturnType);
            final Class<?> methodDeclaringType = declaringType;
            // Reactive / future return types.
            if (Publishers.isConvertibleToPublisher(javaReturnType) || isFuture) {
                boolean isSingle = Publishers.isSingle(javaReturnType) || isFuture || context.getValue(Consumes.class, "single", Boolean.class).orElse(false);
                Argument<?> publisherArgument = returnType.asArgument().getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT);
                Class<?> argumentType = publisherArgument.getType();
                if (HttpResponse.class.isAssignableFrom(argumentType) || HttpStatus.class.isAssignableFrom(argumentType)) {
                    isSingle = true;
                }
                Publisher<?> publisher;
                if (!isSingle && httpClient instanceof StreamingHttpClient) {
                    // Streaming responses: SSE, JSON streams, or raw data streams.
                    StreamingHttpClient streamingHttpClient = (StreamingHttpClient) httpClient;
                    if (!Void.class.isAssignableFrom(argumentType)) {
                        request.accept(acceptTypes);
                    }
                    if (HttpResponse.class.isAssignableFrom(argumentType) || Void.class.isAssignableFrom(argumentType)) {
                        publisher = streamingHttpClient.exchangeStream(request);
                    } else {
                        boolean isEventStream = Arrays.asList(acceptTypes).contains(MediaType.TEXT_EVENT_STREAM_TYPE);
                        if (isEventStream && streamingHttpClient instanceof SseClient) {
                            SseClient sseClient = (SseClient) streamingHttpClient;
                            if (publisherArgument.getType() == Event.class) {
                                publisher = sseClient.eventStream(request, publisherArgument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT));
                            } else {
                                publisher = Flowable.fromPublisher(sseClient.eventStream(request, publisherArgument)).map(Event::getData);
                            }
                        } else {
                            boolean isJson = isJsonParsedMediaType(acceptTypes);
                            if (isJson) {
                                publisher = streamingHttpClient.jsonStream(request, publisherArgument);
                            } else {
                                Publisher<ByteBuffer<?>> byteBufferPublisher = streamingHttpClient.dataStream(request);
                                if (argumentType == ByteBuffer.class) {
                                    publisher = byteBufferPublisher;
                                } else {
                                    if (conversionService.canConvert(ByteBuffer.class, argumentType)) {
                                        // It would be nice if we could capture the TypeConverter here
                                        publisher = Flowable.fromPublisher(byteBufferPublisher).map(value -> conversionService.convert(value, argumentType).get());
                                    } else {
                                        throw new ConfigurationException("Cannot create the generated HTTP client's " + "required return type, since no TypeConverter from ByteBuffer to " + argumentType + " is registered");
                                    }
                                }
                            }
                        }
                    }
                } else {
                    if (Void.class.isAssignableFrom(argumentType)) {
                        publisher = httpClient.exchange(request, null, errorType);
                    } else {
                        request.accept(acceptTypes);
                        if (HttpResponse.class.isAssignableFrom(argumentType)) {
                            publisher = httpClient.exchange(request, publisherArgument, errorType);
                        } else {
                            publisher = httpClient.retrieve(request, publisherArgument, errorType);
                        }
                    }
                }
                if (isFuture) {
                    // Bridge the publisher to a CompletableFuture; a 404 completes with null.
                    CompletableFuture<Object> future = new CompletableFuture<>();
                    publisher.subscribe(new CompletionAwareSubscriber<Object>() {
                        AtomicReference<Object> reference = new AtomicReference<>();

                        @Override
                        protected void doOnSubscribe(Subscription subscription) {
                            subscription.request(1);
                        }

                        @Override
                        protected void doOnNext(Object message) {
                            if (!Void.class.isAssignableFrom(argumentType)) {
                                reference.set(message);
                            }
                        }

                        @Override
                        protected void doOnError(Throwable t) {
                            if (t instanceof HttpClientResponseException) {
                                HttpClientResponseException e = (HttpClientResponseException) t;
                                if (e.getStatus() == HttpStatus.NOT_FOUND) {
                                    future.complete(null);
                                    return;
                                }
                            }
                            if (LOG.isErrorEnabled()) {
                                LOG.error("Client [" + methodDeclaringType.getName() + "] received HTTP error response: " + t.getMessage(), t);
                            }
                            future.completeExceptionally(t);
                        }

                        @Override
                        protected void doOnComplete() {
                            future.complete(reference.get());
                        }
                    });
                    return future;
                } else {
                    // Convert to the declared reactive type and apply result transformers.
                    Object finalPublisher = conversionService.convert(publisher, javaReturnType).orElseThrow(() -> new HttpClientException("Cannot convert response publisher to Reactive type (Unsupported Reactive type): " + javaReturnType));
                    for (ReactiveClientResultTransformer transformer : transformers) {
                        finalPublisher = transformer.transform(finalPublisher);
                    }
                    return finalPublisher;
                }
            } else {
                // Blocking return types.
                BlockingHttpClient blockingHttpClient = httpClient.toBlocking();
                if (void.class != javaReturnType) {
                    request.accept(acceptTypes);
                }
                if (HttpResponse.class.isAssignableFrom(javaReturnType)) {
                    return blockingHttpClient.exchange(request, returnType.asArgument().getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT), errorType);
                } else if (void.class == javaReturnType) {
                    blockingHttpClient.exchange(request, null, errorType);
                    return null;
                } else {
                    try {
                        return blockingHttpClient.retrieve(request, returnType.asArgument(), errorType);
                    } catch (RuntimeException t) {
                        // A 404 maps to Optional.empty()/null instead of an exception.
                        if (t instanceof HttpClientResponseException && ((HttpClientResponseException) t).getStatus() == HttpStatus.NOT_FOUND) {
                            if (javaReturnType == Optional.class) {
                                return Optional.empty();
                            }
                            return null;
                        } else {
                            throw t;
                        }
                    }
                }
            }
        }
        // try other introduction advice
        return context.proceed();
    }
}
public class CalculateDateExtensions {

    /**
     * Adds the given number of minutes to the given {@link Date} and returns the result.
     * Negative values may be used to obtain a date in the past.
     *
     * @param date
     *            The Date object to add the minutes to.
     * @param addMinutes
     *            The minutes to add (may be negative).
     * @return The resulting Date object.
     */
    public static Date addMinutes(final Date date, final int addMinutes) {
        final Calendar dateOnCalendar = Calendar.getInstance();
        dateOnCalendar.setTime(date);
        dateOnCalendar.add(Calendar.MINUTE, addMinutes);
        return dateOnCalendar.getTime();
    }
}
public class ResultSetConstraint { /** * Retrieve the { @ link WQUriControlField # EXPAND } parameter as a Set ( or " all " if none is defined ) * @ return */ public Set < String > getExpand ( ) { } }
List < String > values = parameters . get ( WQUriControlField . EXPAND . getName ( ) ) ; if ( values == null || values . isEmpty ( ) ) return Collections . singleton ( "all" ) ; else return new HashSet < > ( values ) ;
public class sdx_license { /** * Use this operation to get SDX license information . */ public static sdx_license get ( nitro_service client ) throws Exception { } }
sdx_license resource = new sdx_license ( ) ; resource . validate ( "get" ) ; return ( ( sdx_license [ ] ) resource . get_resources ( client ) ) [ 0 ] ;
public class EclipseHack { /** * Constructs a map from name to method of the no - argument methods in the given type . We need this * because an ExecutableElement returned by { @ link Elements # getAllMembers } will not compare equal * to the original ExecutableElement if { @ code getAllMembers } substituted type parameters , as it * does in Eclipse . */ private Map < Name , ExecutableElement > noArgMethodsIn ( DeclaredType in ) { } }
TypeElement autoValueType = MoreElements . asType ( typeUtils . asElement ( in ) ) ; List < ExecutableElement > allMethods = ElementFilter . methodsIn ( elementUtils . getAllMembers ( autoValueType ) ) ; Map < Name , ExecutableElement > map = new LinkedHashMap < Name , ExecutableElement > ( ) ; for ( ExecutableElement method : allMethods ) { if ( method . getParameters ( ) . isEmpty ( ) ) { map . put ( method . getSimpleName ( ) , method ) ; } } return map ;
public class JMPath {

    /**
     * Applies {@code function} to every sub-file path under the start directory and returns the
     * list of results.
     *
     * @param <R>                the result type of the mapping function
     * @param startDirectoryPath the directory to start from
     * @param maxDepth           the maximum depth to descend
     * @param function           the mapping applied to each sub-file path
     * @return the list of mapped results
     */
    public static <R> List<R> applySubFilePathsAndGetAppliedList(Path startDirectoryPath, int maxDepth, Function<Path, R> function) {
        // Delegate to the filtered overload using an always-true predicate (no filtering).
        return applySubFilePathsAndGetAppliedList(startDirectoryPath, maxDepth, JMPredicate.getTrue(), function);
    }
}
public class ClusterHeartbeatManager {

    /**
     * Suspects the {@code member} if it has not sent any heartbeats in
     * {@link GroupProperty#MAX_NO_HEARTBEAT_SECONDS}. If it has not sent any heartbeats in
     * {@link #HEART_BEAT_INTERVAL_FACTOR} heartbeat intervals, it will log a warning.
     *
     * @param now    the current cluster clock time
     * @param member the member which needs to be checked
     * @return whether the member is (now or already) suspected
     */
    private boolean suspectMemberIfNotHeartBeating(long now, Member member) {
        // Already suspected: nothing further to do.
        if (clusterService.getMembershipManager().isMemberSuspected(member.getAddress())) {
            return true;
        }
        long lastHeartbeat = heartbeatFailureDetector.lastHeartbeat(member);
        if (!heartbeatFailureDetector.isAlive(member, now)) {
            // Failure detector declared the member dead: suspect it with a detailed reason.
            double suspicionLevel = heartbeatFailureDetector.suspicionLevel(member, now);
            String reason = format("Suspecting %s because it has not sent any heartbeats since %s."
                    + " Now: %s, heartbeat timeout: %d ms, suspicion level: %.2f",
                    member, timeToString(lastHeartbeat), timeToString(now), maxNoHeartbeatMillis, suspicionLevel);
            logger.warning(reason);
            clusterService.suspectMember(member, reason, true);
            return true;
        }
        // Member is still considered alive, but log a diagnostic if it has been quiet
        // for several heartbeat intervals.
        if (logger.isFineEnabled() && (now - lastHeartbeat) > heartbeatIntervalMillis * HEART_BEAT_INTERVAL_FACTOR) {
            double suspicionLevel = heartbeatFailureDetector.suspicionLevel(member, now);
            logger.fine(format("Not receiving any heartbeats from %s since %s, suspicion level: %.2f",
                    member, timeToString(lastHeartbeat), suspicionLevel));
        }
        return false;
    }
}
public class SubtypeOperator {

    /**
     * Check whether a type is equivalent to <code>void</code> or not. The complexities of Whiley's
     * type system mean that this is not always obvious. For example, the type
     * <code>int &amp; (!int)</code> is equivalent to <code>void</code>, as is <code>!any</code>.
     * Recursive types such as <code>type T is { T t }</code> are only equivalent to
     * <code>void</code> under the inductive interpretation of types assumed in Whiley.
     *
     * @param type      the type being tested for equivalence to void
     * @param lifetimes the within relation between lifetimes used during the emptiness test
     * @return true if the type is equivalent to void
     */
    public boolean isVoid(SemanticType type, LifetimeRelation lifetimes) {
        // A type is void iff testing it against itself (both sides in maximally
        // positive position) yields emptiness.
        return emptinessTest.isVoid(type, EmptinessTest.PositiveMax, type, EmptinessTest.PositiveMax, lifetimes);
    }
}
public class KafkaHelper { /** * Creates a new { @ link KafkaProducer } instance , with custom configuration * properties . * Note : custom configuration properties will be populated < i > after < / i > and * < i > additional / overridden < / i > to the default configuration . * @ param type * @ param bootstrapServers * @ param customProps * @ return * @ since 1.2.1 */ public static KafkaProducer < String , byte [ ] > createKafkaProducer ( ProducerType type , String bootstrapServers , Properties customProps ) { } }
Properties props = buildKafkaProducerProps ( type , bootstrapServers , customProps ) ; KafkaProducer < String , byte [ ] > producer = new KafkaProducer < > ( props ) ; return producer ;
public class CacheOnDisk { /** * This method is used to update expiration times in GC and disk emtry header */ public int updateExpirationTime ( Object id , long oldExpirationTime , int size , long newExpirationTime , long newValidatorExpirationTime ) { } }
int returnCode = this . htod . updateExpirationTime ( id , oldExpirationTime , size , newExpirationTime , newValidatorExpirationTime ) ; if ( returnCode == HTODDynacache . DISK_EXCEPTION ) { stopOnError ( this . htod . diskCacheException ) ; } return returnCode ;
public class PhotosLicensesApi { /** * Fetches a list of available photo licenses for Flickr . * < br > * This method does not require authentication . * @ return object containing a list of currently available licenses . * @ throws JinxException if there are any errors . * @ see < a href = " https : / / www . flickr . com / services / api / flickr . photos . licenses . getInfo . html " > flickr . photos . licenses . getInfo < / a > */ public Licenses getInfo ( ) throws JinxException { } }
Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.photos.licenses.getInfo" ) ; return jinx . flickrGet ( params , Licenses . class , false ) ;
public class DistributedWorkManagerStatisticsImpl { /** * Send : doWork accepted */ void sendDeltaDoWorkAccepted ( ) { } }
doWorkAccepted . incrementAndGet ( ) ; if ( trace ) log . tracef ( "sendDeltaDoWorkAccepted: %s" , workManagers ) ; if ( own != null && transport != null && transport . isInitialized ( ) ) { for ( Address address : workManagers ) { if ( ! own . equals ( address ) ) transport . deltaDoWorkAccepted ( address ) ; } }
public class TrackerStats { /** * Increment the number of succeeded tasks on a tracker . * @ param trackerName The name of the tracker . */ public void recordSucceededTask ( String trackerName ) { } }
synchronized ( this ) { NodeUsageReport usageReport = getReportUnprotected ( trackerName ) ; usageReport . setNumSucceeded ( usageReport . getNumSucceeded ( ) + 1 ) ; }
public class SQLExpressions {

    /**
     * REGR_AVGX evaluates the average of the independent variable (arg2) of the regression line.
     *
     * @param arg1 first arg
     * @param arg2 second arg
     * @return regr_avgx(arg1, arg2)
     */
    public static WindowOver<Double> regrAvgx(Expression<? extends Number> arg1, Expression<? extends Number> arg2) {
        // Wraps the two arguments in a window-capable REGR_AVGX expression of type Double.
        return new WindowOver<Double>(Double.class, SQLOps.REGR_AVGX, arg1, arg2);
    }
}
public class WaveBase { /** * { @ inheritDoc } */ @ Override public Wave waveBeanList ( final List < WaveBean > waveBeanList ) { } }
if ( waveBeanList != null && ! waveBeanList . isEmpty ( ) ) { waveBeanList . forEach ( wb -> getWaveBeanMap ( ) . put ( wb . getClass ( ) , wb ) ) ; } return this ;
public class PojoDataParser { /** * { @ inheritDoc } */ @ NonNull @ Override public Card parseSingleGroup ( @ Nullable JSONObject data , final ServiceManager serviceManager ) { } }
if ( data == null ) { return Card . NaN ; } final CardResolver cardResolver = serviceManager . getService ( CardResolver . class ) ; Preconditions . checkState ( cardResolver != null , "Must register CardResolver into ServiceManager first" ) ; final MVHelper cellResolver = serviceManager . getService ( MVHelper . class ) ; Preconditions . checkState ( cellResolver != null , "Must register CellResolver into ServiceManager first" ) ; final String cardType = data . optString ( Card . KEY_TYPE ) ; if ( ! TextUtils . isEmpty ( cardType ) ) { final Card card = cardResolver . create ( cardType ) ; if ( card != null ) { card . serviceManager = serviceManager ; card . parseWith ( data , cellResolver ) ; card . type = data . optInt ( Card . KEY_TYPE , - 1 ) ; card . stringType = cardType ; if ( card . isValid ( ) ) { if ( card . style . slidable ) { return new SlideCard ( card ) ; } else { return card ; } } } else { final Card cellCard = new WrapCellCard ( ) ; if ( cellCard != null ) { cellCard . serviceManager = serviceManager ; cellCard . parseWith ( data , cellResolver ) ; cellCard . setStringType ( TangramBuilder . TYPE_CONTAINER_1C_FLOW ) ; if ( cellCard . isValid ( ) ) { return cellCard ; } } } } else { LogUtils . w ( TAG , "Invalid card type when parse JSON data" ) ; } return Card . NaN ;
public class ZooClassDef { /** * Only to be used during database startup to load the schema - tree . * @ param superDef The super class */ public void associateSuperDef ( ZooClassDef superDef ) { } }
if ( this . superDef != null ) { throw new IllegalStateException ( ) ; } if ( superDef == null ) { throw new IllegalArgumentException ( ) ; } // class invariant if ( superDef . getOid ( ) != oidSuper ) { throw new IllegalStateException ( "s-oid= " + oidSuper + " / " + superDef . getOid ( ) + " class=" + className ) ; } this . superDef = superDef ;
public class AbstractIoSession { /** * TODO Add method documentation */ private String getServiceName ( ) { } }
TransportMetadata tm = getTransportMetadata ( ) ; if ( tm == null ) { return "null" ; } return tm . getProviderName ( ) + ' ' + tm . getName ( ) ;
public class PendingCloudwatchLogsExports { /** * Log types that are in the process of being enabled . After they are enabled , these log types are exported to * CloudWatch Logs . * @ return Log types that are in the process of being enabled . After they are enabled , these log types are exported * to CloudWatch Logs . */ public java . util . List < String > getLogTypesToDisable ( ) { } }
if ( logTypesToDisable == null ) { logTypesToDisable = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return logTypesToDisable ;
public class MCDRGImpl {

    /**
     * <!-- begin-user-doc -->
     * Resets the given feature to its default value (EMF-generated unset logic).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.MCDRG__RG_LENGTH:
                // Restore the RG length attribute to its generated default.
                setRGLength(RG_LENGTH_EDEFAULT);
                return;
            case AfplibPackage.MCDRG__TRIPLETS:
                // Containment list: unsetting means clearing it.
                getTriplets().clear();
                return;
        }
        // Unknown feature IDs are handled by the superclass.
        super.eUnset(featureID);
    }
}
public class GenericDataSinkBase { /** * Sets the sink to partition the records into ranges over the given ordering . * The bucket boundaries are determined using the given data distribution . * @ param partitionOrdering The record ordering over which to partition in ranges . * @ param distribution The distribution to use for the range partitioning . */ public void setRangePartitioned ( Ordering partitionOrdering , DataDistribution distribution ) { } }
if ( partitionOrdering . getNumberOfFields ( ) != distribution . getNumberOfFields ( ) ) { throw new IllegalArgumentException ( "The number of keys in the distribution must match number of ordered fields." ) ; } // TODO : check compatibility of distribution and ordering ( number and order of keys , key types , etc . // TODO : adapt partition ordering to data distribution ( use prefix of ordering ) this . partitionOrdering = partitionOrdering ; this . distribution = distribution ;
public class RolloverLogBase {

    /**
     * Tries to open the log. Called from inside _logLock.
     */
    private void openLog() {
        // Release any previously open stream before reopening.
        closeLogStream();
        WriteStream os = _os;
        _os = null;
        IoUtil.close(os);
        // Resolve the target path; fall back to the time-based path pattern.
        Path path = getPath();
        if (path == null) {
            path = getPath(CurrentTime.currentTime());
        }
        Path parent = path.getParent();
        try {
            // Make sure the log directory exists before opening the file.
            if (!Files.isDirectory(parent)) {
                Files.createDirectory(parent);
            }
        } catch (Exception e) {
            logWarning(L.l("Can't create log directory {0}.\n Exception={1}", parent, e), e);
        }
        Exception exn = null;
        // Retry the open a few times; the last failure is remembered in exn.
        for (int i = 0; i < 3 && _os == null; i++) {
            try {
                OutputStream out = Files.newOutputStream(path, StandardOpenOption.APPEND);
                _os = new WriteStream(out);
            } catch (IOException e) {
                exn = e;
            }
        }
        String pathName = path.toString();
        try {
            if (pathName.endsWith(".gz")) {
                // Keep the raw stream in _zipOut and write through a gzip wrapper.
                _zipOut = _os;
                _os = new WriteStream(new GZIPOutputStream(_zipOut));
            } else if (pathName.endsWith(".zip")) {
                throw new ConfigException("Can't support .zip in path-format");
            }
        } catch (Exception e) {
            // Only remember the wrapping failure if opening itself succeeded.
            if (exn == null)
                exn = e;
        }
        if (exn != null)
            logWarning(L.l("Can't create log for {0}.\n User={1} Exception={2}", path, System.getProperty("user.name"), exn), exn);
    }
}
public class S3Dispatcher {

    /**
     * Handles PUT /bucket/id — stores the uploaded object, selected request headers
     * as object properties, and responds with the computed ETag.
     *
     * @param ctx the context describing the current request
     * @param bucket the bucket containing the object to upload
     * @param id name of the object to upload
     */
    private void putObject(WebContext ctx, Bucket bucket, String id, InputStreamHandler inputStream) throws IOException {
        StoredObject object = bucket.getObject(id);
        if (inputStream == null) {
            signalObjectError(ctx, HttpResponseStatus.BAD_REQUEST, "No content posted");
            return;
        }
        // Stream the request body into the object's backing file.
        try (FileOutputStream out = new FileOutputStream(object.getFile())) {
            ByteStreams.copy(inputStream, out);
        }
        // Collect user metadata and selected standard headers as object properties.
        Map<String, String> properties = Maps.newTreeMap();
        for (String name : ctx.getRequest().headers().names()) {
            String nameLower = name.toLowerCase();
            if (nameLower.startsWith("x-amz-meta-") || "content-md5".equals(nameLower)
                    || "content-type".equals(nameLower) || "x-amz-acl".equals(nameLower)) {
                // NOTE(review): the header is stored under its original casing, but the
                // MD5 check below looks up the exact key "Content-MD5" — a client sending
                // e.g. "content-md5" would bypass the check. Confirm whether the map's
                // comparator is case-insensitive.
                properties.put(name, ctx.getHeader(name));
            }
        }
        // Verify the client-supplied Content-MD5 (base64) against the stored file.
        HashCode hash = Files.hash(object.getFile(), Hashing.md5());
        String md5 = BaseEncoding.base64().encode(hash.asBytes());
        String contentMd5 = properties.get("Content-MD5");
        if (properties.containsKey("Content-MD5") && !md5.equals(contentMd5)) {
            // Checksum mismatch: discard the stored object and report the error.
            object.delete();
            signalObjectError(ctx, HttpResponseStatus.BAD_REQUEST,
                    Strings.apply("Invalid MD5 checksum (Input: %s, Expected: %s)", contentMd5, md5));
            return;
        }
        // The ETag is the hex-encoded MD5 of the content.
        String etag = BaseEncoding.base16().encode(hash.asBytes());
        properties.put(HTTP_HEADER_NAME_ETAG, etag);
        object.storeProperties(properties);
        Response response = ctx.respondWith();
        response.addHeader(HTTP_HEADER_NAME_ETAG, etag(etag)).status(HttpResponseStatus.OK);
        response.addHeader(HttpHeaderNames.ACCESS_CONTROL_EXPOSE_HEADERS, HTTP_HEADER_NAME_ETAG);
        signalObjectSuccess(ctx);
    }
}
public class AutoScalingSettingsDescription { /** * Information about the scaling policies . * @ param scalingPolicies * Information about the scaling policies . */ public void setScalingPolicies ( java . util . Collection < AutoScalingPolicyDescription > scalingPolicies ) { } }
if ( scalingPolicies == null ) { this . scalingPolicies = null ; return ; } this . scalingPolicies = new java . util . ArrayList < AutoScalingPolicyDescription > ( scalingPolicies ) ;
public class HiCO {

    /**
     * Computes the correlation distance between the two subspaces defined by the
     * specified PCAs: each PCA's subspace is extended by the other's strong
     * eigenvectors that lie farther than delta from it, and the larger resulting
     * correlation dimension is returned.
     *
     * @param pca1 first PCA
     * @param pca2 second PCA
     * @param dimensionality the dimensionality of the data space
     * @return the correlation distance between the two subspaces
     */
    public int correlationDistance(PCAFilteredResult pca1, PCAFilteredResult pca2, int dimensionality) {
        // TODO: Can we delay copying the matrixes?
        // pca of rv1
        double[][] v1t = copy(pca1.getEigenvectors());
        double[][] v1t_strong = pca1.getStrongEigenvectors();
        int lambda1 = pca1.getCorrelationDimension();
        // pca of rv2
        double[][] v2t = copy(pca2.getEigenvectors());
        double[][] v2t_strong = pca2.getStrongEigenvectors();
        int lambda2 = pca2.getCorrelationDimension();
        // for all strong eigenvectors of rv2
        double[][] m1_czech = pca1.dissimilarityMatrix();
        for (int i = 0; i < v2t_strong.length; i++) {
            double[] v2_i = v2t_strong[i];
            // check if the squared distance of v2_i to the space of rv1 exceeds delta^2
            // (i.e., whether v2_i spans up a new dimension)
            double distsq = squareSum(v2_i) - transposeTimesTimes(v2_i, m1_czech, v2_i);
            // if so, insert v2_i into v1, recompute m1_czech, and increase lambda1
            if (lambda1 < dimensionality && distsq > deltasq) {
                adjust(v1t, v2_i, lambda1++);
                // TODO: make this incremental?
                double[] e1_czech_d = new double[v1t.length];
                Arrays.fill(e1_czech_d, 0, lambda1, 1);
                m1_czech = transposeDiagonalTimes(v1t, e1_czech_d, v1t);
            }
        }
        // for all strong eigenvectors of rv1 (symmetric to the loop above)
        double[][] m2_czech = pca2.dissimilarityMatrix();
        for (int i = 0; i < v1t_strong.length; i++) {
            double[] v1_i = v1t_strong[i];
            double distsq = squareSum(v1_i) - transposeTimesTimes(v1_i, m2_czech, v1_i);
            if (lambda2 < dimensionality && distsq > deltasq) {
                adjust(v2t, v1_i, lambda2++);
                // TODO: make this incremental?
                // NOTE(review): e2_czech_d is sized by v1t.length here; presumably
                // v1t.length == v2t.length (both span the full data dimensionality),
                // otherwise this should be v2t.length — confirm.
                double[] e2_czech_d = new double[v1t.length];
                Arrays.fill(e2_czech_d, 0, lambda2, 1);
                m2_czech = transposeDiagonalTimes(v2t, e2_czech_d, v2t);
            }
        }
        return Math.max(lambda1, lambda2);
    }
}
public class MgmtSystemManagementResource { /** * Returns a list of all caches . * @ return a list of caches for all tenants */ @ Override @ PreAuthorize ( SpringEvalExpressions . HAS_AUTH_SYSTEM_ADMIN ) public ResponseEntity < Collection < MgmtSystemCache > > getCaches ( ) { } }
final Collection < String > cacheNames = cacheManager . getCacheNames ( ) ; return ResponseEntity . ok ( cacheNames . stream ( ) . map ( cacheManager :: getCache ) . map ( this :: cacheRest ) . collect ( Collectors . toList ( ) ) ) ;
public class Gamma {

    /**
     * Log of the Gamma function. Lanczos approximation (6 terms) for positive x;
     * the reflection formula is used for negative x.
     */
    public static double lgamma(double x) {
        double xcopy = x;
        double fg = 0.0;
        double first = x + LANCZOS_SMALL_GAMMA + 0.5;
        double second = LANCZOS_COEFF[0];
        if (x >= 0.0) {
            if (x >= 1.0 && x - (int) x == 0.0) {
                // Positive integer: lgamma(n) == log((n-1)!), computed exactly.
                // NOTE(review): this "Math" must be a project math utility —
                // java.lang.Math has no logFactorial. Confirm the import.
                fg = Math.logFactorial((int) x - 1);
            } else {
                // Lanczos series: accumulate the coefficient sum over shifted arguments.
                first -= (x + 0.5) * Math.log(first);
                for (int i = 1; i <= LANCZOS_N; i++) {
                    second += LANCZOS_COEFF[i] / ++xcopy;
                }
                fg = Math.log(Math.sqrt(2.0 * Math.PI) * second / x) - first;
            }
        } else {
            // Reflection formula: Gamma(x) = pi / (Gamma(1-x) * sin(pi*x)).
            fg = Math.PI / (gamma(1.0 - x) * Math.sin(Math.PI * x));
            // Infinite values (poles at non-positive integers) are returned as-is.
            if (fg != 1.0 / 0.0 && fg != -1.0 / 0.0) {
                if (fg < 0) {
                    // Gamma is negative on parts of the negative axis; its log is undefined.
                    throw new IllegalArgumentException("The gamma function is negative: " + fg);
                } else {
                    fg = Math.log(fg);
                }
            }
        }
        return fg;
    }
}
public class StopMojo { /** * Tries to stops the running Wisdom server . * @ throws MojoExecutionException if the Wisdom server cannot be stopped */ @ Override public void execute ( ) throws MojoExecutionException { } }
new WisdomExecutor ( ) . stop ( this ) ; File pid = new File ( getWisdomRootDirectory ( ) , "RUNNING_PID" ) ; if ( WisdomExecutor . waitForFileDeletion ( pid ) ) { getLog ( ) . info ( "Wisdom server stopped." ) ; } else { throw new MojoExecutionException ( "The " + pid . getName ( ) + " file still exists after having stopped the " + "Wisdom instance - check log" ) ; }
public class CmsADEConfigCacheState { /** * Gets all the detail pages for a given type . < p > * @ param type the name of the type * @ return the detail pages for that type */ protected List < String > getDetailPages ( String type ) { } }
List < String > result = new ArrayList < String > ( ) ; for ( CmsADEConfigDataInternal configData : m_siteConfigurationsByPath . values ( ) ) { for ( CmsDetailPageInfo pageInfo : wrap ( configData ) . getDetailPagesForType ( type ) ) { result . add ( pageInfo . getUri ( ) ) ; } } return result ;
public class Transporter {

    /**
     * --- SUBSCRIBE ---
     * Subscribes to the channel derived from the given command and node ID.
     *
     * @param cmd    command name used to build the channel name
     * @param nodeID target node ID passed to {@code channel()} — exact null/broadcast
     *               semantics depend on that helper; TODO confirm
     * @return the Promise returned by {@code subscribe(String)}
     */
    public Promise subscribe(String cmd, String nodeID) {
        // Delegate to the single-argument overload with the composed channel name.
        return subscribe(channel(cmd, nodeID));
    }
}
public class LineItemSummary {

    /**
     * Sets the creationDateTime value for this LineItemSummary.
     *
     * @param creationDateTime the creation time; this attribute may be {@code null}
     *        for line items created before this feature was introduced
     */
    public void setCreationDateTime(com.google.api.ads.admanager.axis.v201808.DateTime creationDateTime) {
        this.creationDateTime = creationDateTime;
    }
}
public class ToStream { /** * Receive notification of character data . * @ param s The string of characters to process . * @ throws org . xml . sax . SAXException */ public void characters ( String s ) throws org . xml . sax . SAXException { } }
if ( m_inEntityRef && ! m_expandDTDEntities ) return ; final int length = s . length ( ) ; if ( length > m_charsBuff . length ) { m_charsBuff = new char [ length * 2 + 1 ] ; } s . getChars ( 0 , length , m_charsBuff , 0 ) ; characters ( m_charsBuff , 0 , length ) ;
public class Model {

    /**
     * Finder method for DB queries based on the table represented by this model. Usually the SQL
     * starts with: {@code "select * from table_name where " + subquery} where table_name is the
     * table represented by this model.
     * <p>
     * Example:
     * <pre>
     * List&lt;Person&gt; teenagers = Person.where("age &gt; ? and age &lt; ?", 12, 20);
     * </pre>
     * Limit, offset and order by can be chained:
     * <pre>
     * List&lt;Person&gt; teenagers = Person.where("age &gt; ? and age &lt; ?", 12, 20).offset(101).limit(20).orderBy("age");
     * </pre>
     * This is a great way to build paged applications.
     *
     * @param subquery conditions that normally follow the "where" clause, e.g.
     *        {@code "department = ? and dob > ?"}. If this value is "*" and no parameters are
     *        provided, then {@link #findAll()} is executed.
     * @param params parameters corresponding to the placeholders in the subquery
     * @return instance of {@code LazyList<Model>} containing results
     */
    public static <T extends Model> LazyList<T> where(String subquery, Object... params) {
        // Delegate to the shared implementation, binding the concrete model class.
        return ModelDelegate.where(Model.<T>modelClass(), subquery, params);
    }
}
public class PdfPSXObject { /** * Gets the stream representing this object . * @ paramcompressionLevelthe compressionLevel * @ return the stream representing this template * @ since2.1.3 ( replacing the method without param compressionLevel ) * @ throws IOException */ PdfStream getFormXObject ( int compressionLevel ) throws IOException { } }
PdfStream s = new PdfStream ( content . toByteArray ( ) ) ; s . put ( PdfName . TYPE , PdfName . XOBJECT ) ; s . put ( PdfName . SUBTYPE , PdfName . PS ) ; s . flateCompress ( compressionLevel ) ; return s ;
public class ElementPlugin { /** * Unregisters a listener for the IPluginEvent callback event . * @ param listener Listener to be unregistered . */ public void unregisterListener ( IPluginEventListener listener ) { } }
if ( pluginEventListeners2 != null && pluginEventListeners2 . contains ( listener ) ) { pluginEventListeners2 . remove ( listener ) ; listener . onPluginEvent ( new PluginEvent ( this , PluginAction . UNSUBSCRIBE ) ) ; }
public class SemiTransactionalHiveMetastore {

    /**
     * Updates the statistics of every listed partition of the given table in a
     * single exclusive metastore operation.
     * TODO: Allow updating statistics for 2 tables in the same transaction
     *
     * @param table the table whose partitions are updated
     * @param partitionStatisticsMap new statistics keyed by partition values
     */
    public synchronized void setPartitionStatistics(Table table, Map<List<String>, PartitionStatistics> partitionStatisticsMap) {
        // For each partition, the delegate receives a function merging the new
        // statistics into the currently stored ones.
        setExclusive((delegate, hdfsEnvironment) ->
                partitionStatisticsMap.forEach((partitionValues, newPartitionStats) ->
                        delegate.updatePartitionStatistics(
                                table.getDatabaseName(),
                                table.getTableName(),
                                getPartitionName(table, partitionValues),
                                oldPartitionStats -> updatePartitionStatistics(oldPartitionStats, newPartitionStats))));
    }
}
public class DatePickerSettings { /** * zApplyAllowKeyboardEditing , This applies the named setting to the parent component . */ void zApplyAllowKeyboardEditing ( ) { } }
if ( parentDatePicker == null ) { return ; } // Set the editability of the date picker text field . parentDatePicker . getComponentDateTextField ( ) . setEditable ( allowKeyboardEditing ) ; // Set the text field border color based on whether the text field is editable . Color textFieldBorderColor = ( allowKeyboardEditing ) ? InternalConstants . colorEditableTextFieldBorder : InternalConstants . colorNotEditableTextFieldBorder ; parentDatePicker . getComponentDateTextField ( ) . setBorder ( new CompoundBorder ( new MatteBorder ( 1 , 1 , 1 , 1 , textFieldBorderColor ) , new EmptyBorder ( 1 , 3 , 2 , 2 ) ) ) ;
public class AfplibPackageImpl {

    /**
     * <!-- begin-user-doc -->
     * Lazily resolves the MMCPARAMETER1 enum from the registered EMF package
     * (generated accessor).
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getMMCPARAMETER1() {
        if (mmcparameter1EEnum == null) {
            // Classifier index 50 is fixed by the code generator for this package model.
            mmcparameter1EEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(50);
        }
        return mmcparameter1EEnum;
    }
}
public class BigDecimal {

    /**
     * Performs divideAndRound for (dividend0 * dividend1, divisor).
     * Returns null if the quotient can't fit into a long value.
     */
    private static BigDecimal multiplyDivideAndRound(long dividend0, long dividend1, long divisor, int scale, int roundingMode, int preferredScale) {
        // Sign of the quotient; operands are then reduced to their magnitudes.
        int qsign = Long.signum(dividend0) * Long.signum(dividend1) * Long.signum(divisor);
        dividend0 = Math.abs(dividend0);
        dividend1 = Math.abs(dividend1);
        divisor = Math.abs(divisor);
        // Multiply dividend0 * dividend1 into a 128-bit product held as four
        // 32-bit limbs d3..d0 (schoolbook multiplication on 32-bit halves).
        long d0_hi = dividend0 >>> 32;
        long d0_lo = dividend0 & LONG_MASK;
        long d1_hi = dividend1 >>> 32;
        long d1_lo = dividend1 & LONG_MASK;
        long product = d0_lo * d1_lo;
        long d0 = product & LONG_MASK;
        long d1 = product >>> 32;
        product = d0_hi * d1_lo + d1;
        d1 = product & LONG_MASK;
        long d2 = product >>> 32;
        product = d0_lo * d1_hi + d1;
        d1 = product & LONG_MASK;
        d2 += product >>> 32;
        long d3 = d2 >>> 32;
        d2 &= LONG_MASK;
        product = d0_hi * d1_hi + d2;
        d2 = product & LONG_MASK;
        d3 = ((product >>> 32) + d3) & LONG_MASK;
        // Reassemble the limbs into the high and low 64-bit halves.
        final long dividendHi = make64(d3, d2);
        final long dividendLo = make64(d1, d0);
        // Divide the 128-bit dividend by the divisor with the requested rounding.
        return divideAndRound128(dividendHi, dividendLo, divisor, qsign, scale, roundingMode, preferredScale);
    }
}
public class PropertyResolutionUtil { /** * Resolves a Union of System . env and System . getProperties ( ) and overridingProperties where the KeyValue - Pairs of the later have the highest precedence . * @ param overridingProperties * @ return Map or null if there are no entries that match the Persistence Filter { @ link # PROP _ FILTER } */ public Map < String , String > persistencePropertiesOverrides ( Map < String , String > overridingProperties ) { } }
if ( overridingProperties == null ) { throw new IllegalArgumentException ( "the property 'overridingProperties' is not allowed to be null." ) ; } Map < String , String > overridingProperttiesCopy = new HashMap < > ( overridingProperties ) ; return mergeFilteredMaps ( getSystemJavaxPersistenceOverrides ( ) , overridingProperttiesCopy ) ;
public class BaseEncoding { /** * Returns a { @ code ByteSink } that writes base - encoded bytes to the specified { @ code CharSink } . */ @ GwtIncompatible ( "ByteSink,CharSink" ) public final ByteSink encodingSink ( final CharSink encodedSink ) { } }
checkNotNull ( encodedSink ) ; return new ByteSink ( ) { @ Override public OutputStream openStream ( ) throws IOException { return encodingStream ( encodedSink . openStream ( ) ) ; } } ;
public class Base64Encoder {

    /**
     * URL-safe base64 encoding, rendering the encoded bytes as a String in the
     * given charset.
     *
     * @param source  the bytes to encode
     * @param charset the charset name used to turn the encoded bytes into a String
     * @return the encoded string
     * @since 3.0.6
     */
    public static String encodeUrlSafe(byte[] source, String charset) {
        // Second argument false — presumably disables multi-line output; confirm
        // against encodeUrlSafe(byte[], boolean).
        return StrUtil.str(encodeUrlSafe(source, false), charset);
    }
}
public class Translation {

    /**
     * Translates Google App Engine Datastore entities to an Acid House
     * {@code AppEngineGlobalTransaction} entity with {@code AppEngineDatastoreService}.
     *
     * @param transactions Google App Engine Datastore entities
     * @param datastore {@code AppEngineDatastoreService}
     * @return the translated transaction
     */
    @SuppressWarnings("unchecked")
    public static AppEngineGlobalTransaction toTransaction(Iterable<Entity> transactions, AppEngineDatastoreService datastore) {
        AppEngineGlobalTransaction transaction = null;
        // Collect log entities ordered by their sequence number (entity key id).
        Map<Long, Entity> logs = new TreeMap<Long, Entity>();
        for (Entity tx : transactions) {
            if (tx.getKind().equals(TRANSACTION_KIND)) {
                transaction = new AppEngineGlobalTransaction(tx.getKey().getName(), null, null);
            } else if (tx.getKind().equals(LOG_KIND)) {
                logs.put(tx.getKey().getId(), tx);
            }
        }
        // NOTE(review): if the iterable contains no TRANSACTION_KIND entity,
        // transaction stays null and transaction.logs() below will NPE —
        // presumably callers guarantee exactly one such entity; confirm.
        for (Long sequence : logs.keySet()) {
            Entity log = logs.get(sequence);
            // Deserialize the stored protos back into datastore entities.
            List<Entity> entities = new ArrayList<Entity>();
            for (Blob proto : (List<Blob>) log.getProperty(PROTO_PROPERTY)) {
                OnestoreEntity.EntityProto entityProto = new OnestoreEntity.EntityProto();
                entityProto.mergeFrom(proto.getBytes());
                entities.add(EntityTranslator.createFromPb(entityProto));
            }
            try {
                // Rebuild the log record with its operation and target object.
                Log l = new Log(sequence,
                        Enum.valueOf(Log.Operation.class, (String) log.getProperty(OPERATION_PROPERTY)),
                        toObject(Class.forName((String) log.getProperty(CLASS_PROPERTY)), entities,
                                new HashMap<com.google.appengine.api.datastore.Key, Object>(0), datastore));
                l.state(State.PREPARED);
                transaction.logs().add(l);
            } catch (ClassNotFoundException e) {
                throw new UncheckedException(e);
            }
        }
        return transaction;
    }
}
public class DaySchedule { /** * Shuts down the DaySchedule thread . */ public void shutdown ( ) { } }
interrupt ( ) ; try { join ( ) ; } catch ( Exception x ) { _logger . log ( Level . WARNING , "Failed to see DaySchedule thread joining" , x ) ; }
public class JdbcCpoAdapter {
    /**
     * Removes the object from the datasource. The object is assumed to exist;
     * the named DELETE Function Group defines how the delete is performed.
     *
     * @param name              name of the DELETE Function Group; null means the default rules are used.
     * @param obj               an object defined within the datasource metadata; must exist in the datasource.
     * @param wheres            CpoWhere beans constraining which beans are affected.
     * @param orderBy           the CpoOrderBy bean defining result ordering.
     * @param nativeExpressions native expressions embedded at run-time to augment the stored expression.
     * @return the number of objects deleted from the datasource.
     * @throws CpoException if there are errors accessing the datasource.
     */
    @Override
    public <T> long deleteObject(String name, T obj, Collection<CpoWhere> wheres, Collection<CpoOrderBy> orderBy, Collection<CpoNativeFunction> nativeExpressions) throws CpoException {
        // Thin delegate: all create/update/delete operations funnel through
        // the shared update pipeline, differing only in the group type.
        return processUpdateGroup(obj, JdbcCpoAdapter.DELETE_GROUP, name, wheres, orderBy, nativeExpressions);
    }
}
public class SAXSymbol { /** * Links left and right symbols together , i . e . removes this symbol from the string , also removing * any old digram from the hash table . * @ param left the left symbol . * @ param right the right symbol . */ public static void join ( SAXSymbol left , SAXSymbol right ) { } }
// System . out . println ( " performing the join of " + getPayload ( left ) + " and " // + getPayload ( right ) ) ; // check for an OLD digram existence - i . e . left must have a next symbol // if . n exists then we are joining TERMINAL symbols within the string , and must clean - up the // old digram if ( left . n != null ) { // System . out . println ( " " + getPayload ( left ) // + " use to be in the digram table , cleaning up " ) ; left . deleteDigram ( ) ; } // re - link left and right left . n = right ; right . p = left ;
public class GermanStyleRepeatedWordRule { /** * Is a unknown word ( has only letters and no PosTag ) */ private static boolean isUnknownWord ( AnalyzedTokenReadings token ) { } }
return token . isPosTagUnknown ( ) && token . getToken ( ) . length ( ) > 2 && token . getToken ( ) . matches ( "^[A-Za-zÄÖÜäöüß]+$" ) ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public FNCFNPRGLen createFNCFNPRGLenFromString ( EDataType eDataType , String initialValue ) { } }
FNCFNPRGLen result = FNCFNPRGLen . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class NodeIndexer {
    /**
     * Adds the path value to the document as the named field. The path value
     * is converted to an indexable string value using the name space mappings
     * with which this class has been created.
     *
     * @param doc        the document to which to add the field
     * @param fieldName  the name of the field to add
     * @param pathString the value for the field to add to the document
     */
    protected void addPathValue(Document doc, String fieldName, Object pathString) {
        // Stringify the raw path object and index it with PATH typing via the
        // norm-less field factory (see createFieldWithoutNorms).
        doc.add(createFieldWithoutNorms(fieldName, pathString.toString(), PropertyType.PATH));
    }
}
public class vpnvserver_sharefileserver_binding { /** * Use this API to fetch vpnvserver _ sharefileserver _ binding resources of given name . */ public static vpnvserver_sharefileserver_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
vpnvserver_sharefileserver_binding obj = new vpnvserver_sharefileserver_binding ( ) ; obj . set_name ( name ) ; vpnvserver_sharefileserver_binding response [ ] = ( vpnvserver_sharefileserver_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class ResourceObjectIncludeImpl {
    /**
     * Sets the object name and, if notification is required, emits an EMF SET
     * notification carrying the old and new values.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     *
     * @param newObjName the new object name
     * @generated
     */
    public void setObjName(String newObjName) {
        String oldObjName = objName;
        objName = newObjName;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.RESOURCE_OBJECT_INCLUDE__OBJ_NAME, oldObjName, objName));
    }
}
public class ListSimulationApplicationsRequestMarshaller {
    /**
     * Marshalls the given request's fields into the protocol marshaller.
     *
     * @param listSimulationApplicationsRequest the request to marshall; must not be null
     * @param protocolMarshaller                the target marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(ListSimulationApplicationsRequest listSimulationApplicationsRequest, ProtocolMarshaller protocolMarshaller) {
        if (listSimulationApplicationsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listSimulationApplicationsRequest.getVersionQualifier(), VERSIONQUALIFIER_BINDING);
            protocolMarshaller.marshall(listSimulationApplicationsRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listSimulationApplicationsRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(listSimulationApplicationsRequest.getFilters(), FILTERS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PlacesSampleActivity { /** * Return the view to the DraggablePanelState : minimized , maximized , closed to the right or * closed * to the left . * @ param draggableState to apply . */ private void updateDraggablePanelStateDelayed ( DraggableState draggableState ) { } }
Handler handler = new Handler ( ) ; switch ( draggableState ) { case MAXIMIZED : handler . postDelayed ( new Runnable ( ) { @ Override public void run ( ) { draggablePanel . maximize ( ) ; } } , DELAY_MILLIS ) ; break ; case MINIMIZED : handler . postDelayed ( new Runnable ( ) { @ Override public void run ( ) { draggablePanel . minimize ( ) ; } } , DELAY_MILLIS ) ; break ; case CLOSED_AT_LEFT : handler . postDelayed ( new Runnable ( ) { @ Override public void run ( ) { draggablePanel . setVisibility ( View . GONE ) ; draggablePanel . closeToLeft ( ) ; } } , DELAY_MILLIS ) ; break ; case CLOSED_AT_RIGHT : handler . postDelayed ( new Runnable ( ) { @ Override public void run ( ) { draggablePanel . setVisibility ( View . GONE ) ; draggablePanel . closeToRight ( ) ; } } , DELAY_MILLIS ) ; break ; default : draggablePanel . setVisibility ( View . GONE ) ; break ; }
public class AnnotationReader { /** * Returns a class annotation for the specified type if such an annotation is present , else null . * @ param < A > the type of the annotation * @ param clazz the target class * @ param annClass the Class object corresponding to the annotation type * @ return the target class ' s annotation for the specified annotation type if present on this element , else null * @ throws AnnotationReadException */ @ SuppressWarnings ( "unchecked" ) public < A extends Annotation > A getAnnotation ( final Class < ? > clazz , final Class < A > annClass ) throws AnnotationReadException { } }
if ( xmlInfo != null && xmlInfo . containsClassInfo ( clazz . getName ( ) ) ) { final ClassInfo classInfo = xmlInfo . getClassInfo ( clazz . getName ( ) ) ; if ( classInfo . containsAnnotationInfo ( annClass . getName ( ) ) ) { AnnotationInfo annInfo = classInfo . getAnnotationInfo ( annClass . getName ( ) ) ; try { return ( A ) annotationBuilder . buildAnnotation ( Class . forName ( annInfo . getClassName ( ) ) , annInfo ) ; } catch ( ClassNotFoundException e ) { throw new AnnotationReadException ( String . format ( "not found class '%s'" , annInfo . getClassName ( ) ) , e ) ; } } } return clazz . getAnnotation ( annClass ) ;
public class MainWindowController {
    /**
     * Refreshes the connection-related UI fields. Connectivity changes arrive
     * on the connection thread; this update must run on the FX thread because
     * UI fields must not be touched from the connection thread.
     */
    private void updateConnectionDetails() {
        Stage stage = (Stage) platformLabel.getScene().getWindow();
        connectedLabel.setText(connected ? "YES" : "NO");
        // If remote details are known, show name/version/platform and title.
        remoteInfo.ifPresent(remote -> {
            remoteNameLabel.setText(remote.getName());
            versionLabel.setText(remote.getMajorVersion() + "." + remote.getMinorVersion());
            platformLabel.setText(remote.getPlatform().getDescription());
            stage.setTitle("Connected to " + remote.getName());
        });
        // When disconnected, the title and remote fields are overwritten last,
        // so this wins even if remoteInfo was still present above.
        if (!connected) {
            stage.setTitle("Disconnected [" + remoteControl.getConnector().getConnectionName() + "]");
            menuLoadLabel.setText("NO");
            versionLabel.setText("");
            remoteNameLabel.setText("");
        }
    }
}
public class DemonstrationBase {
    /**
     * Opens one video file per input stream and starts a synchronized
     * processing thread. Before invoking this function make sure
     * waitingToOpenImage is false AND that the previous input has been stopped.
     *
     * @param reopen    if true, skips re-notifying input-change handlers
     * @param filePaths one video path per configured input stream
     */
    protected void openVideo(boolean reopen, String... filePaths) {
        // Debounce: only one open request may be in flight at a time.
        synchronized (lockStartingProcess) {
            if (startingProcess) {
                System.out.println("Ignoring video request. Detected spamming");
                return;
            }
            startingProcess = true;
        }
        // Stream count is fixed by the subclass; mismatched requests are a
        // programming error, not a user error.
        synchronized (inputStreams) {
            if (inputStreams.size() != filePaths.length)
                throw new IllegalArgumentException("Input streams not equal to " + filePaths.length + ". Override openVideo()");
        }
        stopAllInputProcessing();
        streamPaused = false;
        boolean failed = false;
        // Open every video and bind it to its cache; abort on the first failure.
        for (int which = 0; which < filePaths.length; which++) {
            CacheSequenceStream cache = inputStreams.get(which);
            SimpleImageSequence sequence = media.openVideo(filePaths[which], cache.getImageType());
            if (sequence == null) {
                failed = true;
                System.out.println("Can't find file. " + filePaths[which]);
                break;
            }
            configureVideo(which, sequence);
            synchronized (inputStreams) {
                cache.reset();
                cache.setSequence(sequence);
            }
        }
        if (!failed) {
            setInputName(new File(filePaths[0]).getName());
            synchronized (inputStreams) {
                inputMethod = InputMethod.VIDEO;
                streamPeriod = 33; // 33 ms between frames (~30 FPS) — presumably milliseconds
                if (threadProcess != null) throw new RuntimeException("There was still an active stream thread!");
                threadProcess = new SynchronizedStreamsThread();
            }
            // On a fresh open (not a reopen) tell handlers about the new
            // stream dimensions before processing starts.
            if (!reopen) {
                for (int i = 0; i < inputStreams.size(); i++) {
                    CacheSequenceStream stream = inputStreams.get(i);
                    handleInputChange(i, inputMethod, stream.getWidth(), stream.getHeight());
                }
            }
            threadPool.execute(threadProcess);
        } else {
            // Roll back to "no input" and release the debounce flag so the
            // user can try again.
            synchronized (inputStreams) {
                inputMethod = InputMethod.NONE;
                inputFilePath = null;
            }
            synchronized (lockStartingProcess) {
                startingProcess = false;
            }
            showRejectDiaglog("Can't open file");
        }
    }
}
public class EntityManagerFactoryImpl {
    /**
     * Return an instance of Metamodel interface for access to the metamodel of
     * the persistence unit. Scans the configured persistence units and returns
     * the first non-null metamodel found.
     *
     * NOTE(review): the IllegalStateException below is also reached when the
     * factory is open but no persistence unit yields a metamodel; its "has
     * been closed" message is misleading in that case — confirm intent.
     *
     * @return Metamodel instance
     * @throws IllegalStateException if the entity manager factory has been closed
     * @see javax.persistence.EntityManagerFactory#getMetamodel()
     */
    @Override
    public Metamodel getMetamodel() {
        if (isOpen()) {
            MetamodelImpl metamodel = null;
            for (String pu : persistenceUnits) {
                metamodel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(pu);
                if (metamodel != null) {
                    return metamodel;
                }
            }
        }
        throw new IllegalStateException("Entity manager factory has been closed.");
    }
}
public class Handler {
    /**
     * Flushes the pending-removal list: for each featurable queued for
     * deletion, notifies listeners, signals its destruction, and removes it
     * from the main collection. Notify featurable destroyed.
     */
    private void updateRemove() {
        if (willDelete) {
            for (final Integer id : toDelete) {
                final Featurable featurable = featurables.get(id);
                // Listeners are told first, while the featurable is still
                // registered, then destruction is signalled, then it is removed.
                for (final HandlerListener listener : listeners) {
                    listener.notifyHandlableRemoved(featurable);
                }
                featurable.getFeature(Identifiable.class).notifyDestroyed();
                featurables.remove(featurable, id);
            }
            toDelete.clear();
            willDelete = false;
        }
    }
}
public class SetUtil { /** * 如果set为null , 转化为一个安全的空Set . * 注意返回的Set不可写 , 写入会抛出UnsupportedOperationException . * @ see java . util . Collections # emptySet ( ) */ public static < T > Set < T > emptySetIfNull ( final Set < T > set ) { } }
return set == null ? ( Set < T > ) Collections . EMPTY_SET : set ;
public class FreemarkerProfilingTransformer {
    /**
     * Makes sure that this transformer is only applied on Freemarker versions
     * {@code >= 2.3.23} where the {@link Environment#getCurrentTemplate()}
     * method was made public. This prevents nasty {@link IllegalAccessError}s
     * and {@link NoSuchMethodError}s.
     */
    @Override
    public boolean isActive() {
        try {
            // Probe freemarker.core.Environment reflectively for a public,
            // zero-arg getCurrentTemplate() method.
            return hasMethodThat(named("getCurrentTemplate").and(ElementMatchers.<MethodDescription.InDefinedShape>isPublic()).and(takesArguments(0))).matches(new TypeDescription.ForLoadedType(Class.forName("freemarker.core.Environment")));
        } catch (ClassNotFoundException e) {
            // Freemarker is not on the classpath at all: nothing to instrument.
            return false;
        }
    }
}
public class AbstractTraceRegion { /** * Returns the nested trace regions . The list does not necessarily contain all the regions that will be returned by * the { @ link # leafIterator ( ) } . * @ return the list of directly nested regions . */ public final List < AbstractTraceRegion > getNestedRegions ( ) { } }
if ( nestedRegions == null ) return Collections . emptyList ( ) ; return Collections . unmodifiableList ( nestedRegions ) ;
public class V20170607164210_MigrateReopenedIndicesToAliases { /** * Create aliases for legacy reopened indices . */ @ Override public void upgrade ( ) { } }
this . indexSetService . findAll ( ) . stream ( ) . map ( mongoIndexSetFactory :: create ) . flatMap ( indexSet -> getReopenedIndices ( indexSet ) . stream ( ) ) . map ( indexName -> { LOG . debug ( "Marking index {} to be reopened using alias." , indexName ) ; return indexName ; } ) . forEach ( indices :: markIndexReopened ) ;
public class BingVideosImpl {
    /**
     * Calls the Bing Video Trending Search API and returns the list of
     * currently trending videos (broken out into categories such as Top Music
     * Videos), wrapped in a {@link ServiceResponse}.
     * <p>
     * All parameters are optional request headers / query parameters forwarded
     * verbatim to the service; see the Bing Video Search v7 API reference for
     * their exact semantics and mutual-exclusion rules (Accept-Language vs.
     * setLang, cc vs. mkt, client id/ip, location, safeSearch,
     * textDecorations, textFormat).
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the TrendingVideos object
     */
    public Observable<ServiceResponse<TrendingVideos>> trendingWithServiceResponseAsync(String acceptLanguage, String userAgent, String clientId, String clientIp, String location, String countryCode, String market, SafeSearch safeSearch, String setLang, Boolean textDecorations, TextFormat textFormat) {
        // Marker header identifying traffic from this SDK.
        final String xBingApisSDK = "true";
        return service.trending(xBingApisSDK, acceptLanguage, userAgent, clientId, clientIp, location, countryCode, market, safeSearch, setLang, textDecorations, textFormat).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<TrendingVideos>>>() {
            @Override
            public Observable<ServiceResponse<TrendingVideos>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize the raw HTTP response; surface any failure
                    // through the observable instead of throwing.
                    ServiceResponse<TrendingVideos> clientResponse = trendingDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
    }
}
public class Configuration { /** * Set the default styles . the case of the keys are not important . The retrieval will be case * insensitive . * @ param defaultStyle the mapping from geometry type name ( point , polygon , etc . . . ) to the style * to use for that type . */ public final void setDefaultStyle ( final Map < String , Style > defaultStyle ) { } }
this . defaultStyle = new HashMap < > ( defaultStyle . size ( ) ) ; for ( Map . Entry < String , Style > entry : defaultStyle . entrySet ( ) ) { String normalizedName = GEOMETRY_NAME_ALIASES . get ( entry . getKey ( ) . toLowerCase ( ) ) ; if ( normalizedName == null ) { normalizedName = entry . getKey ( ) . toLowerCase ( ) ; } this . defaultStyle . put ( normalizedName , entry . getValue ( ) ) ; }
public class NwwPanel {
    /**
     * Set the globe as flat sphere: replaces the current globe with an Earth
     * whose elevation model is all zeros, i.e. a smooth sphere without
     * terrain relief.
     */
    public void setFlatSphereGlobe() {
        Earth globe = new Earth();
        globe.setElevationModel(new ZeroElevationModel());
        wwd.getModel().setGlobe(globe);
        // Halt any current view movement before forcing a redraw with the new globe.
        wwd.getView().stopMovement();
        wwd.redraw();
    }
}
public class Caster { /** * converts a object to a QueryColumn , if possible , also variable declarations are allowed . this * method is used within the generated bytecode * @ param o * @ return * @ throws PageException * @ info used in bytecode generation */ public static QueryColumn toQueryColumn ( Object o , PageContext pc ) throws PageException { } }
if ( o instanceof QueryColumn ) return ( QueryColumn ) o ; if ( o instanceof String ) { o = VariableInterpreter . getVariableAsCollection ( pc , ( String ) o ) ; if ( o instanceof QueryColumn ) return ( QueryColumn ) o ; } throw new CasterException ( o , "querycolumn" ) ;
public class Request { /** * Asynchronously executes requests that have already been serialized into an HttpURLConnection . No validation is * done that the contents of the connection actually reflect the serialized requests , so it is the caller ' s * responsibility to ensure that it will correctly generate the desired responses . This function will return * immediately , and the requests will be processed on a separate thread . In order to process results of a request , * or determine whether a request succeeded or failed , a callback must be specified ( see the * { @ link # setCallback ( Callback ) setCallback } method ) * This should only be called from the UI thread . * @ param callbackHandler * a Handler that will be used to post calls to the callback for each request ; if null , a Handler will be * instantiated on the calling thread * @ param connection * the HttpURLConnection that the requests were serialized into * @ param requests * the requests represented by the HttpURLConnection * @ return a RequestAsyncTask that is executing the request */ public static RequestAsyncTask executeConnectionAsync ( Handler callbackHandler , HttpURLConnection connection , RequestBatch requests ) { } }
Validate . notNull ( connection , "connection" ) ; RequestAsyncTask asyncTask = new RequestAsyncTask ( connection , requests ) ; requests . setCallbackHandler ( callbackHandler ) ; asyncTask . executeOnSettingsExecutor ( ) ; return asyncTask ;
public class SibRaSingleProcessListener { /** * Returns the maximum number of active messages that should be associated * with this listener at any one time . * @ return int max active messages */ int getMaxActiveMessages ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { final String methodName = "getMaxActiveMessages" ; SibTr . entry ( this , TRACE , methodName ) ; } // The field max active messages is used to represent the default max // active messages , however , on distributed systems we ignore this // setting ( as we will control the concurrency ourselves ) and return // 0 - meaning there is no limit ( or 1 if strict message ordering is on ) . // This method is called by the base class constructor when it registers // the async consumer with processor . int maxActiveMsgs = _strictMessageOrdering ? 1 : 0 ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { final String methodName = "getMaxActiveMessages" ; SibTr . exit ( this , TRACE , methodName , maxActiveMsgs ) ; } return maxActiveMsgs ;
public class SimpleParserImpl { /** * value = object | array | number | string | " true " | " false " | " null " . */ public Object parseValue ( ) { } }
Object val ; switch ( T . getType ( ) ) { case LCURLY : val = parseObject ( ) ; break ; case LSQUARE : val = parseArray ( ) ; break ; case INTEGER : if ( useBig ) { val = new BigInteger ( T . getString ( ) ) ; } else { try { val = Integer . parseInt ( T . getString ( ) ) ; } catch ( NumberFormatException e0 ) { // we have an overflow , the tokenizer guarantees the format is correct try { val = Long . parseLong ( T . getString ( ) ) ; } catch ( NumberFormatException e1 ) { val = 0 ; } } } advance ( ) ; break ; case DOUBLE : if ( useBig ) { val = new BigDecimal ( T . getString ( ) ) ; } else { val = Double . parseDouble ( T . getString ( ) ) ; } advance ( ) ; break ; case STRING : val = T . getString ( ) ; advance ( ) ; break ; case TRUE : val = true ; advance ( ) ; break ; case FALSE : val = false ; advance ( ) ; break ; case NULL : val = null ; advance ( ) ; break ; default : val = null ; addError ( ) ; } return val ;
public class Crossfader { /** * add the values to the bundle for saveInstanceState * @ param savedInstanceState * @ return */ public Bundle saveInstanceState ( Bundle savedInstanceState ) { } }
if ( savedInstanceState != null ) { savedInstanceState . putBoolean ( BUNDLE_CROSS_FADED , mCrossFadeSlidingPaneLayout . isOpen ( ) ) ; } return savedInstanceState ;
public class JDBCResultSet { /** * < ! - - start generic documentation - - > * Updates the designated column with a < code > float < / code > value . * The updater methods are used to update column values in the * current row or the insert row . The updater methods do not * update the underlying database ; instead the < code > updateRow < / code > or * < code > insertRow < / code > methods are called to update the database . * < ! - - end generic documentation - - > * < ! - - start release - specific documentation - - > * < div class = " ReleaseSpecificDocumentation " > * < h3 > HSQLDB - Specific Information : < / h3 > < p > * HSQLDB supports this feature . < p > * < / div > * < ! - - end release - specific documentation - - > * @ param columnIndex the first column is 1 , the second is 2 , . . . * @ param x the new column value * @ exception SQLException if a database access error occurs , * the result set concurrency is < code > CONCUR _ READ _ ONLY < / code > * or this method is called on a closed result set * @ exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @ since JDK 1.2 ( JDK 1.1 . x developers : read the overview for * JDBCResultSet ) */ public void updateFloat ( int columnIndex , float x ) throws SQLException { } }
Double value = new Double ( x ) ; startUpdate ( columnIndex ) ; preparedStatement . setParameter ( columnIndex , value ) ;
public class Matrix4d { /** * Set < code > this < / code > matrix to < code > T * R * S * M < / code > , where < code > T < / code > is the given < code > translation < / code > , * < code > R < / code > is a rotation - and possibly scaling - transformation specified by the given quaternion , < code > S < / code > is a scaling transformation * which scales the axes by < code > scale < / code > and < code > M < / code > is an { @ link # isAffine ( ) affine } matrix . * When transforming a vector by the resulting matrix the transformation described by < code > M < / code > will be applied first , then the scaling , then rotation and * at last the translation . * When used with a right - handed coordinate system , the produced rotation will rotate a vector * counter - clockwise around the rotation axis , when viewing along the negative axis direction towards the origin . * When used with a left - handed coordinate system , the rotation is clockwise . * This method is equivalent to calling : < code > translation ( translation ) . rotate ( quat ) . scale ( scale ) . mulAffine ( m ) < / code > * @ see # translation ( Vector3fc ) * @ see # rotate ( Quaterniondc ) * @ see # mulAffine ( Matrix4dc ) * @ param translation * the translation * @ param quat * the quaternion representing a rotation * @ param scale * the scaling factors * @ param m * the { @ link # isAffine ( ) affine } matrix to multiply by * @ return this */ public Matrix4d translationRotateScaleMulAffine ( Vector3fc translation , Quaterniondc quat , Vector3fc scale , Matrix4d m ) { } }
return translationRotateScaleMulAffine ( translation . x ( ) , translation . y ( ) , translation . z ( ) , quat . x ( ) , quat . y ( ) , quat . z ( ) , quat . w ( ) , scale . x ( ) , scale . y ( ) , scale . z ( ) , m ) ;
public class EventDataDeserializer { /** * Deserialize JSON into super class { @ code StripeObject } where the underlying concrete class * corresponds to type specified in root - level { @ code object } field of the JSON input . * < p > Note that the expected JSON input is data at the { @ code object } value , as a sibling to * { @ code previousAttributes } , and not the discriminator field containing a string . * @ return JSON data to be deserialized to super class { @ code StripeObject } */ static StripeObject deserializeStripeObject ( JsonObject eventDataObjectJson ) { } }
String type = eventDataObjectJson . getAsJsonObject ( ) . get ( "object" ) . getAsString ( ) ; Class < ? extends StripeObject > cl = EventDataClassLookup . findClass ( type ) ; return ApiResource . GSON . fromJson ( eventDataObjectJson , cl != null ? cl : StripeRawJsonObject . class ) ;
public class Script { /** * / * package private */ static BigInteger castToBigInteger ( final byte [ ] chunk , final int maxLength , final boolean requireMinimal ) throws ScriptException { } }
if ( chunk . length > maxLength ) throw new ScriptException ( ScriptError . SCRIPT_ERR_UNKNOWN_ERROR , "Script attempted to use an integer larger than " + maxLength + " bytes" ) ; if ( requireMinimal && chunk . length > 0 ) { // Check that the number is encoded with the minimum possible // number of bytes . // If the most - significant - byte - excluding the sign bit - is zero // then we ' re not minimal . Note how this test also rejects the // negative - zero encoding , 0x80. if ( ( chunk [ chunk . length - 1 ] & 0x7f ) == 0 ) { // One exception : if there ' s more than one byte and the most // significant bit of the second - most - significant - byte is set // it would conflict with the sign bit . An example of this case // is + - 255 , which encode to 0xff00 and 0xff80 respectively . // ( big - endian ) . if ( chunk . length <= 1 || ( chunk [ chunk . length - 2 ] & 0x80 ) == 0 ) { throw new ScriptException ( ScriptError . SCRIPT_ERR_UNKNOWN_ERROR , "non-minimally encoded script number" ) ; } } } return Utils . decodeMPI ( Utils . reverseBytes ( chunk ) , false ) ;
public class FiveLetterFirstNameTextProducer { /** * { @ inheritDoc } */ @ Override public String getText ( ) { } }
int car = FIRST_NAMES . length - 1 ; return FIRST_NAMES [ RAND . nextInt ( car ) + 1 ] ;