signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class SearchQueryParser { /** * / * Create a FieldValue for the query field from the string value .
* We try to convert the value types according to the data type of the query field . */
@ VisibleForTesting FieldValue createFieldValue ( SearchQueryField field , String quotedStringValue , boolean negate ) { } } | // Make sure there are no quotes in the value ( e . g . ` " foo " ' - - > ` foo ' )
final String value = quotedStringValue . replaceAll ( QUOTE_REPLACE_REGEX , "" ) ; final SearchQueryField . Type fieldType = field . getFieldType ( ) ; final Pair < String , SearchQueryOperator > pair = extractOperator ( value , fieldType == STRING ? DEFAULT_STRING_OPERATOR : DEFAULT_OPERATOR ) ; switch ( fieldType ) { case DATE : return new FieldValue ( parseDate ( pair . getLeft ( ) ) , pair . getRight ( ) , negate ) ; case STRING : return new FieldValue ( pair . getLeft ( ) , pair . getRight ( ) , negate ) ; case INT : return new FieldValue ( Integer . parseInt ( pair . getLeft ( ) ) , pair . getRight ( ) , negate ) ; case LONG : return new FieldValue ( Long . parseLong ( pair . getLeft ( ) ) , pair . getRight ( ) , negate ) ; default : throw new IllegalArgumentException ( "Unhandled field type: " + fieldType . toString ( ) ) ; } |
public class StandardDirectoryAgentServer {
    /**
     * Handles a unicast TCP SrvRqst message arrived to this directory agent.
     * <br/>
     * This directory agent will reply with a list of matching services.
     *
     * @param srvRqst the SrvRqst message to handle
     * @param socket the socket connected to the client where to write the reply
     * @see #handleUDPSrvRqst(SrvRqst, InetSocketAddress, InetSocketAddress)
     * @see #matchServices(ServiceType, String, Scopes, String)
     */
    protected void handleTCPSrvRqst(SrvRqst srvRqst, Socket socket) {
        // Match scopes, RFC 2608, 11.1: reject the request outright if the
        // requested scopes do not (weakly) match this agent's scopes.
        if (!scopes.weakMatch(srvRqst.getScopes())) {
            tcpSrvRply.perform(socket, srvRqst, SLPError.SCOPE_NOT_SUPPORTED);
            return;
        }
        ServiceType serviceType = srvRqst.getServiceType();
        // Collect services matching type, language, scopes and LDAP-style filter.
        List<ServiceInfo> matchingServices = matchServices(serviceType, srvRqst.getLanguage(), srvRqst.getScopes(), srvRqst.getFilter());
        tcpSrvRply.perform(socket, srvRqst, matchingServices);
        if (logger.isLoggable(Level.FINE))
            logger.fine("DirectoryAgent " + this + " returning " + matchingServices.size() + " services of type " + serviceType);
    }
}
public class ConfluenceGreenPepper { /** * Retrieves the previous implemented version of the specification .
* @ param page a { @ link com . atlassian . confluence . pages . Page } object .
* @ return the previous implemented version of the specification . */
public Integer getPreviousImplementedVersion ( Page page ) { } } | ContentEntityObject entityObject = getContentEntityManager ( ) . getById ( page . getId ( ) ) ; String value = getContentPropertyManager ( ) . getStringProperty ( entityObject , PREVIOUS_IMPLEMENTED_VERSION ) ; return value == null ? null : Integer . valueOf ( value ) ; |
public class ControlDecisionExpectedImpl { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . mfp . control . ControlDecisionExpected # getTick ( ) */
public final long [ ] getTick ( ) { } } | List < Long > list = ( List < Long > ) jmo . getField ( ControlAccess . BODY_DECISIONEXPECTED_TICK ) ; long lists [ ] = new long [ list . size ( ) ] ; for ( int i = 0 ; i < lists . length ; i ++ ) lists [ i ] = list . get ( i ) . longValue ( ) ; return lists ; |
public class SSLComponent {
    /**
     * Process configuration information: publish repertoire/keystore IDs as
     * service properties and, when requested, reinitialize the SSLConfigManager.
     *
     * @param updateSSLConfigManager whether the SSLConfigManager should be
     *            reinitialized with the current configuration
     */
    private synchronized void processConfig(boolean updateSSLConfigManager) {
        // Nothing to do until the component has been activated by DS.
        if (componentContext == null) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Not yet activated, can not process config");
            }
            return;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            Tr.event(tc, "Processing configuration " + updateSSLConfigManager);
        }
        // Determine whether this process is a server (vs. client) process.
        boolean isServer = locSvc.resolveString(WsLocationConstants.SYMBOL_PROCESS_TYPE).equals(WsLocationConstants.LOC_PROCESS_TYPE_SERVER);
        // Republish the known repertoire and keystore identifiers as service properties.
        Dictionary<String, Object> serviceProps = new Hashtable<String, Object>(config);
        serviceProps.put(REPERTOIRE_IDS, repertoireMap.keySet().toArray(new String[repertoireMap.size()]));
        serviceProps.put(KEYSTORE_IDS, keystoreIdMap.keySet().toArray(new String[keystoreIdMap.size()]));
        serviceProps.put(REPERTOIRE_PIDS, repertoirePIDMap.keySet().toArray(new String[repertoirePIDMap.size()]));
        if (updateSSLConfigManager) {
            try {
                // pass reinitialize = true to redo config
                SSLConfigManager.getInstance().initializeSSL(getGlobalProps(), true, isServer, transportSecurityEnabled, repertoirePIDMap);
            } catch (SSLException e) {
                // Best-effort: log the failure but keep publishing service properties.
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                    Tr.event(tc, "Exception processing SSL configuration; " + e);
                }
            }
        }
        // SSL support is only advertised as active once both maps are populated.
        if (!repertoireMap.isEmpty() && !keystoreIdMap.isEmpty()) {
            serviceProps.put(SSL_SUPPORT_KEY, SSL_SUPPORT_VALUE_ACTIVE);
        }
        this.componentContext.setServiceProperties(serviceProps);
    }
}
public class CmafEncryptionMarshaller {
    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param cmafEncryption the object to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving the field bindings
     * @throws SdkClientException if the argument is {@code null} or marshalling fails
     */
    public void marshall(CmafEncryption cmafEncryption, ProtocolMarshaller protocolMarshaller) {
        if (cmafEncryption == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each field of the model object to its wire representation.
            protocolMarshaller.marshall(cmafEncryption.getKeyRotationIntervalSeconds(), KEYROTATIONINTERVALSECONDS_BINDING);
            protocolMarshaller.marshall(cmafEncryption.getSpekeKeyProvider(), SPEKEKEYPROVIDER_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause for diagnosis.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Clahe {
    /**
     * Applies Contrast-Limited Adaptive Histogram Equalization (CLAHE) to the
     * image using a sliding-window local histogram per row. This effect deals
     * with OneBandSource and Binary MatrixSource.
     * <p>
     * For grayscale sources the equalized value is written back directly; for
     * color sources the equalization is computed on a grayscale copy and the
     * resulting gain is applied to each RGB channel.
     *
     * @param source the image to equalize (modified in place)
     * @return the same {@code source} instance, equalized
     */
    @Override
    public ImageSource apply(ImageSource source) {
        int width = source.getWidth();
        int height = source.getHeight();
        int[][] result = new int[height][width];
        if (source.isGrayscale()) {
            for (int i = 0; i < height; i++) {
                // Vertical extent of the local window, clamped to the image.
                int iMin = Math.max(0, i - blockRadius);
                int iMax = Math.min(height, i + blockRadius + 1);
                int h = iMax - iMin;
                // Initial horizontal extent for column 0 of this row.
                int jMin = Math.max(0, -blockRadius);
                int jMax = Math.min(width - 1, blockRadius);
                int[] hist = new int[bins + 1];
                int[] clippedHist = new int[bins + 1];
                // Seed the histogram with the window around column 0.
                for (int k = iMin; k < iMax; k++) {
                    for (int l = jMin; l < jMax; l++) {
                        ++hist[roundPositive(source.getGray(l, k) / 255.0f * bins)];
                    }
                }
                for (int j = 0; j < width; j++) {
                    // Bin of the current pixel.
                    int v = roundPositive(source.getGray(j, i) / 255.0f * bins);
                    int xMin = Math.max(0, j - blockRadius);
                    int xMax = j + blockRadius + 1;
                    int w = Math.min(width, xMax) - xMin;
                    int n = h * w;
                    // Contrast-limiting clip ceiling for this window size.
                    int limit = (int) (slope * n / bins + 0.5f);
                    /* remove left behind values from histogram */
                    if (xMin > 0) {
                        int xMin1 = xMin - 1;
                        for (int yi = iMin; yi < iMax; ++yi) {
                            --hist[roundPositive(source.getGray(xMin1, yi) / 255.0f * bins)];
                        }
                    }
                    /* add newly included values to histogram */
                    if (xMax <= width) {
                        int xMax1 = xMax - 1;
                        for (int yi = iMin; yi < iMax; ++yi) {
                            ++hist[roundPositive(source.getGray(xMax1, yi) / 255.0f * bins)];
                        }
                    }
                    // Clip the histogram and redistribute the excess until stable.
                    System.arraycopy(hist, 0, clippedHist, 0, hist.length);
                    int clippedEntries = 0, clippedEntriesBefore;
                    do {
                        clippedEntriesBefore = clippedEntries;
                        clippedEntries = 0;
                        for (int z = 0; z <= bins; ++z) {
                            int d = clippedHist[z] - limit;
                            if (d > 0) {
                                clippedEntries += d;
                                clippedHist[z] = limit;
                            }
                        }
                        // Spread the clipped mass evenly, remainder over every s-th bin.
                        int d = clippedEntries / (bins + 1);
                        int m = clippedEntries % (bins + 1);
                        for (int z = 0; z <= bins; ++z) {
                            clippedHist[z] += d;
                        }
                        if (m != 0) {
                            int s = bins / m;
                            for (int z = 0; z <= bins; z += s) {
                                ++clippedHist[z];
                            }
                        }
                    } while (clippedEntries != clippedEntriesBefore);
                    /* build cdf of clipped histogram */
                    int hMin = bins;
                    for (int z = 0; z < hMin; ++z) {
                        if (clippedHist[z] != 0) hMin = z;
                    }
                    int cdf = 0;
                    for (int z = hMin; z <= v; ++z) {
                        cdf += clippedHist[z];
                    }
                    int cdfMax = cdf;
                    for (int z = v + 1; z <= bins; ++z) {
                        cdfMax += clippedHist[z];
                    }
                    int cdfMin = clippedHist[hMin];
                    // Equalized value mapped back to the 0..255 range.
                    result[i][j] = roundPositive((cdf - cdfMin) / (float) (cdfMax - cdfMin) * 255.0f);
                }
                // Write the equalized row back into the source.
                for (int a = 0; a < width; a++) {
                    source.setGray(a, i, result[i][a]);
                }
            }
        } else {
            // Color path: equalize a grayscale copy, then scale RGB by the gain.
            MatrixSource gray = new MatrixSource(source);
            Grayscale gs = new Grayscale(algorithm);
            // It is not OneBandSource
            gs.apply(gray);
            for (int i = 0; i < height; i++) {
                int iMin = Math.max(0, i - blockRadius);
                int iMax = Math.min(height, i + blockRadius + 1);
                int h = iMax - iMin;
                int jMin = Math.max(0, -blockRadius);
                int jMax = Math.min(width - 1, blockRadius);
                int[] hist = new int[bins + 1];
                int[] clippedHist = new int[bins + 1];
                for (int k = iMin; k < iMax; k++) {
                    for (int l = jMin; l < jMax; l++) {
                        ++hist[roundPositive(gray.getGray(l, k) / 255.0f * bins)];
                    }
                }
                for (int j = 0; j < width; j++) {
                    int v = roundPositive(gray.getGray(j, i) / 255.0f * bins);
                    int xMin = Math.max(0, j - blockRadius);
                    int xMax = j + blockRadius + 1;
                    int w = Math.min(width, xMax) - xMin;
                    int n = h * w;
                    int limit = (int) (slope * n / bins + 0.5f);
                    /* remove left behind values from histogram */
                    if (xMin > 0) {
                        int xMin1 = xMin - 1;
                        for (int yi = iMin; yi < iMax; ++yi) {
                            --hist[roundPositive(gray.getGray(xMin1, yi) / 255.0f * bins)];
                        }
                    }
                    /* add newly included values to histogram */
                    if (xMax <= width) {
                        int xMax1 = xMax - 1;
                        for (int yi = iMin; yi < iMax; ++yi) {
                            ++hist[roundPositive(gray.getGray(xMax1, yi) / 255.0f * bins)];
                        }
                    }
                    System.arraycopy(hist, 0, clippedHist, 0, hist.length);
                    int clippedEntries = 0, clippedEntriesBefore;
                    do {
                        clippedEntriesBefore = clippedEntries;
                        clippedEntries = 0;
                        for (int z = 0; z <= bins; ++z) {
                            int d = clippedHist[z] - limit;
                            if (d > 0) {
                                clippedEntries += d;
                                clippedHist[z] = limit;
                            }
                        }
                        int d = clippedEntries / (bins + 1);
                        int m = clippedEntries % (bins + 1);
                        for (int z = 0; z <= bins; ++z) clippedHist[z] += d;
                        if (m != 0) {
                            int s = bins / m;
                            for (int z = 0; z <= bins; z += s) ++clippedHist[z];
                        }
                    } while (clippedEntries != clippedEntriesBefore);
                    /* build cdf of clipped histogram */
                    int hMin = bins;
                    for (int z = 0; z < hMin; ++z) {
                        if (clippedHist[z] != 0) hMin = z;
                    }
                    int cdf = 0;
                    for (int z = hMin; z <= v; ++z) {
                        cdf += clippedHist[z];
                    }
                    int cdfMax = cdf;
                    for (int z = v + 1; z <= bins; ++z) {
                        cdfMax += clippedHist[z];
                    }
                    int cdfMin = clippedHist[hMin];
                    result[i][j] = roundPositive((cdf - cdfMin) / (float) (cdfMax - cdfMin) * 255.0f);
                }
                // Apply the per-pixel gain (equalized/original gray) to each channel.
                for (int a = 0; a < width; a++) {
                    float s = (float) result[i][a] / (float) gray.getGray(a, i);
                    float r = ColorHelper.clamp(roundPositive(s * source.getR(a, i)));
                    float g = ColorHelper.clamp(roundPositive(s * source.getG(a, i)));
                    float b = ColorHelper.clamp(roundPositive(s * source.getB(a, i)));
                    int rgb = ColorHelper.getRGB((int) r, (int) g, (int) b);
                    source.setRGB(a, i, rgb);
                }
            }
        }
        return source;
    }
}
public class TapStream { /** * Check if all operations in the TapStream are completed .
* @ return true if all operations currently in the TapStream are completed */
public boolean isCompleted ( ) { } } | for ( TapOperation op : ops ) { if ( ! op . getState ( ) . equals ( OperationState . COMPLETE ) ) { return false ; } } return true ; |
public class WebSocketController { /** * tag : : publish [ ] */
@ OnMessage ( "/echo-all" ) public void echoAll ( @ Body String message ) { } } | logger ( ) . info ( "Message received : {}" , message ) ; publisher . publish ( "/echo-all" , message . toUpperCase ( ) ) ; |
public class AbstractNlsTemplate {
    /**
     * This method creates an {@link NlsMessageFormatter} for the given {@code messageTemplate} and {@code locale}.
     *
     * @param messageTemplate is the template of the message for the given {@code locale}.
     * @param locale is the locale to use. The implementation may ignore it here because it is also supplied to
     *        {@link net.sf.mmm.util.nls.api.NlsFormatter#format(Object, Locale, Map, NlsTemplateResolver) format}. Anyhow
     *        it allows the implementation to do smart caching of the parsed formatter in association with the locale.
     * @param nlsDependencies are the {@link NlsDependencies}.
     * @return the formatter instance.
     */
    protected NlsMessageFormatter createFormatter(String messageTemplate, Locale locale, NlsDependencies nlsDependencies) {
        // The locale is deliberately unused here; see the @param note above.
        return nlsDependencies.getMessageFormatterFactory().create(messageTemplate);
    }
}
public class JSFormatter { /** * Produces { @ link IJSGeneratable } s separated by ' , '
* @ param aCont
* contained of { @ link IJSGeneratable }
* @ return this */
@ Nonnull public JSFormatter generatable ( @ Nonnull final Iterable < ? extends IJSGeneratable > aCont ) { } } | boolean bFirst = true ; for ( final IJSGeneratable aItem : aCont ) { if ( bFirst ) bFirst = false ; else plain ( ',' ) ; generatable ( aItem ) ; } return this ; |
public class ConfigImpl {
    /*
     * (non-Javadoc)
     * @see com.ibm.jaggr.service.options.IOptionsListener#optionsUpdated(com.ibm.jaggr.service.options.IOptions, long)
     *
     * Mirrors the current options into the shared Rhino scope as a JavaScript
     * `options` object so config scripts can read them.
     */
    @Override
    public void optionsUpdated(IOptions options, long sequence) {
        // Rhino requires a Context to be entered on this thread; always exit it.
        Context cx = Context.enter();
        try {
            Scriptable jsOptions = cx.newObject(sharedScope);
            Map<String, String> optionsMap = options.getOptionsMap();
            // Copy each option into the JS object, converting values to JS types.
            for (Map.Entry<String, String> entry : optionsMap.entrySet()) {
                Object value = Context.javaToJS(entry.getValue(), sharedScope);
                ScriptableObject.putProperty(jsOptions, entry.getKey(), value);
            }
            ScriptableObject.putProperty(sharedScope, "options", jsOptions); //$NON-NLS-1$
        } finally {
            Context.exit();
        }
    }
}
public class TangoAttribute { /** * Read attribute and return result as array .
* @ param type
* The requested output type , is the component type ( double ,
* Double . . . ) .
* @ return
* @ throws DevFailed */
public < T > Object readArray ( final Class < T > type ) throws DevFailed { } } | update ( ) ; return extractArray ( type ) ; |
public class DestructuringGlobalNameExtractor {
    /**
     * Adds the new assign or name declaration after the original assign or name declaration.
     *
     * @param originalLvalue the lvalue of the existing assignment/declaration
     * @param newLvalue the lvalue to insert after it
     * @param newRvalue the rvalue to pair with {@code newLvalue}
     */
    private static void addAfter(Node originalLvalue, Node newLvalue, Node newRvalue) {
        Node parent = originalLvalue.getParent();
        if (parent.isAssign()) {
            // create `(<originalLvalue = ...>, <newLvalue = newRvalue>)`
            Node newAssign = IR.assign(newLvalue, newRvalue).srcref(parent);
            Node newComma = new Node(Token.COMMA, newAssign);
            // Replace the original assignment with the comma expression first,
            // then re-parent the original as the comma's first operand.
            parent.replaceWith(newComma);
            newComma.addChildToFront(parent);
            return;
        }
        // This must have been in a var/let/const.
        if (newLvalue.isDestructuringPattern()) {
            // Destructuring lvalues need a DESTRUCTURING_LHS wrapper holding the rvalue.
            newLvalue = new Node(Token.DESTRUCTURING_LHS, newLvalue, newRvalue).srcref(parent);
        } else {
            // Simple names carry their initializer as a child.
            newLvalue.addChildToBack(newRvalue);
        }
        Node declaration = parent.isDestructuringLhs() ? originalLvalue.getGrandparent() : parent;
        checkState(NodeUtil.isNameDeclaration(declaration), declaration);
        if (NodeUtil.isStatementParent(declaration.getParent())) {
            // `const {} = originalRvalue; const newLvalue = newRvalue;`
            // create an entirely new statement
            Node newDeclaration = new Node(declaration.getToken()).srcref(declaration);
            newDeclaration.addChildToBack(newLvalue);
            declaration.getParent().addChildAfter(newDeclaration, declaration);
        } else {
            // `const {} = originalRvalue, newLvalue = newRvalue;`
            // The Normalize pass tries to ensure name declarations are always in statement blocks, but
            // currently has not implemented normalization for `for (let x = 0; ...`
            // so we can't add a new statement
            declaration.addChildToBack(newLvalue);
        }
    }
}
public class WxApi2Impl { /** * 二维码API */
@ Override public WxResp qrcode_create ( Object scene_id , int expire_seconds ) { } } | NutMap params = new NutMap ( ) ; NutMap scene ; // 临时二维码
if ( expire_seconds > 0 ) { params . put ( "expire_seconds" , expire_seconds ) ; // 临时整型二维码
if ( scene_id instanceof Number ) { params . put ( "action_name" , "QR_SCENE" ) ; scene = Lang . map ( "scene_id" , Castors . me ( ) . castTo ( scene_id , Integer . class ) ) ; // 临时字符串二维码
} else { params . put ( "action_name" , "QR_STR_SCENE" ) ; scene = Lang . map ( "scene_str" , scene_id . toString ( ) ) ; } } // 永久二维码
else if ( scene_id instanceof Number ) { params . put ( "action_name" , "QR_LIMIT_SCENE" ) ; scene = Lang . map ( "scene_id" , Castors . me ( ) . castTo ( scene_id , Integer . class ) ) ; } // 永久字符串二维码
else { params . put ( "action_name" , "QR_LIMIT_STR_SCENE" ) ; scene = Lang . map ( "scene_str" , scene_id . toString ( ) ) ; } params . put ( "action_info" , Lang . map ( "scene" , scene ) ) ; return postJson ( "/qrcode/create" , params ) ; |
public class Reductions {
    /**
     * Reduces an iterator of elements using the passed function.
     *
     * @param <E> the element type parameter
     * @param <R> the result type parameter
     * @param iterator the iterator to be consumed
     * @param function the reduction function
     * @param init the initial value for reductions
     * @return the reduced value
     */
    public static <E, R> R reduce(Iterator<E> iterator, BiFunction<R, E, R> function, R init) {
        // Delegate the fold to Reductor, which consumes the iterator.
        return new Reductor<>(function, init).apply(iterator);
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link CurvePropertyType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link CurvePropertyType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "curveProperty" ) public JAXBElement < CurvePropertyType > createCurveProperty ( CurvePropertyType value ) { } } | return new JAXBElement < CurvePropertyType > ( _CurveProperty_QNAME , CurvePropertyType . class , null , value ) ; |
public class ClustersInner { /** * Update a Kusto cluster .
* @ param resourceGroupName The name of the resource group containing the Kusto cluster .
* @ param clusterName The name of the Kusto cluster .
* @ param parameters The Kusto cluster parameters supplied to the Update operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the ClusterInner object */
public Observable < ClusterInner > beginUpdateAsync ( String resourceGroupName , String clusterName , ClusterUpdate parameters ) { } } | return beginUpdateWithServiceResponseAsync ( resourceGroupName , clusterName , parameters ) . map ( new Func1 < ServiceResponse < ClusterInner > , ClusterInner > ( ) { @ Override public ClusterInner call ( ServiceResponse < ClusterInner > response ) { return response . body ( ) ; } } ) ; |
public class CharsetDetector {
    /**
     * Set the input text (byte) data whose charset is to be detected.
     * The input stream that supplies the character data must have markSupported()
     * == true; the charset detection process will read a small amount of data,
     * then return the stream to its original position via
     * the InputStream.reset() operation. The exact amount that will
     * be read depends on the characteristics of the data itself.
     *
     * @param in the input text of unknown encoding
     * @return This CharsetDetector
     * @throws IOException if reading from or resetting the stream fails
     */
    public CharsetDetector setText(InputStream in) throws IOException {
        fInputStream = in;
        // Mark so we can rewind after sampling up to kBufSize bytes.
        fInputStream.mark(kBufSize);
        fRawInput = new byte[kBufSize]; // Always make a new buffer because the
        // previous one may have come from the caller,
        // in which case we can't touch it.
        fRawLength = 0;
        int remainingLength = kBufSize;
        while (remainingLength > 0) {
            // read() may give data in smallish chunks, esp. for remote sources. Hence, this loop.
            int bytesRead = fInputStream.read(fRawInput, fRawLength, remainingLength);
            if (bytesRead <= 0) {
                break;
            }
            fRawLength += bytesRead;
            remainingLength -= bytesRead;
        }
        // Return the stream to its original position for the caller.
        fInputStream.reset();
        return this;
    }
}
public class QueryReferenceBroker {
    /**
     * retrieves an Object reference by its Identity.
     * <br>
     * If there is a Proxy-class defined in the ReferenceDescriptor or
     * if the ReferenceDescriptor is lazy, a Proxy-object is returned.
     * <br>
     * If no Proxy-class is defined, a getObjectByIdentity(...) lookup is performed.
     */
    private Object getReferencedObject(Identity id, ObjectReferenceDescriptor rds) {
        Class baseClassForProxy;
        if (rds.isLazy()) {
            /* arminw:
               use real reference class instead of the top-level class,
               because we want to use a proxy representing the real class
               not only the top-level class - right? */
            // referencedProxy = getClassDescriptor(referencedClass).getDynamicProxyClass();
            // referencedProxy = rds.getItemClass();
            /* andrew.clute:
               With proxy generation now handled by the ProxyFactory implementations, the class of the item
               is now the necessary parameter to generate a proxy. */
            baseClassForProxy = rds.getItemClass();
        } else {
            /* andrew.clute:
               If the descriptor does not mark it as lazy, then the class for the proxy must be of type VirtualProxy */
            baseClassForProxy = rds.getItemProxyClass();
        }
        if (baseClassForProxy != null) {
            try {
                // Hand back a lazy proxy; the real object is materialized on first access.
                return pb.createProxy(baseClassForProxy, id);
            } catch (Exception e) {
                log.error("Error while instantiate object " + id + ", msg: " + e.getMessage(), e);
                // Rethrow broker exceptions as-is; wrap anything else.
                if (e instanceof PersistenceBrokerException) {
                    throw (PersistenceBrokerException) e;
                } else {
                    throw new PersistenceBrokerException(e);
                }
            }
        } else {
            // No proxy class available: resolve the object eagerly by identity.
            return pb.doGetObjectByIdentity(id);
        }
    }
}
public class StringUtilities {
    /**
     * Get the minimum number of operations required to get from one string to another
     * using the Damerau-Levenshtein distance algorithm (insertions, deletions,
     * substitutions and transpositions of adjacent characters).
     * Note: Java implementation of the C# algorithm from
     * https://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance
     *
     * @param source The source string (may be {@code null}, treated as empty).
     * @param target The string to transform the source into (may be {@code null}).
     * @return The number of operations required to transform source into target.
     */
    public static int getDamerauLevenshteinDistance(String source, String target) {
        final boolean sourceEmpty = source == null || source.isEmpty();
        final boolean targetEmpty = target == null || target.isEmpty();
        if (sourceEmpty) {
            return targetEmpty ? 0 : target.length();
        }
        if (targetEmpty) {
            return source.length();
        }
        final int sLen = source.length();
        final int tLen = target.length();
        // A value larger than any possible distance, used as a sentinel border.
        final int inf = sLen + tLen;
        final int[][] table = new int[sLen + 2][tLen + 2];
        table[0][0] = inf;
        for (int i = 0; i <= sLen; i++) {
            table[i + 1][1] = i;
            table[i + 1][0] = inf;
        }
        for (int j = 0; j <= tLen; j++) {
            table[1][j + 1] = j;
            table[0][j + 1] = inf;
        }
        // For each character, the last row of `source` where it occurred.
        final SortedMap<Character, Integer> lastRowOf = new TreeMap<Character, Integer>();
        for (final char letter : (source + target).toCharArray()) {
            if (!lastRowOf.containsKey(letter)) {
                lastRowOf.put(letter, 0);
            }
        }
        for (int i = 1; i <= sLen; i++) {
            final char sChar = source.charAt(i - 1);
            int lastMatchCol = 0; // "DB" in the reference implementation
            for (int j = 1; j <= tLen; j++) {
                final char tChar = target.charAt(j - 1);
                final int i1 = lastRowOf.get(tChar);
                final int j1 = lastMatchCol;
                int cost;
                if (sChar == tChar) {
                    cost = table[i][j];
                    lastMatchCol = j;
                } else {
                    cost = 1 + Math.min(table[i][j], Math.min(table[i + 1][j], table[i][j + 1]));
                }
                // Candidate using a (possibly distant) transposition.
                final int transposition = table[i1][j1] + (i - i1 - 1) + 1 + (j - j1 - 1);
                table[i + 1][j + 1] = Math.min(cost, transposition);
            }
            lastRowOf.put(sChar, i);
        }
        return table[sLen + 1][tLen + 1];
    }
}
public class FieldPropertyExtractor { /** * { @ inheritDoc } */
public Map < String , PropertyWrapper > extractProperties ( Class < ? > clazz ) { } } | Map < String , PropertyWrapper > map = new LinkedHashMap < > ( ) ; extractProperties0 ( clazz , map ) ; return map ; |
public class PageOverlayConditionalProcessingImpl {
    /**
     * Sets the level, notifying EMF adapters of the change when required.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setLevel(Integer newLevel) {
        Integer oldLevel = level;
        level = newLevel;
        // Fire a SET notification carrying the old and new values.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.PAGE_OVERLAY_CONDITIONAL_PROCESSING__LEVEL, oldLevel, level));
    }
}
public class GetDatabasesRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param getDatabasesRequest the request to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving the field bindings
     * @throws SdkClientException if the argument is {@code null} or marshalling fails
     */
    public void marshall(GetDatabasesRequest getDatabasesRequest, ProtocolMarshaller protocolMarshaller) {
        if (getDatabasesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request field to its wire representation.
            protocolMarshaller.marshall(getDatabasesRequest.getCatalogId(), CATALOGID_BINDING);
            protocolMarshaller.marshall(getDatabasesRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(getDatabasesRequest.getMaxResults(), MAXRESULTS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause for diagnosis.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Base64 {
    /**
     * This method encodes the byte array into a char array in base 64 according
     * to the specification given by the RFC 1521 (5.2).
     *
     * @param data
     *        the byte array to encode
     * @return the base64-encoded bytes (4 output bytes per 3 input bytes,
     *         '='-padded when the input length is not a multiple of 3)
     */
    public static byte[] encode(byte[] data) {
        int sourceChunks = data.length / 3;
        int len = (data.length + 2) / 3 * 4;
        byte[] result = new byte[len];
        // Bytes left over after the whole 3-byte chunks (0, 1 or 2).
        int extraBytes = data.length - sourceChunks * 3;
        // Each 3 bytes of input produce 4 bytes of encoded output.
        int dataIndex = 0;
        int resultIndex = 0;
        int allBits = 0;
        for (int i = 0; i < sourceChunks; i++) {
            allBits = 0;
            // Loop 3 times gathering input bits (3 * 8 = 24)
            for (int j = 0; j < 3; j++) {
                allBits = allBits << 8 | data[dataIndex++] & 0xff;
            }
            // Loop 4 times generating output bits (4 * 6 = 24)
            for (int j = resultIndex + 3; j >= resultIndex; j--) {
                result[j] = (byte) digits[allBits & 0x3f]; // Bottom
                // bits
                allBits = allBits >>> 6;
            }
            resultIndex += 4; // processed 4 result bytes
        }
        // Now we do the extra bytes in case the original (non-encoded) data
        // length is not a multiple of 3 bytes
        switch (extraBytes) {
            case 1:
                allBits = data[dataIndex++]; // actual byte
                allBits = allBits << 8; // 8 bits of zeroes
                allBits = allBits << 8; // 8 bits of zeroes
                // Loop 4 times generating output bits (4 * 6 = 24)
                for (int j = resultIndex + 3; j >= resultIndex; j--) {
                    result[j] = (byte) digits[allBits & 0x3f]; // Bottom
                    // bits
                    allBits = allBits >>> 6;
                }
                // 2 pad tags
                result[result.length - 1] = equalSign;
                result[result.length - 2] = equalSign;
                break;
            case 2:
                allBits = data[dataIndex++]; // actual byte
                allBits = allBits << 8 | data[dataIndex++] & 0xff; // actual
                // byte
                allBits = allBits << 8; // 8 bits of zeroes
                // Loop 4 times generating output bits (4 * 6 = 24)
                for (int j = resultIndex + 3; j >= resultIndex; j--) {
                    result[j] = (byte) digits[allBits & 0x3f]; // Bottom
                    // bits
                    allBits = allBits >>> 6;
                }
                // 1 pad tag
                result[result.length - 1] = equalSign;
                break;
        }
        return result;
    }
}
public class SvdlibcSparseBinaryFileRowIterator { /** * { @ inheritDoc } */
public SparseDoubleVector next ( ) { } } | if ( next == null ) throw new NoSuchElementException ( "No futher entries" ) ; SparseDoubleVector curCol = next ; try { advance ( ) ; } catch ( IOException ioe ) { throw new IOError ( ioe ) ; } return curCol ; |
public class SynchroReader { /** * Extract predecessor data . */
private void processPredecessors ( ) { } } | for ( Map . Entry < Task , List < MapRow > > entry : m_predecessorMap . entrySet ( ) ) { Task task = entry . getKey ( ) ; List < MapRow > predecessors = entry . getValue ( ) ; for ( MapRow predecessor : predecessors ) { processPredecessor ( task , predecessor ) ; } } |
public class AttributeHelper { /** * Extract data from DeviceAttribute to a String
* @ param deviceAttributeRead
* the DeviceAttribute attribute to read
* @ return String , the result in String format
* @ throws DevFailed */
public static String extractToString ( final DeviceAttribute deviceAttributeRead ) throws DevFailed { } } | final Object value = AttributeHelper . extract ( deviceAttributeRead ) ; String argout = null ; if ( value instanceof Short ) { argout = ( ( Short ) value ) . toString ( ) ; } else if ( value instanceof String ) { argout = ( String ) value ; } else if ( value instanceof Integer ) { argout = ( ( Integer ) value ) . toString ( ) ; } else if ( value instanceof Long ) { argout = ( ( Long ) value ) . toString ( ) ; } else if ( value instanceof Float ) { argout = ( ( Float ) value ) . toString ( ) ; } else if ( value instanceof Boolean ) { argout = ( ( Boolean ) value ) . toString ( ) ; } else if ( value instanceof Double ) { argout = ( ( Double ) value ) . toString ( ) ; } else if ( value instanceof DevState ) { argout = StateUtilities . getNameForState ( ( DevState ) value ) ; } else { Except . throw_exception ( "TANGO_WRONG_DATA_ERROR" , "output type " + value . getClass ( ) + " not supported" , "AttributeHelper.extractToString(Object value,deviceAttributeWritten)" ) ; } return argout ; |
public class MutableRoaringBitmap {
    /**
     * Add the value if it is not already present, otherwise remove it.
     *
     * @param x integer value
     */
    public void flip(final int x) {
        final short hb = BufferUtil.highbits(x);
        final int i = highLowContainer.getIndex(hb);
        if (i >= 0) {
            // A container already exists for this high-bit key: flip within it.
            MappeableContainer c = highLowContainer.getContainerAtIndex(i);
            c = c.flip(BufferUtil.lowbits(x));
            if (!c.isEmpty()) {
                ((MutableRoaringArray) highLowContainer).setContainerAtIndex(i, c);
            } else {
                // Flipping removed the last value: drop the empty container.
                ((MutableRoaringArray) highLowContainer).removeAtIndex(i);
            }
        } else {
            // No container yet: getIndex returned -(insertionPoint)-1, so insert
            // a new array container holding just this value at -i-1.
            final MappeableArrayContainer newac = new MappeableArrayContainer();
            ((MutableRoaringArray) highLowContainer).insertNewKeyValueAt(-i - 1, hb, newac.add(BufferUtil.lowbits(x)));
        }
    }
}
public class MetaFileHandler { /** * Remove a file from ~ / . fscrawler / { subdir } dir
* @ param subdir subdir where we can read the file ( null if we read in the root dir )
* @ param filename filename
* @ throws IOException in case of error while reading */
protected void removeFile ( String subdir , String filename ) throws IOException { } } | Path dir = root ; if ( subdir != null ) { dir = dir . resolve ( subdir ) ; } Files . deleteIfExists ( dir . resolve ( filename ) ) ; |
public class Server { /** * Creates the default HTTP server ( listening on port 9000 / ` http . port ` ) , no SSL , no mutual authentication ,
* accept all requests .
* @ param accessor the service accessor
* @ param vertx the vertx singleton
* @ return the configured server ( not bound , not started ) */
public static Server defaultHttp ( ServiceAccessor accessor , Vertx vertx ) { } } | return new Server ( accessor , vertx , "default-http" , accessor . getConfiguration ( ) . getIntegerWithDefault ( "http.port" , 9000 ) , false , false , null , Collections . < String > emptyList ( ) , Collections . < String > emptyList ( ) , null ) ; |
public class MessageInteraction { /** * Create a MessageInteractionReader to execute read .
* @ param pathServiceSid The SID of the Service to read the resource from
* @ param pathSessionSid The SID of the parent Session
* @ param pathParticipantSid The SID of the Participant resource
* @ return MessageInteractionReader capable of executing the read */
public static MessageInteractionReader reader ( final String pathServiceSid , final String pathSessionSid , final String pathParticipantSid ) { } } | return new MessageInteractionReader ( pathServiceSid , pathSessionSid , pathParticipantSid ) ; |
public class AbstractJpaTenantAwareBaseEntity { /** * PrePersist listener method for all { @ link TenantAwareBaseEntity }
* entities . */
@ PrePersist void prePersist ( ) { } } | // before persisting the entity check the current ID of the tenant by
// using the TenantAware
// service
final String currentTenant = SystemManagementHolder . getInstance ( ) . currentTenant ( ) ; if ( currentTenant == null ) { throw new TenantNotExistException ( "Tenant " + TenantAwareHolder . getInstance ( ) . getTenantAware ( ) . getCurrentTenant ( ) + " does not exists, cannot create entity " + this . getClass ( ) + " with id " + super . getId ( ) ) ; } setTenant ( currentTenant . toUpperCase ( ) ) ; |
/**
 * Sets the session cookie on the current HttpServerExchange.
 *
 * <p>Three cases: an invalid session is expired client-side (max-age 0),
 * a changed session is serialized as a signed JWT and encrypted into the
 * cookie value, and an unchanged session results in no cookie at all.
 *
 * @param exchange The Undertow HttpServerExchange
 */
protected void setSessionCookie(HttpServerExchange exchange) {
    Session session = this.attachment.getSession();
    if (session.isInvalid()) {
        // Invalid session: send an immediately-expiring cookie so the client drops it.
        Cookie cookie = new CookieImpl(this.config.getSessionCookieName())
                .setSecure(this.config.isSessionCookieSecure())
                .setHttpOnly(true)
                .setPath("/")
                .setMaxAge(0)
                .setSameSite(true)
                .setSameSiteMode(SAME_SITE_MODE)
                .setDiscard(true);
        exchange.setResponseCookie(cookie);
    } else if (session.hasChanges()) {
        // Build the JWT claims from the session state.
        JwtClaims jwtClaims = new JwtClaims();
        jwtClaims.setClaim(ClaimKey.AUTHENTICITY.toString(), session.getAuthenticity());
        jwtClaims.setClaim(ClaimKey.DATA.toString(), session.getValues());
        if (session.getExpires() == null) {
            // "-1" marks a session without an expiry timestamp.
            jwtClaims.setClaim(ClaimKey.EXPIRES.toString(), "-1");
        } else {
            jwtClaims.setClaim(ClaimKey.EXPIRES.toString(), session.getExpires().format(DateUtils.formatter));
        }
        // Sign the claims with HMAC-SHA512 using the configured sign key.
        JsonWebSignature jsonWebSignature = new JsonWebSignature();
        jsonWebSignature.setKey(new HmacKey(this.config.getSessionCookieSignKey().getBytes(StandardCharsets.UTF_8)));
        jsonWebSignature.setPayload(jwtClaims.toJson());
        jsonWebSignature.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA512);
        try {
            // Encrypt the compact JWS so the cookie value is opaque to the client.
            String encryptedValue = Application.getInstance(Crypto.class)
                    .encrypt(jsonWebSignature.getCompactSerialization(), this.config.getSessionCookieEncryptionKey());
            Cookie cookie = new CookieImpl(this.config.getSessionCookieName())
                    .setValue(encryptedValue)
                    .setSameSite(true)
                    .setSameSiteMode(SAME_SITE_MODE)
                    .setHttpOnly(true)
                    .setPath("/")
                    .setSecure(this.config.isSessionCookieSecure());
            if (session.getExpires() != null) {
                cookie.setExpires(DateUtils.localDateTimeToDate(session.getExpires()));
            }
            exchange.setResponseCookie(cookie);
        } catch (Exception e) { // NOSONAR Intentionally catching exception here
            LOG.error("Failed to generate session cookie", e);
        }
    } else {
        // Ignore and send no cookie to the client
    }
}
/**
 * Get a key.
 * REST: GET /dbaas/queue/{serviceName}/key/{keyId}
 *
 * @param serviceName [required] Application ID
 * @param keyId       [required] Key ID
 * @throws IOException if the HTTP exchange fails
 * API beta
 */
public OvhKey serviceName_key_keyId_GET(String serviceName, String keyId) throws IOException {
    String qPath = "/dbaas/queue/{serviceName}/key/{keyId}";
    // Substitute the path parameters into the URL template.
    StringBuilder sb = path(qPath, serviceName, keyId);
    String resp = exec(qPath, "GET", sb.toString(), null);
    // Deserialize the JSON response into the typed bean.
    return convertTo(resp, OvhKey.class);
}
/**
 * Information about the address pools.
 *
 * @return the list of public IPv4 pools; the backing field is lazily
 *         initialized to an empty internal list so callers never see {@code null}
 */
public java.util.List<PublicIpv4Pool> getPublicIpv4Pools() {
    if (publicIpv4Pools == null) {
        publicIpv4Pools = new com.amazonaws.internal.SdkInternalList<PublicIpv4Pool>();
    }
    return publicIpv4Pools;
}
public class StyleCache { /** * Set the feature style into the polyline options
* @ param polylineOptions polyline options
* @ param featureStyle feature style
* @ return true if style was set into the polyline options */
public boolean setFeatureStyle ( PolylineOptions polylineOptions , FeatureStyle featureStyle ) { } } | return StyleUtils . setFeatureStyle ( polylineOptions , featureStyle , density ) ; |
/** {@inheritDoc} */
@Override
public Map<String, Object> readJson(Reader reader) throws IOException {
    // Issue #99: Take a look at better dealing with root Map<> vs root List<> in the response.
    // We are expecting this isn't called on nested nodes in a recursive manner.
    JsonNode rootNode = mapper.readTree(reader);
    Map<String, Object> rootMap = null;
    // If we try to parse an empty string or invalid content, we could get null.
    if (null == rootNode) {
        throw new IllegalArgumentException("Empty reader or ill-formatted JSON encountered.");
    } else if (rootNode.isArray()) {
        // A root-level array is wrapped under a synthetic key so the result is always a Map.
        rootMap = new LinkedHashMap<String, Object>();
        rootMap.put(KeenConstants.KEEN_FAKE_JSON_ROOT, mapper.readValue(rootNode.traverse(), COLLECTION_TYPE));
    } else if (rootNode.isObject()) {
        rootMap = mapper.readValue(rootNode.traverse(), MAP_TYPE);
    }
    // NOTE(review): a scalar root (e.g. a bare string or number) falls through both
    // branches and returns null — confirm this is intended.
    return rootMap;
}
public class CommerceSubscriptionEntryPersistenceImpl { /** * Returns the last commerce subscription entry in the ordered set where subscriptionStatus = & # 63 ; .
* @ param subscriptionStatus the subscription status
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching commerce subscription entry , or < code > null < / code > if a matching commerce subscription entry could not be found */
@ Override public CommerceSubscriptionEntry fetchBySubscriptionStatus_Last ( int subscriptionStatus , OrderByComparator < CommerceSubscriptionEntry > orderByComparator ) { } } | int count = countBySubscriptionStatus ( subscriptionStatus ) ; if ( count == 0 ) { return null ; } List < CommerceSubscriptionEntry > list = findBySubscriptionStatus ( subscriptionStatus , count - 1 , count , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ; |
public class DoublePropertyAssert { /** * Verifies that the actual observable number has a value that is close to the given one by less then the given offset .
* @ param expectedValue the given value to compare the actual observables value to .
* @ param offset the given positive offset .
* @ return { @ code this } assertion object .
* @ throws java . lang . NullPointerException if the given offset is < code > null < / code > .
* @ throws java . lang . AssertionError if the actual observables value is not equal to the expected one . */
public DoublePropertyAssert hasValue ( Double expectedValue , Offset offset ) { } } | new ObservableNumberValueAssertions ( actual ) . hasValue ( expectedValue , offset ) ; return this ; |
/**
 * Get a {@link Map} from dataset URNs (as being specified by
 * {@link ConfigurationKeys#DATASET_URN_KEY}) to {@link JobState.DatasetState}
 * objects that represent the dataset states and store {@link TaskState}s
 * corresponding to the datasets.
 *
 * @see JobState#createDatasetStatesByUrns()
 * @return an immutable {@link Map} from dataset URNs to {@link JobState.DatasetState}s
 */
Map<String, JobState.DatasetState> getDatasetStatesByUrns() {
    // Guava Optional: fall back to an empty map when no dataset states were computed,
    // and copy so callers cannot mutate internal state.
    return ImmutableMap.copyOf(this.datasetStatesByUrns.or(Maps.<String, JobState.DatasetState>newHashMap()));
}
public class GenericHibernateDao { /** * Unproxy the entity ( and eagerly fetch properties ) . */
@ SuppressWarnings ( "unchecked" ) public E unproxy ( E e ) { } } | if ( e == null ) { throw new NullPointerException ( "Entity passed for initialization is null" ) ; } Hibernate . initialize ( e ) ; if ( e instanceof HibernateProxy ) { e = ( E ) ( ( HibernateProxy ) e ) . getHibernateLazyInitializer ( ) . getImplementation ( ) ; } return e ; |
public class AbstractFilteredDataStream { /** * Apply the filters .
* @ param row The row to filter .
* @ return The filtered row . */
public final Map < String , Object > applyFilters ( final Map < String , Object > row ) { } } | Map < String , Object > filteringRow = new LinkedHashMap < > ( row ) ; assertFilterListExists ( ) ; for ( IDataFilter filter : getFilters ( ) ) { filteringRow = filter . apply ( filteringRow ) ; } return filteringRow ; |
/**
 * Returns the first pagination link.
 *
 * @return the first pagination link
 */
@XmlElement(name = "firstPageLink")
@JsonProperty(value = "firstPageLink")
@ApiModelProperty(value = "The first pagination link.", position = 1)
public PageRequestLinkDto getFirstPageLink() {
    return firstPageLink;
}
/**
 * (non-Javadoc)
 * @see org.duracloud.mill.util.DriverSupport#executeImpl(org.apache.commons.cli.CommandLine)
 *
 * Purges manifest items flagged as deleted before the configured expiration date.
 */
@Override
protected void executeImpl(CommandLine cmd) {
    try {
        // Verify all required system properties are present before doing any work.
        List<PropertyDefinition> defintions = new PropertyDefinitionListBuilder()
                .addMillDb()
                .addManifestExpirationDate()
                .build();
        PropertyVerifier verifier = new PropertyVerifier(defintions);
        verifier.verify(System.getProperties());
        String time = System.getProperty(ConfigConstants.MANIFEST_EXPIRATION_TIME);
        Date expirationDate;
        expirationDate = parseExpirationDate(time);
        ApplicationContext context = new AnnotationConfigApplicationContext("org.duracloud.mill");
        log.info("spring context initialized.");
        ManifestStore store = context.getBean(ManifestStore.class);
        log.info("beginning purge of deleted items");
        long deleted = 0l;
        long total = 0l;
        // Purge in batches until no more expired deleted items remain.
        while ((deleted = store.purgeDeletedItemsBefore(expirationDate)) > 0) {
            total += deleted;
            log.info("Deleted {} items that were flagged as deleted before {}", deleted, expirationDate);
        }
        log.info("Purge completed: Deleted a grand total of {} items that were flagged as deleted before {}",
                total, expirationDate);
    } catch (Exception e) {
        // Driver boundary: log and fall through so the process exits via the finally block.
        log.error(e.getMessage(), e);
    } finally {
        log.info("exiting...");
    }
}
/**
 * InternalXbase.g:1675:1: ruleJvmParameterizedTypeReference :
 *     ( ( rule__JvmParameterizedTypeReference__Group__0 ) ) ;
 *
 * ANTLR-generated rule method; do not edit by hand.
 */
public final void ruleJvmParameterizedTypeReference() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalXbase.g:1679:2: ( ( ( rule__JvmParameterizedTypeReference__Group__0 ) ) )
        // InternalXbase.g:1680:2: ( ( rule__JvmParameterizedTypeReference__Group__0 ) )
        {
            // InternalXbase.g:1680:2: ( ( rule__JvmParameterizedTypeReference__Group__0 ) )
            // InternalXbase.g:1681:3: ( rule__JvmParameterizedTypeReference__Group__0 )
            {
                if (state.backtracking == 0) {
                    before(grammarAccess.getJvmParameterizedTypeReferenceAccess().getGroup());
                }
                // InternalXbase.g:1682:3: ( rule__JvmParameterizedTypeReference__Group__0 )
                // InternalXbase.g:1682:4: rule__JvmParameterizedTypeReference__Group__0
                {
                    pushFollow(FOLLOW_2);
                    rule__JvmParameterizedTypeReference__Group__0();
                    state._fsp--;
                    if (state.failed) return;
                }
                if (state.backtracking == 0) {
                    after(grammarAccess.getJvmParameterizedTypeReferenceAccess().getGroup());
                }
            }
        }
    } catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    } finally {
        restoreStackSize(stackSize);
    }
    return;
}
/**
 * Enables the Operations Management Suite (OMS) on the HDInsight cluster.
 *
 * @param resourceGroupName The name of the resource group.
 * @param clusterName       The name of the cluster.
 * @param parameters        The Operations Management Suite (OMS) workspace parameters.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorResponseException   thrown if the request is rejected by server
 * @throws RuntimeException         all other wrapped checked exceptions if the request fails to be sent
 */
public void beginEnableMonitoring(String resourceGroupName, String clusterName, ClusterMonitoringRequest parameters) {
    // Block until the async call completes; the response body is discarded
    // since this method returns void.
    beginEnableMonitoringWithServiceResponseAsync(resourceGroupName, clusterName, parameters).toBlocking().single().body();
}
public class ConverterUtils { /** * Does cell of a given address copy from { @ link Sheet } to { @ link IDataModel } . */
static void copyCell ( ICellAddress address , Sheet from , IDataModel to ) { } } | if ( from == null ) { return ; } Row fromRow = from . getRow ( address . a1Address ( ) . row ( ) ) ; if ( fromRow == null ) { return ; } Cell fromCell = fromRow . getCell ( address . a1Address ( ) . column ( ) ) ; if ( fromCell == null ) { return ; } DmCell toCell = new DmCell ( ) ; toCell . setAddress ( address ) ; toCell . setContent ( resolveCellValue ( fromCell ) ) ; to . setCell ( address , toCell ) ; |
public class TaxinvoiceServiceImp { /** * ( non - Javadoc )
* @ see com . popbill . api . TaxinvoiceService # getMassPrintURL ( java . lang . String , com . popbill . api . taxinvoice . MgtKeyType , java . lang . String [ ] ) */
@ Override public String getMassPrintURL ( String CorpNum , MgtKeyType KeyType , String [ ] MgtKeyList ) throws PopbillException { } } | return getMassPrintURL ( CorpNum , KeyType , MgtKeyList , null ) ; |
/**
 * Initializes the filter from its configuration: logging flags, exclusion
 * patterns, request encoding, and parameter masking.
 *
 * @param filterConfig the servlet filter configuration
 * @throws ServletException in case of initialization error
 */
public void init(FilterConfig filterConfig) throws ServletException {
    this.config = filterConfig;
    // errorLogging defaults to true when the init-param is absent.
    this.errorLogging = isBooleanParameter(filterConfig, "errorLogging", true);
    setupExceptExtSet(filterConfig);
    setupExceptUrlPattern(filterConfig);
    setupRequestUriTitleUrlPattern(filterConfig);
    setupSubRequestUrlPatternUrlPattern(filterConfig);
    setupRequestCharacterEncoding(filterConfig);
    setupMaskParamSet(filterConfig);
    setupMaskedString(filterConfig);
}
public class Node { /** * Inserts the suffix at the given active point .
* @ param suffix The suffix to insert .
* @ param activePoint The active point to insert it at . */
@ SuppressWarnings ( "unchecked" ) void insert ( Suffix < T , S > suffix , ActivePoint < T , S > activePoint ) { } } | Object item = suffix . getEndItem ( ) ; if ( edges . containsKey ( item ) ) { if ( tree . isNotFirstInsert ( ) && activePoint . getNode ( ) != tree . getRoot ( ) ) tree . setSuffixLink ( activePoint . getNode ( ) ) ; activePoint . setEdge ( edges . get ( item ) ) ; activePoint . incrementLength ( ) ; } else { saveSequenceTerminal ( item ) ; Edge < T , S > newEdge = new Edge < > ( suffix . getEndPosition ( ) - 1 , this , sequence , tree ) ; edges . put ( ( T ) suffix . getEndItem ( ) , newEdge ) ; suffix . decrement ( ) ; activePoint . updateAfterInsert ( suffix ) ; if ( tree . isNotFirstInsert ( ) && ! this . equals ( tree . getRoot ( ) ) ) { tree . getLastNodeInserted ( ) . setSuffixLink ( this ) ; } if ( suffix . isEmpty ( ) ) { } else tree . insert ( suffix ) ; } |
public class StringHelper { /** * Get the number of trailing white spaces according to
* { @ link Character # isWhitespace ( char ) }
* @ param s
* The string to be parsed . May be < code > null < / code > .
* @ return Always & ge ; 0. */
@ Nonnegative public static int getTrailingWhitespaceCount ( @ Nullable final String s ) { } } | int ret = 0 ; if ( s != null ) { int nLast = s . length ( ) - 1 ; while ( nLast >= 0 && Character . isWhitespace ( s . charAt ( nLast ) ) ) { ++ ret ; -- nLast ; } } return ret ; |
/**
 * Unbinds all data sources currently bound by this instance of this class.
 * If successful, it then tears down the JNDI subcontexts and closes the
 * {@link InitialContext}.
 *
 * @throws Exception if an error occurs during unbinding or closing.
 */
@Override
public void close() throws Exception {
    unbindAll();
    // Destroy the subcontexts from the most deeply nested outward.
    // NOTE(review): this tears down the whole "java:" namespace — confirm nothing
    // else in the process is still bound under these contexts.
    initialContext.destroySubcontext("java:/comp/env/jdbc");
    initialContext.destroySubcontext("java:/comp/env");
    initialContext.destroySubcontext("java:/comp");
    initialContext.destroySubcontext("java:");
    initialContext.close();
}
public class MiscUtils { /** * for logging */
static String dateToStrTs ( long dateMs ) { } } | SimpleDateFormat df = new SimpleDateFormat ( "yyyy-MM-dd'T'HH:mm:ss.SSS" ) ; return df . format ( new Date ( dateMs ) ) ; |
/**
 * Marshall the given parameter object.
 *
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public void marshall(SummarizedAttackVector summarizedAttackVector, ProtocolMarshaller protocolMarshaller) {
    if (summarizedAttackVector == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(summarizedAttackVector.getVectorType(), VECTORTYPE_BINDING);
        protocolMarshaller.marshall(summarizedAttackVector.getVectorCounters(), VECTORCOUNTERS_BINDING);
    } catch (Exception e) {
        // Wrap with the cause preserved so marshalling failures stay diagnosable.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
/**
 * Resolve bean class for the autowire rule.
 *
 * @param autowireRule the autowire rule
 * @throws IllegalRuleException if an illegal rule is found
 */
public void resolveBeanClass(AutowireRule autowireRule) throws IllegalRuleException {
    if (autowireRule.getTargetType() == AutowireTargetType.FIELD) {
        if (autowireRule.isRequired()) {
            // Field targets: only the first type/qualifier pair is used.
            Class<?>[] types = autowireRule.getTypes();
            String[] qualifiers = autowireRule.getQualifiers();
            reserveBeanReference(qualifiers[0], types[0], autowireRule);
        }
    } else if (autowireRule.getTargetType() == AutowireTargetType.FIELD_VALUE) {
        // Field values are expressed as tokens; resolve through the token overload.
        Token token = autowireRule.getToken();
        resolveBeanClass(token, autowireRule);
    } else if (autowireRule.getTargetType() == AutowireTargetType.METHOD
            || autowireRule.getTargetType() == AutowireTargetType.CONSTRUCTOR) {
        if (autowireRule.isRequired()) {
            // Methods/constructors may take several parameters: reserve each pair.
            Class<?>[] types = autowireRule.getTypes();
            String[] qualifiers = autowireRule.getQualifiers();
            for (int i = 0; i < types.length; i++) {
                reserveBeanReference(qualifiers[i], types[i], autowireRule);
            }
        }
    }
}
/**
 * Removes the given {@code element} from the list.
 *
 * @param element the element to remove.
 * @throws IllegalArgumentException if the element is not in the list.
 */
public synchronized void removeElement(T element) {
    // Membership check: a linked element has at least one neighbor, except when it
    // is the sole element, in which case it must be the head.
    checkArgument(
        element.getNext() != null || element.getPrev() != null || element == head,
        "Element not in the list.");
    size--;
    T prev = element.getPrev();
    T next = element.getNext();
    if (prev == null) {
        // This is the first element
        head = next;
        if (head != null) {
            // If more than one element in the list.
            head.setPrev(null);
            element.setNext(null);
        }
    } else if (next == null) {
        // This is the last element, and there is at least another element because
        // element.getPrev() != null.
        prev.setNext(null);
        element.setPrev(null);
    } else {
        // Middle element: splice the neighbors together and fully detach the element.
        prev.setNext(element.getNext());
        next.setPrev(element.getPrev());
        element.setNext(null);
        element.setPrev(null);
    }
}
/**
 * <!-- begin-user-doc -->
 * Returns the {@link EClass} for IfcAlarmType, lazily resolved from the
 * registered Ifc2x3tc1 package (classifier index 10).
 * <!-- end-user-doc -->
 * @generated
 */
public EClass getIfcAlarmType() {
    if (ifcAlarmTypeEClass == null) {
        ifcAlarmTypeEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(10);
    }
    return ifcAlarmTypeEClass;
}
public class ASMUtil { /** * Gibt ein uebergeordnetes Tag mit dem uebergebenen Full - Name ( Namespace und Name ) zurueck , falls
* ein solches existiert , andernfalls wird null zurueckgegeben .
* @ param el Startelement , von wo aus gesucht werden soll .
* @ param fullName Name des gesuchten Tags .
* @ return uebergeornetes Element oder null . */
public static Tag getAncestorTag ( Statement stat , String fullName ) { } } | Statement parent = stat ; Tag tag ; while ( true ) { parent = parent . getParent ( ) ; if ( parent == null ) return null ; if ( parent instanceof Tag ) { tag = ( Tag ) parent ; if ( tag . getFullname ( ) . equalsIgnoreCase ( fullName ) ) return tag ; } } |
public class FileSystem { /** * Replies the system shared library directory for the specified software .
* < p > On Unix operating systems , the system directory for a
* software is by default { @ code / usr / lib / software } where { @ code software }
* is the given parameter ( case - sensitive ) . On Windows & reg ; operating systems , the user
* directory for a software is by default
* { @ code C : < span > \ < / span > Program Files < span > \ < / span > software }
* where { @ code software } is the given parameter ( case - insensitive ) .
* @ param software is the name of the concerned software .
* @ return the configuration directory of the software for the current user . */
@ Pure public static String getSystemSharedLibraryDirectoryNameFor ( String software ) { } } | final File f = getSystemSharedLibraryDirectoryFor ( software ) ; if ( f == null ) { return null ; } return f . getAbsolutePath ( ) ; |
/**
 * Example of Cluster and Namespaced scoped K8S Custom Resources.
 * To test Cluster scoped resource use "--cluster" as first argument.
 * To test Namespaced resource provide namespace as first argument (namespace must exist in K8S).
 *
 * @param args Either "--cluster" or namespace name.
 */
public static void main(String[] args) {
    boolean resourceNamespaced = true;
    String namespace = null;
    if (args.length > 0) {
        if ("--cluster".equals(args[0])) {
            resourceNamespaced = false;
        } else {
            namespace = args[0];
        }
    }
    try (final KubernetesClient client = new DefaultKubernetesClient()) {
        if (resourceNamespaced) {
            // Fall back to the client's default namespace when none was given.
            if (namespace == null) {
                namespace = client.getNamespace();
            }
            if (namespace == null) {
                System.err.println("No namespace specified and no default defined!");
                return;
            }
            System.out.println("Using namespace: " + namespace);
        } else {
            System.out.println("Creating cluster scoped resource");
        }
        if (logRootPaths) {
            RootPaths rootPaths = client.rootPaths();
            if (rootPaths != null) {
                List<String> paths = rootPaths.getPaths();
                if (paths != null) {
                    System.out.println("Supported API Paths:");
                    for (String path : paths) {
                        System.out.println("    " + path);
                    }
                    System.out.println();
                }
            }
        }
        // Look for the example CRD among the registered CRDs.
        CustomResourceDefinitionList crds = client.customResourceDefinitions().list();
        List<CustomResourceDefinition> crdsItems = crds.getItems();
        System.out.println("Found " + crdsItems.size() + " CRD(s)");
        CustomResourceDefinition dummyCRD = null;
        for (CustomResourceDefinition crd : crdsItems) {
            ObjectMeta metadata = crd.getMetadata();
            if (metadata != null) {
                String name = metadata.getName();
                System.out.println("    " + name + " => " + metadata.getSelfLink());
                if (DUMMY_CRD_NAME.equals(name)) {
                    dummyCRD = crd;
                }
            }
        }
        if (dummyCRD != null) {
            System.out.println("Found CRD: " + dummyCRD.getMetadata().getSelfLink());
        } else {
            // CRD not registered yet: create it.
            dummyCRD = new CustomResourceDefinitionBuilder()
                    .withApiVersion("apiextensions.k8s.io/v1beta1")
                    .withNewMetadata().withName(DUMMY_CRD_NAME).endMetadata()
                    .withNewSpec().withGroup(DUMMY_CRD_GROUP).withVersion("v1")
                    .withScope(resourceScope(resourceNamespaced))
                    .withNewNames().withKind("Dummy").withShortNames("dummy").withPlural("dummies").endNames()
                    .endSpec().build();
            client.customResourceDefinitions().create(dummyCRD);
            System.out.println("Created CRD " + dummyCRD.getMetadata().getName());
        }
        // Teach the deserializer about the custom kind before using the typed client.
        KubernetesDeserializer.registerCustomKind(DUMMY_CRD_GROUP + "/v1", "Dummy", Dummy.class);
        // lets create a client for the CRD
        NonNamespaceOperation<Dummy, DummyList, DoneableDummy, Resource<Dummy, DoneableDummy>> dummyClient =
                client.customResources(dummyCRD, Dummy.class, DummyList.class, DoneableDummy.class);
        if (resourceNamespaced) {
            dummyClient = ((MixedOperation<Dummy, DummyList, DoneableDummy, Resource<Dummy, DoneableDummy>>) dummyClient)
                    .inNamespace(namespace);
        }
        CustomResourceList<Dummy> dummyList = dummyClient.list();
        List<Dummy> items = dummyList.getItems();
        System.out.println("  found " + items.size() + " dummies");
        for (Dummy item : items) {
            System.out.println("    " + item);
        }
        // Create (or replace) one example resource, then update it.
        Dummy dummy = new Dummy();
        ObjectMeta metadata = new ObjectMeta();
        metadata.setName("foo");
        dummy.setMetadata(metadata);
        DummySpec dummySpec = new DummySpec();
        Date now = new Date();
        dummySpec.setBar("beer: " + now);
        dummySpec.setFoo("cheese: " + now);
        dummy.setSpec(dummySpec);
        Dummy created = dummyClient.createOrReplace(dummy);
        System.out.println("Upserted " + dummy);
        created.getSpec().setBar("otherBar");
        dummyClient.createOrReplace(created);
        System.out.println("Watching for changes to Dummies");
        // Watch from the created resource's version until stdin is read (blocks).
        dummyClient.withResourceVersion(created.getMetadata().getResourceVersion()).watch(new Watcher<Dummy>() {
            @Override
            public void eventReceived(Action action, Dummy resource) {
                System.out.println("==> " + action + " for " + resource);
                if (resource.getSpec() == null) {
                    logger.error("No Spec for resource " + resource);
                }
            }

            @Override
            public void onClose(KubernetesClientException cause) {
            }
        });
        System.in.read();
    } catch (KubernetesClientException e) {
        logger.error(e.getMessage(), e);
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
    }
}
/**
 * Adds a field to the statistics to be requested.
 *
 * @param field the field to collect statistics for; must not be {@code null}
 * @return a new {@link FieldStatsOptions} scoped to the given field
 */
public FieldStatsOptions addField(Field field) {
    Assert.notNull(field, "Field for statistics must not be 'null'.");
    // NOTE(review): the shared state is mutated before the new options object is
    // built around it — the returned instance and this one share that state.
    state.fields.add(field);
    return new FieldStatsOptions(field, state);
}
public class Status { /** * Extract an error trailers from the causal chain of a { @ link Throwable } .
* @ return the trailers or { @ code null } if not found . */
@ ExperimentalApi ( "https://github.com/grpc/grpc-java/issues/4683" ) public static Metadata trailersFromThrowable ( Throwable t ) { } } | Throwable cause = checkNotNull ( t , "t" ) ; while ( cause != null ) { if ( cause instanceof StatusException ) { return ( ( StatusException ) cause ) . getTrailers ( ) ; } else if ( cause instanceof StatusRuntimeException ) { return ( ( StatusRuntimeException ) cause ) . getTrailers ( ) ; } cause = cause . getCause ( ) ; } return null ; |
/**
 * Synchronize {@link InstanceInfo} information if the timestamp between
 * this node and the peer eureka nodes vary.
 */
private void syncInstancesIfTimestampDiffers(String appName, String id, InstanceInfo info, InstanceInfo infoFromPeer) {
    try {
        if (infoFromPeer != null) {
            logger.warn("Peer wants us to take the instance information from it, since the timestamp differs,"
                    + "Id : {} My Timestamp : {}, Peer's timestamp: {}",
                    id, info.getLastDirtyTimestamp(), infoFromPeer.getLastDirtyTimestamp());
            // Adopt the peer's overridden status when it carries a meaningful value.
            if (infoFromPeer.getOverriddenStatus() != null
                    && !InstanceStatus.UNKNOWN.equals(infoFromPeer.getOverriddenStatus())) {
                logger.warn("Overridden Status info -id {}, mine {}, peer's {}",
                        id, info.getOverriddenStatus(), infoFromPeer.getOverriddenStatus());
                registry.storeOverriddenStatusIfRequired(appName, id, infoFromPeer.getOverriddenStatus());
            }
            // Re-register with the peer's copy; presumably the boolean flags a
            // replication-originated registration — confirm against Registry#register.
            registry.register(infoFromPeer, true);
        }
    } catch (Throwable e) {
        // Best effort: a failed sync must not break the replication processing.
        logger.warn("Exception when trying to set information from peer :", e);
    }
}
/**
 * Creates a new task with a reference to the given object.
 *
 * @param task      The data of the task to be created
 * @param reference The reference to the object the task should be attached to
 * @param silent    Disable notifications
 * @param hook      Execute hooks for the change
 * @return The id of the newly created task
 */
public int createTaskWithReference(TaskCreate task, Reference reference, boolean silent, boolean hook) {
    // POST /task/{refType}/{refId}/?silent=0|1&hook=0|1 with the task as the JSON body.
    return getResourceFactory()
            .getApiResource("/task/" + reference.getType().name().toLowerCase() + "/" + reference.getId() + "/")
            .queryParam("silent", silent ? "1" : "0")
            .queryParam("hook", hook ? "1" : "0")
            .entity(task, MediaType.APPLICATION_JSON_TYPE)
            .post(TaskCreateResponse.class)
            .getId();
}
/**
 * Updates a specified layer.
 *
 * <p><b>Required Permissions</b>: To use this action, an IAM user must have a Manage
 * permissions level for the stack, or an attached policy that explicitly grants
 * permissions. For more information on user permissions, see
 * <a href="http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html">Managing
 * User Permissions</a>.
 *
 * @param request the UpdateLayer request
 * @return Result of the UpdateLayer operation returned by the service.
 * @throws ValidationException
 *         Indicates that a request was not valid.
 * @throws ResourceNotFoundException
 *         Indicates that a resource was not found.
 * @sample AWSOpsWorks.UpdateLayer
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworks-2013-02-18/UpdateLayer" target="_top">AWS API
 *      Documentation</a>
 */
@Override
public UpdateLayerResult updateLayer(UpdateLayerRequest request) {
    // Run pre-execution handlers (which may return a modified request) before dispatch.
    request = beforeClientExecution(request);
    return executeUpdateLayer(request);
}
/**
 * Returns a range of all the commerce order notes.
 *
 * <p>Useful when paginating results. Returns a maximum of <code>end - start</code>
 * instances. <code>start</code> and <code>end</code> are not primary keys, they are
 * indexes in the result set: <code>0</code> refers to the first result. Setting both
 * to {@link QueryUtil#ALL_POS} returns the full result set.
 *
 * @param start the lower bound of the range of commerce order notes
 * @param end   the upper bound of the range of commerce order notes (not inclusive)
 * @return the range of commerce order notes
 */
@Override
public List<CommerceOrderNote> findAll(int start, int end) {
    // Delegate to the comparator-aware overload with no explicit ordering.
    return findAll(start, end, null);
}
public class CommonDataProvider { /** * Writes a success result
* @ param line
* The line number */
public void writeSuccessResult ( int line ) { } } | logger . debug ( "Write Success result => line:{}" , line ) ; writeValue ( resultColumnName , line , Messages . getMessage ( Messages . SUCCESS_MESSAGE ) ) ; |
public class JCommander {
    /**
     * Invoke Console.readPassword through reflection to avoid depending
     * on Java 6.
     *
     * <p>Prints {@code description + ": "} as a prompt on the console, then
     * reads the password characters, optionally echoing the typed input.
     *
     * @param description prompt text shown before reading the password
     * @param echoInput whether the typed characters should be echoed back
     * @return the password characters entered by the user
     */
    private char[] readPassword(String description, boolean echoInput) {
        getConsole().print(description + ": ");
        return getConsole().readPassword(echoInput);
    }
}
public class StatisticsJDBCStorageConnection { /** * { @ inheritDoc } */
public List < ACLHolder > getACLHolders ( ) throws RepositoryException , IllegalStateException , UnsupportedOperationException { } } | Statistics s = ALL_STATISTICS . get ( GET_ACL_HOLDERS ) ; try { s . begin ( ) ; return wcs . getACLHolders ( ) ; } finally { s . end ( ) ; } |
public class AmazonCloudDirectoryClient {
    /**
     * Publishes a development schema with a major version and a recommended minor version.
     *
     * @param request the PublishSchema request
     * @return Result of the PublishSchema operation returned by the service.
     * @throws InternalServiceException
     *         Indicates a problem that must be resolved by Amazon Web Services. This might be a
     *         transient error, in which case you can retry your request until it succeeds.
     * @throws InvalidArnException
     *         Indicates that the provided ARN value is not valid.
     * @throws RetryableConflictException
     *         Occurs when a conflict with a previous successful write is detected; a retry with
     *         appropriate backoff logic is the recommended response.
     * @throws ValidationException
     *         Indicates that your request is malformed in some manner. See the exception message.
     * @throws LimitExceededException
     *         Indicates that limits are exceeded.
     * @throws AccessDeniedException
     *         Access denied. Check your permissions.
     * @throws ResourceNotFoundException
     *         The specified resource could not be found.
     * @throws SchemaAlreadyPublishedException
     *         Indicates that a schema is already published.
     * @sample AmazonCloudDirectory.PublishSchema
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/clouddirectory-2017-01-11/PublishSchema"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public PublishSchemaResult publishSchema(PublishSchemaRequest request) {
        // Run client-side request handlers (metrics, customizations) before
        // dispatching to the generated execution path.
        request = beforeClientExecution(request);
        return executePublishSchema(request);
    }
}
public class ReadCommEventLogResponse { /** * getMessage - - format the message into a byte array .
* @ return Response as byte array */
public byte [ ] getMessage ( ) { } } | byte result [ ] = new byte [ events . length + 7 ] ; result [ 0 ] = ( byte ) ( byteCount = events . length + 6 ) ; result [ 1 ] = ( byte ) ( status >> 8 ) ; result [ 2 ] = ( byte ) ( status & 0xFF ) ; result [ 3 ] = ( byte ) ( eventCount >> 8 ) ; result [ 4 ] = ( byte ) ( eventCount & 0xFF ) ; result [ 5 ] = ( byte ) ( messageCount >> 8 ) ; result [ 6 ] = ( byte ) ( messageCount & 0xFF ) ; System . arraycopy ( events , 0 , result , 7 , events . length ) ; return result ; |
public class EglCore { /** * Creates an EGL surface associated with a Surface .
* If this is destined for MediaCodec , the EGLConfig should have the " recordable " attribute . */
public EGLSurface createWindowSurface ( Object surface ) { } } | if ( ! ( surface instanceof Surface ) && ! ( surface instanceof SurfaceTexture ) ) { throw new RuntimeException ( "invalid surface: " + surface ) ; } // Create a window surface , and attach it to the Surface we received .
int [ ] surfaceAttribs = { EGL14 . EGL_NONE } ; EGLSurface eglSurface = EGL14 . eglCreateWindowSurface ( mEGLDisplay , mEGLConfig , surface , surfaceAttribs , 0 ) ; checkEglError ( "eglCreateWindowSurface" ) ; if ( eglSurface == null ) { throw new RuntimeException ( "surface was null" ) ; } return eglSurface ; |
public class FilteringBeanMap { /** * < p > from . < / p >
* @ param src a { @ link java . lang . Object } object .
* @ param pathProperties a { @ link ameba . message . internal . BeanPathProperties } object .
* @ return a { @ link java . lang . Object } object . */
public static Object from ( Object src , final BeanPathProperties pathProperties ) { } } | return new Transformer ( ) { @ Override protected FilteringBeanMap onTransform ( Object obj ) { return new FilteringBeanMap < > ( obj , pathProperties ) ; } } . transform ( src ) ; |
public class StreamingWordsi { /** * { @ inheritDoc } */
public void processSpace ( Properties props ) { } } | final double mergeThreshold = .15 ; WorkQueue workQueue = WorkQueue . getWorkQueue ( ) ; Object key = workQueue . registerTaskGroup ( clusterMap . size ( ) ) ; // Iterate through all of the clusters and perform an agglomerative
// cluster over the learned word senses . If there is a reporter , the
// cluster assignments are reported .
for ( Map . Entry < String , OnlineClustering < SparseDoubleVector > > entry : clusterMap . entrySet ( ) ) { final String primaryKey = entry . getKey ( ) ; final OnlineClustering < SparseDoubleVector > contexts = entry . getValue ( ) ; workQueue . add ( key , new Runnable ( ) { public void run ( ) { clusterAndAssignSenses ( contexts , primaryKey , mergeThreshold ) ; } } ) ; } workQueue . await ( key ) ; // Null out the cluster map so that the garbage collector can reclaim it
// and any data associated with the Clusters .
clusterMap = null ; if ( reporter != null ) reporter . finalizeReport ( ) ; |
public class Table { /** * Transform the table in a CSV ( with selected delimiter ) representation .
* @ return A String representation of the table , with the column names in the head . */
public String getRowsWithColumnsAndColumnsNamesAsString ( ) { } } | StringBuffer sb = new StringBuffer ( ) ; String lvColumnNames = getColumnNames ( ) ; if ( lvColumnNames . trim ( ) . length ( ) > 0 ) { sb . append ( lvColumnNames ) ; sb . append ( CsvParser . CRLF ) ; } sb . append ( getRowsWithColumnsAsString ( ) ) ; return sb . toString ( ) ; |
public class JSONUtils { /** * Finds out if n represents a BigInteger
* @ return true if n is instanceOf BigInteger or the literal value can be
* evaluated as a BigInteger */
private static boolean isBigInteger ( Number n ) { } } | if ( n instanceof BigInteger ) { return true ; } try { new BigInteger ( String . valueOf ( n ) ) ; return true ; } catch ( NumberFormatException e ) { return false ; } |
public class MarcFieldTransformer {
    /**
     * Tail (appending) mode: builds a transformed MARC field, appending to an
     * existing field when one is supplied, and tracking consecutive repeats of
     * the same transform key in {@code repeatCounter}.
     *
     * @param marcField MARC field
     * @param key key for the MARC field; if null the field is returned unchanged
     * @param appendToThisField the MARC field to append to (may be null)
     * @return transformed MARC field
     */
    public MarcField tail(MarcField marcField, String key, MarcField appendToThisField) {
        if (key == null) {
            return marcField;
        }
        // Resolve the mapping target: prefer the field we append to, otherwise
        // look it up by key.
        MarcField newMarcField = appendToThisField;
        if (newMarcField == null) {
            newMarcField = get(key);
        }
        // Count consecutive fields that share the same transform key.
        if (lastReceived != null) {
            String lastKey = getTransformKey(lastReceived);
            if (key.equals(lastKey)) {
                repeatCounter++;
            } else {
                repeatCounter = 0;
            }
        }
        lastReceived = marcField;
        MarcField.Builder builder = MarcField.builder();
        if (appendToThisField != null) {
            // Appending: seed the builder from the existing field.
            builder.marcField(appendToThisField);
        } else {
            // Fresh field: mapped tag, incoming value.
            builder.tag(newMarcField.getTag()).value(marcField.getValue());
            // NOTE(review): when ignoreIndicator is set the ORIGINAL indicator is
            // copied verbatim; otherwise the mapped indicator is interpolated.
            // "ignore" here appears to mean "ignore the mapping" — confirm.
            if (ignoreIndicator) {
                builder.indicator(marcField.getIndicator());
            } else {
                builder.indicator(interpolate(marcField, newMarcField.getIndicator()));
            }
        }
        if (ignoreSubfieldIds) {
            // just copy subfields as they are
            for (MarcField.Subfield subfield : marcField.getSubfields()) {
                builder.subfield(subfield.getId(), subfield.getValue());
            }
        } else {
            // get the correct MARC field to map subfield IDs; subfields are
            // paired positionally — extra subfields on either side are dropped.
            MarcField marcField1 = get(key);
            Iterator<MarcField.Subfield> subfields = marcField.getSubfields().iterator();
            Iterator<MarcField.Subfield> newSubfields = marcField1.getSubfields().iterator();
            while (subfields.hasNext() && newSubfields.hasNext()) {
                MarcField.Subfield subfield = subfields.next();
                MarcField.Subfield newSubfield = newSubfields.next();
                builder.subfield(newSubfield.getId(), subfield.getValue());
            }
        }
        lastBuilt = builder.build();
        return lastBuilt;
    }
}
public class XTreeHeader {
    /**
     * Writes this header to the specified file. Via the superclass, writes the
     * integer values <code>version</code>, <code>pageSize</code>,
     * {@link #dirCapacity}, {@link #leafCapacity}, {@link #dirMinimum},
     * {@link #leafMinimum}; then writes {@link #min_fanout}, the
     * <code>long</code> number of elements, {@link #dimensionality}, the
     * <code>float</code> {@link #max_overlap} and the <code>long</code>
     * {@link #supernode_offset}.
     */
    @Override
    public void writeHeader(RandomAccessFile file) throws IOException {
        // Superclass writes version, pageSize, capacities and minimums.
        super.writeHeader(file);
        file.writeInt(min_fanout);
        file.writeLong(num_elements); // written here but missing from the old javadoc
        file.writeInt(dimensionality);
        file.writeFloat(max_overlap);
        file.writeLong(supernode_offset);
    }
}
public class CmsPublishQueue { /** * Returns the next publish job to be published , removing it
* from the queue , or < code > null < / code > if the queue is empty . < p >
* @ return the next publish job to be published */
protected CmsPublishJobInfoBean next ( ) { } } | CmsPublishJobInfoBean publishJob = OpenCms . getMemoryMonitor ( ) . getFirstCachedPublishJob ( ) ; if ( publishJob != null ) { OpenCms . getMemoryMonitor ( ) . uncachePublishJob ( publishJob ) ; } return publishJob ; |
public class ExtendedMockito {
    /**
     * Common implementation of verification of static method calls.
     *
     * <p>Only one verification may be in progress per thread. The lambda is run
     * once; exactly one static method call must be intercepted, and that call
     * is verified against the class's static mock marker.
     *
     * @param method The static method call to be verified
     * @param mode The verification mode
     * @param instanceInOrder If set, the {@link StaticInOrder} object
     */
    @SuppressWarnings({"CheckReturnValue", "MockitoUsage", "unchecked"})
    static void verifyInt(MockedVoidMethod method, VerificationMode mode, InOrder instanceInOrder) {
        // Guard against re-entrant verification on the same thread.
        if (onMethodCallDuringVerification.get() != null) {
            throw new IllegalStateException("Verification is already in progress on this " + "thread.");
        }
        ArrayList<Method> verifications = new ArrayList<>();
        /* Set up callback that is triggered when the next static method is called on this thread.
         * This is necessary as we don't know which class the method will be called on. Once the
         * call is intercepted this will
         * 1. Remove all matchers (e.g. eq(), any()) from the matcher stack
         * 2. Call verify on the marker for the class
         * 3. Add the markers back to the stack */
        onMethodCallDuringVerification.set((clazz, verifiedMethod) -> {
            // TODO: O holy reflection! Let's hope we can integrate this better.
            // NOTE(review): relies on Mockito internals (ArgumentMatcherStorageImpl,
            // matcherStack) via reflection — brittle across Mockito versions.
            try {
                ArgumentMatcherStorageImpl argMatcherStorage = (ArgumentMatcherStorageImpl) mockingProgress().getArgumentMatcherStorage();
                List<LocalizedMatcher> matchers;
                // Matchers are recorded before verify runs, hence remove them from the storage.
                Method resetStackMethod = argMatcherStorage.getClass().getDeclaredMethod("resetStack");
                resetStackMethod.setAccessible(true);
                matchers = (List<LocalizedMatcher>) resetStackMethod.invoke(argMatcherStorage);
                if (instanceInOrder == null) {
                    verify(staticMockMarker(clazz), mode);
                } else {
                    instanceInOrder.verify(staticMockMarker(clazz), mode);
                }
                // Add the matchers back after verify is called.
                Field matcherStackField = argMatcherStorage.getClass().getDeclaredField("matcherStack");
                matcherStackField.setAccessible(true);
                Method pushMethod = matcherStackField.getType().getDeclaredMethod("push", Object.class);
                for (LocalizedMatcher matcher : matchers) {
                    pushMethod.invoke(matcherStackField.get(argMatcherStorage), matcher);
                }
            } catch (NoSuchFieldException | NoSuchMethodException | IllegalAccessException | InvocationTargetException | ClassCastException e) {
                throw new Error("Reflection failed. Do you use a compatible version of " + "mockito?", e);
            }
            verifications.add(verifiedMethod);
        });
        try {
            try {
                // Trigger the method call. This call will be intercepted and trigger the
                // onMethodCallDuringVerification callback.
                method.run();
            } catch (Throwable t) {
                // Rethrow unchecked as-is; wrap anything checked.
                if (t instanceof RuntimeException) {
                    throw (RuntimeException) t;
                } else if (t instanceof Error) {
                    throw (Error) t;
                }
                throw new RuntimeException(t);
            }
            if (verifications.isEmpty()) {
                // Make sure something was intercepted.
                throw new IllegalArgumentException("Nothing was verified. Does the lambda call " + "a static method on a 'static' mock/spy ?");
            } else if (verifications.size() > 1) {
                // A lambda might call several methods. In this case it is not clear what should
                // be verified. Hence throw an error.
                throw new IllegalArgumentException("Multiple intercepted calls on methods " + verifications);
            }
        } finally {
            // Always clear the per-thread callback, even on failure.
            onMethodCallDuringVerification.remove();
        }
    }
}
public class StreamSet { /** * Set an element in the stream array
* @ param priority
* @ param reliability
* @ param stream
* @ throws SIResourceException */
protected void setStream ( int priority , Reliability reliability , Stream stream ) throws SIResourceException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "setStream" , new Object [ ] { new Integer ( priority ) , reliability , stream } ) ; ReliabilitySubset subset = getSubset ( reliability ) ; subset . setStream ( priority , stream ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "setStream" ) ; |
public class GetIdentityPoolConfigurationRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol-specific wire format.
     *
     * @param getIdentityPoolConfigurationRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives each bound field
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetIdentityPoolConfigurationRequest getIdentityPoolConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
        if (getIdentityPoolConfigurationRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the identity pool id is bound for this request type.
            protocolMarshaller.marshall(getIdentityPoolConfigurationRequest.getIdentityPoolId(), IDENTITYPOOLID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PrimitiveDataChecksum { /** * Note : leaves the checksum untouched if given value is null ( provide a special value for stronger hashing ) . */
public void updateUtf8 ( String [ ] strings ) { } } | if ( strings != null ) { for ( String string : strings ) { updateUtf8 ( string ) ; } } |
public class HelixUtils {
    /**
     * We have switched from Helix JobQueue to WorkFlow based job execution.
     * Kept for backward compatibility: delegates to {@code submitJobToWorkFlow}
     * with identical arguments.
     *
     * @deprecated use {@code submitJobToWorkFlow} directly
     */
    @Deprecated
    public static void submitJobToQueue(JobConfig.Builder jobConfigBuilder, String queueName, String jobName, TaskDriver helixTaskDriver, HelixManager helixManager, long jobQueueDeleteTimeoutSeconds) throws Exception {
        submitJobToWorkFlow(jobConfigBuilder, queueName, jobName, helixTaskDriver, helixManager, jobQueueDeleteTimeoutSeconds);
    }
}
public class Sorter { /** * Sorts the tests in < code > target < / code > using < code > comparator < / code > .
* @ since 4.0 */
@ Override public void apply ( Object target ) { } } | /* * Note that all runners that are Orderable are also Sortable ( because
* Orderable extends Sortable ) . Sorting is more efficient than ordering ,
* so we override the parent behavior so we sort instead . */
if ( target instanceof Sortable ) { Sortable sortable = ( Sortable ) target ; sortable . sort ( this ) ; } |
public class DictionaryFactory { /** * create a new ADictionary instance
* @ param _ class
* @ return ADictionary */
public static ADictionary createDictionary ( Class < ? extends ADictionary > _class , Class < ? > [ ] paramType , Object [ ] args ) { } } | try { Constructor < ? > cons = _class . getConstructor ( paramType ) ; return ( ( ADictionary ) cons . newInstance ( args ) ) ; } catch ( Exception e ) { System . err . println ( "can't create the ADictionary instance " + "with classpath [" + _class . getName ( ) + "]" ) ; e . printStackTrace ( ) ; } return null ; |
public class ReadWriteTypeExtractor { /** * Count the number of matching fields . That is , fields with the same name .
* @ param lhsFields
* @ param rhsFields
* @ return */
protected static int countMatchingFields ( Tuple < ? extends SemanticType . Field > lhsFields , Tuple < ? extends SemanticType . Field > rhsFields ) { } } | int count = 0 ; for ( int i = 0 ; i != lhsFields . size ( ) ; ++ i ) { for ( int j = 0 ; j != rhsFields . size ( ) ; ++ j ) { SemanticType . Field lhsField = lhsFields . get ( i ) ; SemanticType . Field rhsField = rhsFields . get ( j ) ; Identifier lhsFieldName = lhsField . getName ( ) ; Identifier rhsFieldName = rhsField . getName ( ) ; if ( lhsFieldName . equals ( rhsFieldName ) ) { count = count + 1 ; } } } return count ; |
public class VaultsInner { /** * Permanently deletes the specified vault . aka Purges the deleted Azure key vault .
* @ param vaultName The name of the soft - deleted vault .
* @ param location The location of the soft - deleted vault .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < Void > purgeDeletedAsync ( String vaultName , String location ) { } } | return purgeDeletedWithServiceResponseAsync ( vaultName , location ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
public class ProxyInvocation { /** * < p > Creates a new instance of { @ link ProxyInvocation } which models a command for request invocation
* on the proxy of an endpoint definition . < / p >
* @ param template
* the instance of { @ link ProxyInvocation . Template } which defines the stages in invocation
* < br > < br >
* @ param proxy
* the instance of the proxy for the endpoint definition on which the request was invoked
* < br > < br >
* @ param method
* the { @ link Method } on the endpoint definition interface representing the invoked request
* < br > < br >
* @ param args
* the runtime method arguments which were passed to the request invoked on the proxy
* < br > < br >
* @ return an instance of { @ link ProxyInvocation } which can be used to perform the request execution
* < br > < br >
* @ since 1.3.0 */
public static ProxyInvocation newInstance ( ProxyInvocation . Template template , Object proxy , Method method , Object [ ] args ) { } } | return new ProxyInvocation ( InvocationContext . newBuilder ( ) . setEndpoint ( template . endpoint ) . setProxy ( proxy ) . setRequest ( method ) . setArguments ( args ) . build ( ) , template ) ; |
public class CurrentAndFlatListHolder { /** * Returns in a { @ link ListHolder } all the T elements that are present in the
* current node ' s ancestors , up to the root node . */
public SimpleListHolder < T > getFlatAbove ( ) { } } | if ( flatAbove == null ) { flatAbove = newSimpleListHolder ( above , getSortProperty ( ) ) ; } return flatAbove ; |
public class DoubleChromosome { /** * Create a new { @ code DoubleChromosome } with the given genes .
* @ since 4.3
* @ param genes the genes of the chromosome .
* @ return a new chromosome with the given genes .
* @ throws NullPointerException if the given { @ code genes } are { @ code null }
* @ throws IllegalArgumentException if the of the genes iterable is empty or
* the given { @ code genes } doesn ' t have the same range . */
public static DoubleChromosome of ( final Iterable < DoubleGene > genes ) { } } | final ISeq < DoubleGene > values = ISeq . of ( genes ) ; checkGeneRange ( values . stream ( ) . map ( DoubleGene :: range ) ) ; return new DoubleChromosome ( values , IntRange . of ( values . length ( ) ) ) ; |
public class Ifc4PackageImpl {
    /**
     * Returns the {@code EClass} for IfcObjective, lazily resolved from the
     * globally registered Ifc4 package (classifier index 392 in the generated
     * metamodel).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcObjective() {
        if (ifcObjectiveEClass == null) {
            // Lazy lookup through the global EPackage registry; 392 is the
            // generated classifier position for IfcObjective.
            ifcObjectiveEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(392);
        }
        return ifcObjectiveEClass;
    }
}
public class SimpleArangoRepository { /** * Finds one document which matches the given example object
* @ param example
* example object to construct query with
* @ param < S >
* @ return An object representing the example if it exists , else null */
@ Override public < S extends T > Optional < S > findOne ( final Example < S > example ) { } } | final ArangoCursor cursor = findAllInternal ( ( Pageable ) null , example , new HashMap ( ) ) ; return cursor . hasNext ( ) ? Optional . ofNullable ( ( S ) cursor . next ( ) ) : Optional . empty ( ) ; |
public class WebSockets { /** * Sends a complete text message , invoking the callback when complete
* @ param message The text to send
* @ param wsChannel The web socket channel */
public static void sendTextBlocking ( final String message , final WebSocketChannel wsChannel ) throws IOException { } } | final ByteBuffer data = ByteBuffer . wrap ( message . getBytes ( StandardCharsets . UTF_8 ) ) ; sendBlockingInternal ( data , WebSocketFrameType . TEXT , wsChannel ) ; |
public class GroupsApi {
    /**
     * Gets a list of users in a group.
     * Retrieves a list of users in a group.
     *
     * @param accountId The external account number (int) or account ID Guid. (required)
     * @param groupId The ID of the group being accessed. (required)
     * @param options for modifying the method behavior (paging: count, start_position).
     * @return UsersResponse
     * @throws ApiException if fails to make API call
     */
    public UsersResponse listGroupUsers(String accountId, String groupId, GroupsApi.ListGroupUsersOptions options) throws ApiException {
        Object localVarPostBody = "{}";
        // verify the required parameter 'accountId' is set
        if (accountId == null) {
            throw new ApiException(400, "Missing the required parameter 'accountId' when calling listGroupUsers");
        }
        // verify the required parameter 'groupId' is set
        if (groupId == null) {
            throw new ApiException(400, "Missing the required parameter 'groupId' when calling listGroupUsers");
        }
        // create path and map variables (path parameters are URL-escaped)
        String localVarPath = "/v2/accounts/{accountId}/groups/{groupId}/users".replaceAll("\\{format\\}", "json").replaceAll("\\{" + "accountId" + "\\}", apiClient.escapeString(accountId.toString())).replaceAll("\\{" + "groupId" + "\\}", apiClient.escapeString(groupId.toString()));
        // query params
        java.util.List<Pair> localVarQueryParams = new java.util.ArrayList<Pair>();
        java.util.Map<String, String> localVarHeaderParams = new java.util.HashMap<String, String>();
        java.util.Map<String, Object> localVarFormParams = new java.util.HashMap<String, Object>();
        if (options != null) {
            // Optional paging controls.
            localVarQueryParams.addAll(apiClient.parameterToPairs("", "count", options.count));
            localVarQueryParams.addAll(apiClient.parameterToPairs("", "start_position", options.startPosition));
        }
        final String[] localVarAccepts = { "application/json" };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        final String[] localVarContentTypes = { };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        String[] localVarAuthNames = new String[] { "docusignAccessCode" };
        GenericType<UsersResponse> localVarReturnType = new GenericType<UsersResponse>() {};
        return apiClient.invokeAPI(localVarPath, "GET", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAccept, localVarContentType, localVarAuthNames, localVarReturnType);
    }
}
public class AsynchronousRequest {
    /**
     * For more info on Skills API go <a href="https://wiki.guildwars2.com/wiki/API:2/skills">here</a><br/>
     * Give user the access to {@link Callback#onResponse(Call, Response)} and
     * {@link Callback#onFailure(Call, Throwable)} methods for custom interactions.
     *
     * @param callback callback that is going to be used for {@link Call#enqueue(Callback)}
     * @throws NullPointerException if the given {@link Callback} is null
     * @see Skill skill info
     */
    public void getAllSkillID(Callback<List<Integer>> callback) throws NullPointerException {
        // Fire the request asynchronously; the callback receives the full list of skill ids.
        gw2API.getAllSkillIDs().enqueue(callback);
    }
}
public class FilterBasedTriggeringPolicy { /** * { @ inheritDoc } */
public boolean parseUnrecognizedElement ( final Element element , final Properties props ) throws Exception { } } | final String nodeName = element . getNodeName ( ) ; if ( "filter" . equals ( nodeName ) ) { OptionHandler filter = org . apache . log4j . extras . DOMConfigurator . parseElement ( element , props , Filter . class ) ; if ( filter instanceof Filter ) { filter . activateOptions ( ) ; this . addFilter ( ( Filter ) filter ) ; } return true ; } return false ; |
public class TimerSupport { /** * return tick observable for each user , user should handle the observable with cell ' s lifecycle
* @ param interval timer interval , in TimeUnit . SECOND
* @ param intermediate true , get event immediately
* @ return
* @ since 3.0.0 */
public Observable < Long > getTickObservable ( int interval , boolean intermediate ) { } } | return Observable . interval ( intermediate ? 0 : interval , interval , TimeUnit . SECONDS ) ; |
public class RandomNormal { /** * Compute the next randomn value using the polar algorithm .
* Requires two uniformly - distributed random values in [ - 1 , + 1 ) .
* Actually computes two random values and saves the second one
* for the next invokation . */
public float nextPolar ( ) { } } | // If there ' s a saved value , return it .
if ( haveNextPolar ) { haveNextPolar = false ; return nextPolar ; } float u1 , u2 , r ; // point coordinates and their radius
do { // u1 and u2 will be uniformly - distributed
// random values in [ - 1 , + 1 ) .
u1 = 2 * gen . nextFloat ( ) - 1 ; u2 = 2 * gen . nextFloat ( ) - 1 ; // Want radius r inside the unit circle .
r = u1 * u1 + u2 * u2 ; } while ( r >= 1 ) ; // Factor incorporates the standard deviation .
float factor = ( float ) ( stddev * Math . sqrt ( - 2 * Math . log ( r ) / r ) ) ; // v1 and v2 are normally - distributed random values .
float v1 = factor * u1 + mean ; float v2 = factor * u2 + mean ; // Save v1 for next time .
nextPolar = v1 ; haveNextPolar = true ; return v2 ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.