signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class MwsAQCall { /** * Perform a synchronous call with no retry or error handling . * @ return */ @ Override public MwsResponse execute ( ) { } }
HttpPost request = createRequest ( ) ; try { HttpResponse hr = executeRequest ( request ) ; StatusLine statusLine = hr . getStatusLine ( ) ; int status = statusLine . getStatusCode ( ) ; String message = statusLine . getReasonPhrase ( ) ; rhmd = getResponseHeaderMetadata ( hr ) ; String body = getResponseBody ( hr ) ; MwsResponse response = new MwsResponse ( status , message , rhmd , body ) ; return response ; } catch ( Exception e ) { throw MwsUtl . wrap ( e ) ; } finally { request . releaseConnection ( ) ; }
public class WARCReader { /** * Write out the arcfile . * @ param reader * @ param format Format to use outputting . * @ throws IOException * @ throws java . text . ParseException */ protected static void output ( WARCReader reader , String format ) throws IOException , java . text . ParseException { } }
if ( ! reader . output ( format ) ) { throw new IOException ( "Unsupported format: " + format ) ; }
public class CmsVfsSitemapService { /** * Changes the navigation for a moved entry and its neighbors . < p > * @ param change the sitemap change * @ param entryFolder the moved entry * @ throws CmsException if something goes wrong */ private void applyNavigationChanges ( CmsSitemapChange change , CmsResource entryFolder ) throws CmsException { } }
CmsObject cms = getCmsObject ( ) ; String parentPath = null ; if ( change . hasNewParent ( ) ) { CmsResource parent = cms . readResource ( change . getParentId ( ) ) ; parentPath = cms . getSitePath ( parent ) ; } else { parentPath = CmsResource . getParentFolder ( cms . getSitePath ( entryFolder ) ) ; } List < CmsJspNavElement > navElements = getNavBuilder ( ) . getNavigationForFolder ( parentPath , Visibility . all , CmsResourceFilter . ONLY_VISIBLE_NO_DELETED ) ; CmsSitemapNavPosCalculator npc = new CmsSitemapNavPosCalculator ( navElements , entryFolder , change . getPosition ( ) ) ; List < CmsJspNavElement > navs = npc . getNavigationChanges ( ) ; List < CmsResource > needToUnlock = new ArrayList < CmsResource > ( ) ; try { for ( CmsJspNavElement nav : navs ) { LockInfo lockInfo = ensureLockAndGetInfo ( nav . getResource ( ) ) ; if ( ! nav . getResource ( ) . equals ( entryFolder ) && lockInfo . wasJustLocked ( ) ) { needToUnlock . add ( nav . getResource ( ) ) ; } } for ( CmsJspNavElement nav : navs ) { CmsProperty property = new CmsProperty ( CmsPropertyDefinition . PROPERTY_NAVPOS , "" + nav . getNavPosition ( ) , null ) ; cms . writePropertyObject ( cms . getSitePath ( nav . getResource ( ) ) , property ) ; } } finally { for ( CmsResource lockedRes : needToUnlock ) { try { cms . unlockResource ( lockedRes ) ; } catch ( CmsException e ) { // we catch this because we still want to unlock the other resources LOG . error ( e . getLocalizedMessage ( ) , e ) ; } } }
public class DialectFactory { /** * 获取共享方言 * @ param ds 数据源 , 每一个数据源对应一个唯一方言 * @ return { @ link Dialect } 方言 */ public static Dialect getDialect ( DataSource ds ) { } }
Dialect dialect = dialectPool . get ( ds ) ; if ( null == dialect ) { synchronized ( lock ) { dialect = dialectPool . get ( ds ) ; if ( null == dialect ) { dialect = newDialect ( ds ) ; dialectPool . put ( ds , dialect ) ; } } } return dialect ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link AbstractTimeObjectType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link AbstractTimeObjectType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "_TimeObject" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "_GML" ) public JAXBElement < AbstractTimeObjectType > create_TimeObject ( AbstractTimeObjectType value ) { } }
return new JAXBElement < AbstractTimeObjectType > ( __TimeObject_QNAME , AbstractTimeObjectType . class , null , value ) ;
public class NotificationHandler { /** * { @ inheritDoc } */ @ Override public void handleChannelData ( final String action , final JSONArray payload ) throws BitfinexClientException { } }
logger . debug ( "Got notification callback {}" , payload . toString ( ) ) ; if ( payload . isEmpty ( ) ) { return ; } // Test for order error callback // [0 , " n " , [ null , " on - req " , null , null , [ null , null , 1513970684865000 , " tBTCUSD " , null , null , 0.001,0.001 , " EXCHANGE MARKET " , null , null , null , null , null , null , null , 12940 , null , null , null , null , null , null , 0 , null , null ] , null , " ERROR " , " Invalid order : minimum size for BTC / USD is 0.002 " ] ] if ( "on-req" . equals ( payload . getString ( 1 ) ) ) { final String state = payload . optString ( 6 ) ; if ( "ERROR" . equals ( state ) ) { BitfinexSubmittedOrder exchangeOrder = jsonToBitfinexSubmittedOrder ( payload ) ; submittedOrderConsumer . accept ( symbol , exchangeOrder ) ; } }
public class Smb2NegotiateRequest { /** * { @ inheritDoc } * @ see jcifs . internal . smb2 . ServerMessageBlock2Request # createResponse ( jcifs . Configuration , * jcifs . internal . smb2 . ServerMessageBlock2Request ) */ @ Override protected Smb2NegotiateResponse createResponse ( CIFSContext tc , ServerMessageBlock2Request < Smb2NegotiateResponse > req ) { } }
return new Smb2NegotiateResponse ( tc . getConfig ( ) ) ;
public class IntFloatDenseVector { /** * Gets the index of the first element in this vector with the specified * value , or - 1 if it is not present . * @ param value The value to search for . * @ param delta The delta with which to evaluate equality . * @ return The index or - 1 if not present . */ public int lookupIndex ( float value , float delta ) { } }
for ( int i = 0 ; i < elements . length ; i ++ ) { if ( Primitives . equals ( elements [ i ] , value , delta ) ) { return i ; } } return - 1 ;
public class LoganSquare { /** * Parse an object from a String . Note : parsing from an InputStream should be preferred over parsing from a String if possible . * @ param jsonString The JSON string being parsed . * @ param jsonObjectClass The @ JsonObject class to parse the InputStream into */ public static < E > E parse ( String jsonString , Class < E > jsonObjectClass ) throws IOException { } }
return mapperFor ( jsonObjectClass ) . parse ( jsonString ) ;
public class ExtendedACLEntries { /** * Update the mask to be the union of owning group entry , named user entry and named group entry . * @ param groupActions the group entry to be integrated into the mask */ public void updateMask ( AclActions groupActions ) { } }
AclActions result = new AclActions ( groupActions ) ; for ( Map . Entry < String , AclActions > kv : mNamedUserActions . entrySet ( ) ) { AclActions userAction = kv . getValue ( ) ; result . merge ( userAction ) ; for ( AclAction action : AclAction . values ( ) ) { if ( result . contains ( action ) || userAction . contains ( action ) ) { result . add ( action ) ; } } } for ( Map . Entry < String , AclActions > kv : mNamedGroupActions . entrySet ( ) ) { AclActions userAction = kv . getValue ( ) ; result . merge ( userAction ) ; for ( AclAction action : AclAction . values ( ) ) { if ( result . contains ( action ) || userAction . contains ( action ) ) { result . add ( action ) ; } } } mMaskActions = result ;
public class JSONWriter { /** * Write a JSON key - value pair in one optimized step that assumes that * the key is a symbol composed of normal characters requiring no escaping * and asserts that keys are non - null and unique within an object ONLY if * asserts are enabled . This method is most suitable in the common case * where the caller is making a hard - coded series of calls with the same * hard - coded strings for keys . Any sequencing errors can be detected * in debug runs with asserts enabled . * @ param aKey * @ param aValue * @ return this * @ throws JSONException */ public JSONWriter keySymbolValuePair ( String aKey , String aValue ) throws JSONException { } }
assert ( aKey != null ) ; assert ( m_mode == 'k' ) ; // The key should not have already been seen in this scope . assert ( m_scopeStack [ m_top ] . add ( aKey ) ) ; try { m_writer . write ( m_expectingComma ? ",\"" : "\"" ) ; m_writer . write ( aKey ) ; if ( aValue == null ) { m_writer . write ( "\":null" ) ; } else { m_writer . write ( "\":\"" ) ; m_writer . write ( JSONObject . quotable ( aValue ) ) ; m_writer . write ( '"' ) ; } } catch ( IOException e ) { throw new JSONException ( e ) ; } m_expectingComma = true ; return this ;
public class JSONEmitter { /** * Start a new group object . The group emits the JSON : * < pre > * " [ name ] " : { * < / pre > * @ param name Name used for this group . * @ return The same JSONEmitter object , which allows call chaining . */ public JSONEmitter startGroup ( String name ) { } }
checkComma ( ) ; write ( '"' ) ; write ( encodeString ( name ) ) ; write ( "\":{" ) ; push ( ) ; return this ;
public class GrpcManagedChannelPool { /** * Acquires and increases the ref - count for the { @ link ManagedChannel } . * @ param channelKey channel key * @ param healthCheckTimeoutMs health check timeout in milliseconds * @ param shutdownTimeoutMs shutdown timeout in milliseconds * @ return a { @ link ManagedChannel } */ public ManagedChannel acquireManagedChannel ( ChannelKey channelKey , long healthCheckTimeoutMs , long shutdownTimeoutMs ) { } }
boolean shutdownExistingChannel = false ; ManagedChannelReference managedChannelRef = null ; try ( LockResource lockShared = new LockResource ( mLock . readLock ( ) ) ) { if ( mChannels . containsKey ( channelKey ) ) { managedChannelRef = mChannels . get ( channelKey ) ; if ( waitForChannelReady ( managedChannelRef . get ( ) , healthCheckTimeoutMs ) ) { LOG . debug ( "Acquiring an existing managed channel. ChannelKey: {}. Ref-count: {}" , channelKey , managedChannelRef . getRefCount ( ) ) ; return managedChannelRef . reference ( ) ; } else { // Postpone channel shutdown under exclusive lock below . shutdownExistingChannel = true ; } } } try ( LockResource lockExclusive = new LockResource ( mLock . writeLock ( ) ) ) { // Dispose existing channel if required . int existingRefCount = 0 ; if ( shutdownExistingChannel && mChannels . containsKey ( channelKey ) && mChannels . get ( channelKey ) == managedChannelRef ) { existingRefCount = managedChannelRef . getRefCount ( ) ; LOG . debug ( "Shutting down an existing unhealthy managed channel. " + "ChannelKey: {}. Existing Ref-count: {}" , channelKey , existingRefCount ) ; shutdownManagedChannel ( channelKey , shutdownTimeoutMs ) ; mChannels . remove ( channelKey ) ; } if ( ! mChannels . containsKey ( channelKey ) ) { LOG . debug ( "Creating a new managed channel. ChannelKey: {}. Ref-count:{}" , channelKey , existingRefCount ) ; mChannels . put ( channelKey , new ManagedChannelReference ( createManagedChannel ( channelKey ) , existingRefCount ) ) ; } return mChannels . get ( channelKey ) . reference ( ) ; }
public class Http { /** * Executes a POST request . * @ param uri url of resource . * @ param content content to be posted . * @ return { @ link Post } object . */ public static Post post ( String uri , String content ) { } }
return post ( uri , content . getBytes ( ) , CONNECTION_TIMEOUT , READ_TIMEOUT ) ;
public class UiDevice { /** * Open notification shade * @ return * @ throws Exception */ public static boolean openNotification ( ) throws Exception { } }
boolean success = false ; // get API level int apiLevel = Client . getInstance ( ) . mapField ( "android.os.Build$VERSION" , "SDK_INT" ) . getInt ( 0 ) ; if ( apiLevel >= 18 ) { success = Client . getInstance ( ) . map ( Constants . UIAUTOMATOR_UIDEVICE , "openNotification" ) . getBoolean ( 0 ) ; } else { // try a brute force method int displayHeight = getDisplayHeight ( ) ; // Calculated a Y position to pull down to that is the display height minus 10% int pullTo = displayHeight - ( int ) ( ( double ) displayHeight * .1 ) ; Client . getInstance ( ) . map ( Constants . UIAUTOMATOR_UIDEVICE , "swipe" , 10 , 0 , 10 , pullTo , 100 ) ; success = true ; } return success ;
public class CoverageUtilities { /** * Creates a { @ link GridCoverage2D coverage } from the { @ link WritableRaster writable raster } and the necessary geographic Information . * @ param name the name of the coverage . * @ param writableRaster the raster containing the data . * @ param envelopeParams the map of boundary parameters . * @ param crs the { @ link CoordinateReferenceSystem } . * @ return the { @ link GridCoverage2D coverage } . */ public static GridCoverage2D buildCoverage ( String name , WritableRaster writableRaster , HashMap < String , Double > envelopeParams , CoordinateReferenceSystem crs ) { } }
if ( writableRaster instanceof GrassLegacyWritableRaster ) { GrassLegacyWritableRaster wRaster = ( GrassLegacyWritableRaster ) writableRaster ; double west = envelopeParams . get ( WEST ) ; double south = envelopeParams . get ( SOUTH ) ; double east = envelopeParams . get ( EAST ) ; double north = envelopeParams . get ( NORTH ) ; int rows = envelopeParams . get ( ROWS ) . intValue ( ) ; int cols = envelopeParams . get ( COLS ) . intValue ( ) ; Window window = new Window ( west , east , south , north , rows , cols ) ; GrassLegacyGridCoverage2D coverage2D = new GrassLegacyGridCoverage2D ( window , wRaster . getData ( ) , crs ) ; return coverage2D ; } else { double west = envelopeParams . get ( WEST ) ; double south = envelopeParams . get ( SOUTH ) ; double east = envelopeParams . get ( EAST ) ; double north = envelopeParams . get ( NORTH ) ; Envelope2D writeEnvelope = new Envelope2D ( crs , west , south , east - west , north - south ) ; GridCoverageFactory factory = CoverageFactoryFinder . getGridCoverageFactory ( null ) ; GridCoverage2D coverage2D = factory . create ( name , writableRaster , writeEnvelope ) ; return coverage2D ; }
public class CurrentAddressTypeUnmarshaller { /** * Special handling of the Base64 encoded value that represents the address elements . */ @ Override public XMLObject unmarshall ( Element domElement ) throws UnmarshallingException { } }
Document newDocument = null ; Node childNode = domElement . getFirstChild ( ) ; while ( childNode != null ) { if ( childNode . getNodeType ( ) != Node . TEXT_NODE ) { // We skip everything except for a text node . log . info ( "Ignoring node {} - it is not a text node" , childNode . getNodeName ( ) ) ; } else { newDocument = parseContents ( ( Text ) childNode , domElement ) ; if ( newDocument != null ) { break ; } } childNode = childNode . getNextSibling ( ) ; } return super . unmarshall ( newDocument != null ? newDocument . getDocumentElement ( ) : domElement ) ;
public class Symmetry010Chronology { /** * Obtains a Symmetry010 zoned date - time from another date - time object . * @ param temporal the date - time object to convert , not null * @ return the Symmetry010 zoned date - time , not null * @ throws DateTimeException if unable to create the date - time */ @ Override @ SuppressWarnings ( "unchecked" ) public ChronoZonedDateTime < Symmetry010Date > zonedDateTime ( TemporalAccessor temporal ) { } }
return ( ChronoZonedDateTime < Symmetry010Date > ) super . zonedDateTime ( temporal ) ;
public class JSONObject { /** * unhidden . */ public JSONObject accumulate ( String name , Object value ) throws JSONException { } }
Object current = nameValuePairs . get ( checkName ( name ) ) ; if ( current == null ) { return put ( name , value ) ; } if ( current instanceof JSONArray ) { JSONArray array = ( JSONArray ) current ; array . checkedPut ( value ) ; } else { JSONArray array = new JSONArray ( ) ; array . checkedPut ( current ) ; array . checkedPut ( value ) ; nameValuePairs . put ( name , array ) ; } return this ;
public class ProductPlan { /** * Returns a histogram that , for each field , approximates the value * distribution of products from the specified histograms . * @ param hist1 * the left - hand - side histogram * @ param hist2 * the right - hand - side histogram * @ return a histogram that , for each field , approximates the value * distribution of the products */ public static Histogram productHistogram ( Histogram hist1 , Histogram hist2 ) { } }
Set < String > prodFlds = new HashSet < String > ( hist1 . fields ( ) ) ; prodFlds . addAll ( hist2 . fields ( ) ) ; Histogram prodHist = new Histogram ( prodFlds ) ; double numRec1 = hist1 . recordsOutput ( ) ; double numRec2 = hist2 . recordsOutput ( ) ; if ( Double . compare ( numRec1 , 1.0 ) < 0 || Double . compare ( numRec2 , 1.0 ) < 0 ) return prodHist ; for ( String fld : hist1 . fields ( ) ) for ( Bucket bkt : hist1 . buckets ( fld ) ) prodHist . addBucket ( fld , new Bucket ( bkt . valueRange ( ) , bkt . frequency ( ) * numRec2 , bkt . distinctValues ( ) , bkt . valuePercentiles ( ) ) ) ; for ( String fld : hist2 . fields ( ) ) for ( Bucket bkt : hist2 . buckets ( fld ) ) prodHist . addBucket ( fld , new Bucket ( bkt . valueRange ( ) , bkt . frequency ( ) * numRec1 , bkt . distinctValues ( ) , bkt . valuePercentiles ( ) ) ) ; return prodHist ;
public class AWSSecretsManagerClient { /** * Retrieves the JSON text of the resource - based policy document that ' s attached to the specified secret . The JSON * request string input and response output are shown formatted with white space and line breaks for better * readability . Submit your input as a single line JSON string . * < b > Minimum permissions < / b > * To run this command , you must have the following permissions : * < ul > * < li > * secretsmanager : GetResourcePolicy * < / li > * < / ul > * < b > Related operations < / b > * < ul > * < li > * To attach a resource policy to a secret , use < a > PutResourcePolicy < / a > . * < / li > * < li > * To delete the resource - based policy that ' s attached to a secret , use < a > DeleteResourcePolicy < / a > . * < / li > * < li > * To list all of the currently available secrets , use < a > ListSecrets < / a > . * < / li > * < / ul > * @ param getResourcePolicyRequest * @ return Result of the GetResourcePolicy operation returned by the service . * @ throws ResourceNotFoundException * We can ' t find the resource that you asked for . * @ throws InternalServiceErrorException * An error occurred on the server side . * @ throws InvalidRequestException * You provided a parameter value that is not valid for the current state of the resource . < / p > * Possible causes : * < ul > * < li > * You tried to perform the operation on a secret that ' s currently marked deleted . * < / li > * < li > * You tried to enable rotation on a secret that doesn ' t already have a Lambda function ARN configured and * you didn ' t include such an ARN as a parameter in this call . * < / li > * @ sample AWSSecretsManager . GetResourcePolicy * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / secretsmanager - 2017-10-17 / GetResourcePolicy " * target = " _ top " > AWS API Documentation < / a > */ @ Override public GetResourcePolicyResult getResourcePolicy ( GetResourcePolicyRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetResourcePolicy ( request ) ;
public class MethodInvocation { /** * Validates the array of { @ link Object arguments } to be passed to the { @ link Method } by comparing * the { @ link Object arguments } to the given { @ link Method Method ' s } declared parameters . * @ param method { @ link Method } used to validate the array of { @ link Object arguments } . * @ param args array of { @ link Object arguments } to validate . * @ return the given array of { @ link Object arguments } . * @ throws IllegalArgumentException if { @ link Method } is { @ literal null } , or the number of arguments * does not equal the number of { @ link Method } parameters , or an argument does not the match the type * of the corresponding { @ link Method } parameter . * @ see java . lang . reflect . Method */ protected Object [ ] validateArguments ( Method method , Object ... args ) { } }
Assert . notNull ( method , "Method cannot be null" ) ; Object [ ] arguments = nullSafeArray ( args ) ; int methodParameterCount = method . getParameterCount ( ) ; assertThat ( arguments . length ) . throwing ( newIllegalArgumentException ( "The number of arguments [%1$d] does not match the number of parameters [%2$d] for method [%3$s] in class [%4$s]" , arguments . length , methodParameterCount , method . getName ( ) , method . getDeclaringClass ( ) . getName ( ) ) ) . isEqualTo ( methodParameterCount ) ; Class < ? > [ ] parameterTypes = method . getParameterTypes ( ) ; int parameterIndex = 0 ; for ( Object argument : arguments ) { assertThat ( argument ) . throwing ( newIllegalArgumentException ( "Argument [%1$s] is not assignable to parameter [%2$d] of type [%3$s]" , argument , parameterIndex , parameterTypes [ parameterIndex ] . getName ( ) ) ) . isAssignableTo ( parameterTypes [ parameterIndex ++ ] ) ; } return arguments ;
public class BaseDesktopMenu { /** * Creates the look and feel menu . * @ param listener * the listener * @ return the j menu */ protected JMenu newLookAndFeelMenu ( final ActionListener listener ) { } }
final JMenu menuLookAndFeel = new JMenu ( "Look and Feel" ) ; menuLookAndFeel . setMnemonic ( 'L' ) ; // Look and Feel JMenuItems // GTK JMenuItem jmiPlafGTK ; jmiPlafGTK = new JMenuItem ( "GTK" , 'g' ) ; // $ NON - NLS - 1 $ MenuExtensions . setCtrlAccelerator ( jmiPlafGTK , 'G' ) ; jmiPlafGTK . addActionListener ( new LookAndFeelGTKAction ( "GTK" , this . applicationFrame ) ) ; menuLookAndFeel . add ( jmiPlafGTK ) ; // Metal default Metal theme JMenuItem jmiPlafMetal ; jmiPlafMetal = new JMenuItem ( "Metal" , 'm' ) ; // $ NON - NLS - 1 $ MenuExtensions . setCtrlAccelerator ( jmiPlafMetal , 'M' ) ; jmiPlafMetal . addActionListener ( new LookAndFeelMetalAction ( "Metal" , this . applicationFrame ) ) ; menuLookAndFeel . add ( jmiPlafMetal ) ; // Metal Ocean theme JMenuItem jmiPlafOcean ; jmiPlafOcean = new JMenuItem ( "Ocean" , 'o' ) ; // $ NON - NLS - 1 $ MenuExtensions . setCtrlAccelerator ( jmiPlafOcean , 'O' ) ; jmiPlafOcean . addActionListener ( new LookAndFeelMetalAction ( "Ocean" , this . applicationFrame ) ) ; menuLookAndFeel . add ( jmiPlafOcean ) ; // Motif JMenuItem jmiPlafMotiv ; jmiPlafMotiv = new JMenuItem ( "Motif" , 't' ) ; // $ NON - NLS - 1 $ MenuExtensions . setCtrlAccelerator ( jmiPlafMotiv , 'T' ) ; jmiPlafMotiv . addActionListener ( new LookAndFeelMotifAction ( "Motif" , this . applicationFrame ) ) ; menuLookAndFeel . add ( jmiPlafMotiv ) ; // Nimbus JMenuItem jmiPlafNimbus ; jmiPlafNimbus = new JMenuItem ( "Nimbus" , 'n' ) ; // $ NON - NLS - 1 $ MenuExtensions . setCtrlAccelerator ( jmiPlafNimbus , 'N' ) ; jmiPlafNimbus . addActionListener ( new LookAndFeelNimbusAction ( "Nimbus" , this . applicationFrame ) ) ; menuLookAndFeel . add ( jmiPlafNimbus ) ; // Windows JMenuItem jmiPlafSystem ; jmiPlafSystem = new JMenuItem ( "System" , 'd' ) ; // $ NON - NLS - 1 $ MenuExtensions . setCtrlAccelerator ( jmiPlafSystem , 'W' ) ; jmiPlafSystem . addActionListener ( new LookAndFeelSystemAction ( "System" , this . applicationFrame ) ) ; menuLookAndFeel . 
add ( jmiPlafSystem ) ; return menuLookAndFeel ;
public class ApiOvhCdndedicated { /** * Return stats about a domain * REST : GET / cdn / dedicated / { serviceName } / domains / { domain } / statistics * @ param period [ required ] * @ param type [ required ] * @ param value [ required ] * @ param serviceName [ required ] The internal name of your CDN offer * @ param domain [ required ] Domain of this object */ public ArrayList < OvhStatsDataType > serviceName_domains_domain_statistics_GET ( String serviceName , String domain , OvhStatsPeriodEnum period , OvhStatsTypeEnum type , OvhStatsValueEnum value ) throws IOException { } }
String qPath = "/cdn/dedicated/{serviceName}/domains/{domain}/statistics" ; StringBuilder sb = path ( qPath , serviceName , domain ) ; query ( sb , "period" , period ) ; query ( sb , "type" , type ) ; query ( sb , "value" , value ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t3 ) ;
public class ReflTools { /** * Read the value of the field < tt > fieldName < / tt > of object < tt > obj < / tt > . * @ param obj The object * @ param fieldName The field name * @ return the value of the field . */ public static int getInt ( Object obj , String fieldName ) { } }
try { return getField ( obj . getClass ( ) , fieldName ) . getInt ( obj ) ; } catch ( IllegalArgumentException e ) { throw new RuntimeException ( e ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( e ) ; }
public class ServicePriceListRetriever { /** * < p > Retrieves sample data row ( tree ) to make CSV column . < / p > * @ param pReqVars additional param * @ return sample data row * @ throws Exception an Exception */ @ Override public final List < Node < String > > getSampleDataRow ( final Map < String , Object > pReqVars ) throws Exception { } }
String lang = ( String ) pReqVars . get ( "lang" ) ; List < Node < String > > result = new ArrayList < Node < String > > ( ) ; Integer idx = 1 ; Node < String > nodeService = new Node < String > ( ) ; result . add ( nodeService ) ; nodeService . setItsName ( getSrvI18n ( ) . getMsg ( "service" , lang ) ) ; nodeService . setItsNodes ( new ArrayList < Node < String > > ( ) ) ; Node < String > nodeServiceName = new Node < String > ( ) ; nodeService . getItsNodes ( ) . add ( nodeServiceName ) ; nodeServiceName . setItsName ( getSrvI18n ( ) . getMsg ( "itsName" , lang ) ) ; nodeServiceName . setItsValue ( idx . toString ( ) + ";itsName" ) ; Node < String > nodeServiceId = new Node < String > ( ) ; nodeService . getItsNodes ( ) . add ( nodeServiceId ) ; nodeServiceId . setItsName ( getSrvI18n ( ) . getMsg ( "itsId" , lang ) ) ; nodeServiceId . setItsValue ( idx . toString ( ) + ";itsId" ) ; Node < String > nodeServiceItsCategory = new Node < String > ( ) ; nodeService . getItsNodes ( ) . add ( nodeServiceItsCategory ) ; nodeServiceItsCategory . setItsName ( getSrvI18n ( ) . getMsg ( "itsCategory" , lang ) ) ; nodeServiceItsCategory . setItsNodes ( new ArrayList < Node < String > > ( ) ) ; Node < String > nodeServiceItsCategoryName = new Node < String > ( ) ; nodeServiceItsCategory . getItsNodes ( ) . add ( nodeServiceItsCategoryName ) ; nodeServiceItsCategoryName . setItsName ( getSrvI18n ( ) . getMsg ( "itsName" , lang ) ) ; nodeServiceItsCategoryName . setItsValue ( idx . toString ( ) + ";itsCategory,itsName" ) ; Node < String > nodeServiceItsCategoryId = new Node < String > ( ) ; nodeServiceItsCategory . getItsNodes ( ) . add ( nodeServiceItsCategoryId ) ; nodeServiceItsCategoryId . setItsName ( getSrvI18n ( ) . getMsg ( "itsId" , lang ) ) ; nodeServiceItsCategoryId . setItsValue ( idx . toString ( ) + ";itsCategory,itsId" ) ; idx ++ ; Node < String > nodePrice = new Node < String > ( ) ; result . add ( nodePrice ) ; nodePrice . setItsName ( getSrvI18n ( ) . 
getMsg ( "itsPrice" , lang ) ) ; nodePrice . setItsValue ( idx . toString ( ) ) ; idx ++ ; Node < String > nodeCost = new Node < String > ( ) ; result . add ( nodeCost ) ; nodeCost . setItsName ( getSrvI18n ( ) . getMsg ( "itsCost" , lang ) ) ; nodeCost . setItsValue ( idx . toString ( ) ) ; idx ++ ; Node < String > nodeQuantity = new Node < String > ( ) ; result . add ( nodeQuantity ) ; nodeQuantity . setItsName ( getSrvI18n ( ) . getMsg ( "itsQuantity" , lang ) ) ; nodeQuantity . setItsValue ( idx . toString ( ) ) ; idx ++ ; Node < String > nodeIsAvailable = new Node < String > ( ) ; result . add ( nodeIsAvailable ) ; nodeIsAvailable . setItsName ( getSrvI18n ( ) . getMsg ( "isAvailable" , lang ) ) ; nodeIsAvailable . setItsValue ( idx . toString ( ) ) ; Set < String > ndFlIdNm = new HashSet < String > ( ) ; ndFlIdNm . add ( "itsId" ) ; ndFlIdNm . add ( "itsName" ) ; pReqVars . put ( "InvItemTaxCategoryneededFields" , ndFlIdNm ) ; pReqVars . put ( "TaxneededFields" , ndFlIdNm ) ; List < InvItemTaxCategoryLine > allTaxCatsLns = getSrvOrm ( ) . retrieveList ( pReqVars , InvItemTaxCategoryLine . class ) ; pReqVars . remove ( "InvItemTaxCategoryneededFields" ) ; pReqVars . remove ( "TaxneededFields" ) ; List < Tax > usedTaxes = new ArrayList < Tax > ( ) ; List < InvItemTaxCategory > usedTaxCats = new ArrayList < InvItemTaxCategory > ( ) ; for ( InvItemTaxCategoryLine tcl : allTaxCatsLns ) { boolean txListed = false ; for ( Tax tx : usedTaxes ) { if ( tx . getItsId ( ) . equals ( tcl . getTax ( ) . getItsId ( ) ) ) { txListed = true ; break ; } } if ( ! txListed ) { usedTaxes . add ( tcl . getTax ( ) ) ; } int tci = - 1 ; for ( InvItemTaxCategory tc : usedTaxCats ) { if ( tc . getItsId ( ) . equals ( tcl . getItsOwner ( ) . getItsId ( ) ) ) { tci = usedTaxCats . indexOf ( tc ) ; break ; } } if ( tci == - 1 ) { usedTaxCats . add ( tcl . getItsOwner ( ) ) ; tcl . getItsOwner ( ) . setTaxes ( new ArrayList < InvItemTaxCategoryLine > ( ) ) ; tcl . getItsOwner ( ) . 
getTaxes ( ) . add ( tcl ) ; } else { usedTaxCats . get ( tci ) . getTaxes ( ) . add ( tcl ) ; } } boolean isOnlyTax = true ; for ( InvItemTaxCategory txc : usedTaxCats ) { if ( txc . getTaxes ( ) . size ( ) > 1 ) { isOnlyTax = false ; break ; } } if ( isOnlyTax ) { idx ++ ; addTaxWr ( result , idx . toString ( ) , getSrvI18n ( ) . getMsg ( "OnlyTax" , lang ) , lang ) ; } else { idx ++ ; addTaxCatWr ( result , idx . toString ( ) , getSrvI18n ( ) . getMsg ( "taxCategory" , lang ) , lang ) ; Collections . sort ( usedTaxes , new CmprHasIdLong < Tax > ( ) ) ; for ( Tax tx : usedTaxes ) { idx ++ ; addTaxWr ( result , idx . toString ( ) , tx . getItsName ( ) , lang ) ; } Collections . sort ( usedTaxCats , new CmprHasIdLong < InvItemTaxCategory > ( ) ) ; for ( InvItemTaxCategory txc : usedTaxCats ) { idx ++ ; addTaxCatWr ( result , idx . toString ( ) , txc . getItsName ( ) , lang ) ; } } return result ;
public class StatefulPassivator { /** * d430549.10 */ private EJBObjectInfo createNonSerializableObjectInfo ( Object obj , Map < String , Map < String , Field > > passivatorFields ) throws RemoteException { } }
final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "createNonSerializableObjectInfo" , obj ) ; // d460047 , d648122 - re - work EJBObjectInfo ejbObjectInfo = new EJBObjectInfo ( ) ; ejbObjectInfo . setSerializable ( false ) ; Class < ? > clazz = obj . getClass ( ) ; ejbObjectInfo . setClassName ( clazz . getName ( ) ) ; for ( Class < ? > classIter = clazz ; classIter != Object . class ; classIter = classIter . getSuperclass ( ) ) { String className = classIter . getName ( ) ; Map < String , Field > classPassivatorFields = passivatorFields . get ( className ) ; List < FieldInfo > fieldInfoList = new ArrayList < FieldInfo > ( classPassivatorFields . size ( ) ) ; for ( Field field : classPassivatorFields . values ( ) ) { FieldInfo fieldInfo = generateFieldInfo ( obj , field ) ; fieldInfoList . add ( fieldInfo ) ; } ejbObjectInfo . addFieldInfo ( className , fieldInfoList ) ; } if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "createNonSerializableObjectInfo" , ejbObjectInfo ) ; return ejbObjectInfo ;
public class GenericResolver { /** * syck _ genericresolver _ node _ import */ @ JRubyMethod public static IRubyObject node_import ( IRubyObject self , IRubyObject node ) { } }
// System . err . println ( " syck _ genericresolver _ node _ import ( ) " ) ; Ruby runtime = self . getRuntime ( ) ; ThreadContext ctx = runtime . getCurrentContext ( ) ; org . yecht . Node n = ( org . yecht . Node ) node . dataGetStructChecked ( ) ; IRubyObject t = runtime . getNil ( ) ; Extra x = ( Extra ) self . dataGetStruct ( ) ; if ( n . type_id != null ) { t = runtime . newString ( n . type_id ) ; } switch ( n . kind ) { case Str : return x . scalar ( t , n , ctx ) ; case Seq : return x . sequence ( t , n , ctx ) ; case Map : return x . mapping ( t , n , ctx ) ; } return runtime . getNil ( ) ;
public class JacksonAdapter { /** * Initializes an instance of JacksonMapperAdapter with default configurations * applied to the object mapper: lenient deserialization (unknown properties ignored, * empty string as null, single value as array), ISO-style dates instead of timestamps, * non-null-only serialization, and the Azure custom serializer modules. * Field-only visibility is enforced so getters/setters are never used for mapping. * @param mapper the object mapper to configure * @return the same mapper instance, configured */ private static < T extends ObjectMapper > T initializeObjectMapper ( T mapper ) { } }
// First chain: feature flags + inclusion policy + serializer module registration.
// Second statement: visibility — serialize/deserialize by fields only (ANY field,
// NONE for setter/getter/is-getter), so accessor side effects are never triggered.
mapper . configure ( SerializationFeature . WRITE_DATES_AS_TIMESTAMPS , false ) . configure ( SerializationFeature . WRITE_EMPTY_JSON_ARRAYS , true ) . configure ( SerializationFeature . FAIL_ON_EMPTY_BEANS , false ) . configure ( DeserializationFeature . ACCEPT_EMPTY_STRING_AS_NULL_OBJECT , true ) . configure ( DeserializationFeature . FAIL_ON_UNKNOWN_PROPERTIES , false ) . configure ( DeserializationFeature . ACCEPT_SINGLE_VALUE_AS_ARRAY , true ) . setSerializationInclusion ( JsonInclude . Include . NON_NULL ) . registerModule ( new JavaTimeModule ( ) ) . registerModule ( ByteArraySerializer . getModule ( ) ) . registerModule ( Base64UrlSerializer . getModule ( ) ) . registerModule ( DateTimeSerializer . getModule ( ) ) . registerModule ( DateTimeRfc1123Serializer . getModule ( ) ) . registerModule ( DurationSerializer . getModule ( ) ) ; mapper . setVisibility ( mapper . getSerializationConfig ( ) . getDefaultVisibilityChecker ( ) . withFieldVisibility ( JsonAutoDetect . Visibility . ANY ) . withSetterVisibility ( JsonAutoDetect . Visibility . NONE ) . withGetterVisibility ( JsonAutoDetect . Visibility . NONE ) . withIsGetterVisibility ( JsonAutoDetect . Visibility . NONE ) ) ; return mapper ;
public class DescribeAutoScalingInstancesResult { /** * The instances . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAutoScalingInstances ( java . util . Collection ) } or { @ link # withAutoScalingInstances ( java . util . Collection ) } * if you want to override the existing values . * @ param autoScalingInstances * The instances . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeAutoScalingInstancesResult withAutoScalingInstances ( AutoScalingInstanceDetails ... autoScalingInstances ) { } }
if ( this . autoScalingInstances == null ) { setAutoScalingInstances ( new com . amazonaws . internal . SdkInternalList < AutoScalingInstanceDetails > ( autoScalingInstances . length ) ) ; } for ( AutoScalingInstanceDetails ele : autoScalingInstances ) { this . autoScalingInstances . add ( ele ) ; } return this ;
public class CMMClassifier { /** * Build a Dataset from some data . Used for training a classifier . * By passing in an extra origDataset , you can get a Dataset based on featureIndex and * classIndex in an existing origDataset . * @ param data This variable is a list of lists of CoreLabel . That is , * it is a collection of documents , each of which is represented * as a sequence of CoreLabel objects . * @ param origDataset if you want to get a Dataset based on featureIndex and * classIndex in an existing origDataset * @ return The Dataset which is an efficient encoding of the information * in a List of Datums */ public Dataset < String , String > getDataset ( ObjectBank < List < IN > > data , Dataset < String , String > origDataset ) { } }
if ( origDataset == null ) { return getDataset ( data ) ; } return getDataset ( data , origDataset . featureIndex , origDataset . labelIndex ) ;
public class KxPublisherActor { /** * Switches this actor into pull-based (iterator-backed) event-producer mode. * A single-thread executor drains the iterator off the caller thread (the iterator * may block), pushing each element to the actor mailbox via self().onNext(). * @param iterator the element source; drained lazily as downstream demand arrives */ public void initFromIterator ( Iterator < IN > iterator ) { } }
// NOTE(review): the 'complete' flag is written on the iterator thread and read on
// subsequent request() calls without volatile/synchronization — relies on the
// single-threaded executor for visibility; confirm that is intentional.
// NOTE(review): the final setName call concatenates Thread.currentThread()'s
// toString() into the name, so the name grows each call — looks accidental; verify.
this . pending = new ArrayDeque < > ( ) ; isIteratorBased = true ; // this . _ streams = kxReactiveStreams ; needs sync init ! ! // in case iterator is blocking , need a dedicated thread . . Executor iteratorThread = new ThreadPoolExecutor ( 0 , 1 , 10L , TimeUnit . MILLISECONDS , new LinkedBlockingQueue < Runnable > ( ) ) ; producer = new Subscription ( ) { boolean complete = false ; @ Override public void request ( long outern ) { iteratorThread . execute ( ( ) -> { if ( complete ) { return ; } long n = outern ; try { while ( iterator . hasNext ( ) && n -- > 0 ) { self ( ) . onNext ( iterator . next ( ) ) ; } if ( ! iterator . hasNext ( ) ) { complete = true ; self ( ) . onComplete ( ) ; } } catch ( Throwable t ) { self ( ) . onError ( t ) ; } } ) ; } @ Override public void cancel ( ) { } } ; processor = in -> ( OUT ) in ; onSubscribe ( producer ) ; Thread . currentThread ( ) . setName ( Thread . currentThread ( ) + " (rx async stream processor)" ) ;
public class StatisticalSequenceLabeler { /** * Get array of Spans from a list of tokens . * @ param tokens * the sentence tokens * @ return the array of Sequence Spans */ public final Span [ ] seqToSpans ( final String [ ] tokens ) { } }
final Span [ ] annotatedText = this . sequenceLabeler . tag ( tokens ) ; final List < Span > probSpans = new ArrayList < Span > ( Arrays . asList ( annotatedText ) ) ; return probSpans . toArray ( new Span [ probSpans . size ( ) ] ) ;
public class AmazonPinpointSMSVoiceClient { /** * Create a new voice message and send it to a recipient ' s phone number . * @ param sendVoiceMessageRequest * SendVoiceMessageRequest * @ return Result of the SendVoiceMessage operation returned by the service . * @ throws TooManyRequestsException * TooManyRequestsException * @ throws BadRequestException * BadRequestException * @ throws InternalServiceErrorException * InternalServiceErrorException * @ sample AmazonPinpointSMSVoice . SendVoiceMessage * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / pinpoint - sms - voice - 2018-09-05 / SendVoiceMessage " * target = " _ top " > AWS API Documentation < / a > */ @ Override public SendVoiceMessageResult sendVoiceMessage ( SendVoiceMessageRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeSendVoiceMessage ( request ) ;
public class Util { /** * This function copies file from one location to another * @ param from the full path to the source file * @ param to the full path to the destination file * @ return total bytes copied . 0 indicates */ public static int copyFile ( String from , String to ) { } }
InputStream inStream = null ; FileOutputStream fs = null ; try { int bytesum = 0 ; int byteread ; File oldfile = new File ( from ) ; if ( oldfile . exists ( ) ) { inStream = new FileInputStream ( from ) ; fs = new FileOutputStream ( to ) ; byte [ ] buffer = new byte [ 1444 ] ; while ( ( byteread = inStream . read ( buffer ) ) != - 1 ) { bytesum += byteread ; fs . write ( buffer , 0 , byteread ) ; } } return bytesum ; } catch ( Exception e ) { return 0 ; } finally { Util . ensureClosed ( inStream ) ; Util . ensureClosed ( fs ) ; }
public class Tuples { /** * Adapts a ternary consumer to an consumer accepting a triple . * @ param < T > the consumer first parameter type * @ param < U > the consumer second parameter type * @ param < V > the consumer third parameter type * @ param consumer the consumer to be adapted * @ return the adatped consumer */ public static < T , U , V > Consumer < Triple < T , U , V > > tupled ( TriConsumer < T , U , V > consumer ) { } }
dbc . precondition ( consumer != null , "cannot apply a triple to a null consumer" ) ; return triple -> consumer . accept ( triple . first ( ) , triple . second ( ) , triple . third ( ) ) ;
public class ProtobufIDLProxy { /** * Generates Java source code for a protobuf enum element. The emitted enum * implements EnumReadable, carries each constant's protobuf tag as its int value, * and is emitted either as a top-level type (with package + import lines) or as a * nested "public static enum". * @param type the parsed protobuf enum element * @param topLevelClass whether to emit as a standalone compilation unit * @param packageName the target Java package name * @param mappedUniName mapping used to derive a unique proxy class name * @param isUniName whether unique-name mapping is in effect * @return a CodeDependent holding the class name, package and generated source */ private static CodeDependent createCodeByType ( EnumElement type , boolean topLevelClass , String packageName , Map < String , String > mappedUniName , boolean isUniName ) { } }
// Constants are joined with ',' and the last one terminated with ';', then the
// standard value-field/constructor/value() boilerplate is appended.
CodeDependent cd = new CodeDependent ( ) ; String defaultClsName = type . name ( ) ; String simpleName = getProxyClassName ( defaultClsName , mappedUniName , isUniName ) ; // To generate class StringBuilder code = new StringBuilder ( ) ; if ( topLevelClass ) { // define package code . append ( "package " ) . append ( packageName ) . append ( CODE_END ) ; code . append ( "\n" ) ; // add import ; code . append ( "import com.baidu.bjf.remoting.protobuf.EnumReadable;\n" ) ; } // define class // define class if ( topLevelClass ) { code . append ( "public enum " ) ; } else { code . append ( "public static enum " ) ; } code . append ( simpleName ) . append ( " implements EnumReadable {\n" ) ; Iterator < EnumConstantElement > iter = type . constants ( ) . iterator ( ) ; while ( iter . hasNext ( ) ) { EnumConstantElement value = iter . next ( ) ; String name = value . name ( ) ; int tag = value . tag ( ) ; code . append ( name ) . append ( "(" ) . append ( tag ) . append ( ")" ) ; if ( iter . hasNext ( ) ) { code . append ( "," ) ; } else { code . append ( ";\n" ) ; } } code . append ( "private final int value;\n" ) ; code . append ( simpleName ) . append ( "(int value) { this.value = value; }\n" ) ; code . append ( "public int value() { return value; }\n" ) ; code . append ( "}\n" ) ; cd . name = simpleName ; cd . pkg = packageName ; cd . code = code . toString ( ) ; return cd ;
public class Future { /** * (non-Javadoc) * @ see com . oath . cyclops . types . MonadicValue # flatMapP ( java . util . function . Function ) */ @ Override public < R > Future < R > mergeMap ( final Function < ? super T , ? extends Publisher < ? extends R > > mapper ) { } }
// Delegate to the MonadicValue default implementation; the cast narrows its
// MonadicValue<R> result back to Future<R>.
return ( Future < R > ) MonadicValue . super . mergeMap ( mapper ) ;
public class CmsIconUtil { /** * Returns the CSS class for the given filename . < p > * @ param resourceTypeName the resource type name * @ param fileName the filename * @ param small if true , get the CSS class for the small icon , else for the biggest one available * @ return the CSS class */ private static String getFileTypeIconClass ( String resourceTypeName , String fileName , boolean small ) { } }
if ( ( fileName != null ) && fileName . contains ( "." ) ) { int last = fileName . lastIndexOf ( "." ) ; if ( fileName . length ( ) > ( last + 1 ) ) { String suffix = fileName . substring ( fileName . lastIndexOf ( "." ) + 1 ) ; return getResourceSubTypeIconClass ( resourceTypeName , suffix , small ) ; } } return "" ;
public class GPX { /** * Writes the given { @ code gpx } object ( in GPX XML format ) to the given * { @ code output } stream . * @ since 1.1 * @ param gpx the GPX object to write to the output * @ param path the output path where the GPX object is written to * @ throws IOException if the writing of the GPX object fails * @ throws NullPointerException if one of the given arguments is { @ code null } */ public static void write ( final GPX gpx , final Path path ) throws IOException { } }
// Delegate to the default writer instance.
writer ( ) . write ( gpx , path ) ;
public class MethodUtil { /** * Get parameter type name string from a arg types string array * @ param argTypes * @ return */ public static String getArgsTypeName ( String [ ] argTypes ) { } }
if ( argTypes != null ) { return StringUtil . join ( argTypes , ',' ) ; } return StringUtil . EMPTY ;
public class ClockSkin { /** * Draws the 60 minute/hour tick marks onto the tick canvas. * Every 5th tick is an hour mark (thicker, hourTickMarkColor); the rest are * minute marks (thinner, minuteTickMarkColor). Visibility flags from the * skinnable control decide which marks are drawn at all. */ private void drawTicks ( ) { } }
// Angle runs clockwise from a 180-degree start; inner/outer radii are fixed
// fractions of the control size (0.405 / 0.435 / 0.465 of size).
// NOTE(review): 'counter % 1 == 0' is always true — the minute branch is gated
// only by minuteTickMarksVisible in practice.
double sinValue ; double cosValue ; double startAngle = 180 ; double angleStep = 360 / 60 ; Point2D center = new Point2D ( size * 0.5 , size * 0.5 ) ; Color hourTickMarkColor = getSkinnable ( ) . getHourTickMarkColor ( ) ; Color minuteTickMarkColor = getSkinnable ( ) . getMinuteTickMarkColor ( ) ; boolean hourTickMarksVisible = getSkinnable ( ) . isHourTickMarksVisible ( ) ; boolean minuteTickMarksVisible = getSkinnable ( ) . isMinuteTickMarksVisible ( ) ; tickCtx . clearRect ( 0 , 0 , size , size ) ; tickCtx . setLineCap ( StrokeLineCap . ROUND ) ; for ( double angle = 0 , counter = 0 ; Double . compare ( counter , 59 ) <= 0 ; angle -= angleStep , counter ++ ) { sinValue = Math . sin ( Math . toRadians ( angle + startAngle ) ) ; cosValue = Math . cos ( Math . toRadians ( angle + startAngle ) ) ; Point2D innerPoint = new Point2D ( center . getX ( ) + size * 0.405 * sinValue , center . getY ( ) + size * 0.405 * cosValue ) ; Point2D innerMinutePoint = new Point2D ( center . getX ( ) + size * 0.435 * sinValue , center . getY ( ) + size * 0.435 * cosValue ) ; Point2D outerPoint = new Point2D ( center . getX ( ) + size * 0.465 * sinValue , center . getY ( ) + size * 0.465 * cosValue ) ; if ( counter % 5 == 0 ) { // Draw hour tickmark tickCtx . setStroke ( hourTickMarkColor ) ; if ( hourTickMarksVisible ) { tickCtx . setLineWidth ( size * 0.01 ) ; tickCtx . strokeLine ( innerPoint . getX ( ) , innerPoint . getY ( ) , outerPoint . getX ( ) , outerPoint . getY ( ) ) ; } else if ( minuteTickMarksVisible ) { tickCtx . setLineWidth ( size * 0.005 ) ; tickCtx . strokeLine ( innerMinutePoint . getX ( ) , innerMinutePoint . getY ( ) , outerPoint . getX ( ) , outerPoint . getY ( ) ) ; } } else if ( counter % 1 == 0 && minuteTickMarksVisible ) { // Draw minute tickmark tickCtx . setLineWidth ( size * 0.005 ) ; tickCtx . setStroke ( minuteTickMarkColor ) ; tickCtx . strokeLine ( innerMinutePoint . getX ( ) , innerMinutePoint . getY ( ) , outerPoint . getX ( ) , outerPoint . 
getY ( ) ) ; } }
public class PublicIPAddressesInner { /** * Creates or updates a static or dynamic public IP address . * @ param resourceGroupName The name of the resource group . * @ param publicIpAddressName The name of the public IP address . * @ param parameters Parameters supplied to the create or update public IP address operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < PublicIPAddressInner > createOrUpdateAsync ( String resourceGroupName , String publicIpAddressName , PublicIPAddressInner parameters ) { } }
// Unwrap the ServiceResponse envelope so subscribers receive the body directly.
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , publicIpAddressName , parameters ) . map ( new Func1 < ServiceResponse < PublicIPAddressInner > , PublicIPAddressInner > ( ) { @ Override public PublicIPAddressInner call ( ServiceResponse < PublicIPAddressInner > response ) { return response . body ( ) ; } } ) ;
public class SDKUtil { /** * 将Map中的数据转换成key1 = value1 & key2 = value2的形式 不包含签名域signature * @ param data * 待拼接的Map数据 * @ return 拼接好后的字符串 */ public static String coverMap2String ( Map < String , String > data ) { } }
TreeMap < String , String > tree = new TreeMap < String , String > ( ) ; Iterator < Entry < String , String > > it = data . entrySet ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { Entry < String , String > en = it . next ( ) ; if ( SDKConstants . param_signature . equals ( en . getKey ( ) . trim ( ) ) ) { continue ; } tree . put ( en . getKey ( ) , en . getValue ( ) ) ; } it = tree . entrySet ( ) . iterator ( ) ; StringBuffer sf = new StringBuffer ( ) ; while ( it . hasNext ( ) ) { Entry < String , String > en = it . next ( ) ; sf . append ( en . getKey ( ) + SDKConstants . EQUAL + en . getValue ( ) + SDKConstants . AMPERSAND ) ; } return sf . substring ( 0 , sf . length ( ) - 1 ) ;
public class appfwconfidfield { /** * Use this API to delete appfwconfidfield resources of given names . */ public static base_responses delete ( nitro_service client , String fieldname [ ] ) throws Exception { } }
base_responses result = null ; if ( fieldname != null && fieldname . length > 0 ) { appfwconfidfield deleteresources [ ] = new appfwconfidfield [ fieldname . length ] ; for ( int i = 0 ; i < fieldname . length ; i ++ ) { deleteresources [ i ] = new appfwconfidfield ( ) ; deleteresources [ i ] . fieldname = fieldname [ i ] ; } result = delete_bulk_request ( client , deleteresources ) ; } return result ;
public class BoofConcurrency { /** * Automatically breaks the problem up into blocks based on the number of threads available . It is assumed * that there is some cost associated with processing a block and the number of blocks is minimized . * Examples : * < ul > * < li > Given a range of 0 to 100 , and minBlock is 5 , and 10 threads . Blocks will be size 10 . < / li > * < li > Given a range of 0 to 100 , and minBlock is 20 , and 10 threads . Blocks will be size 20 . < / li > * < li > Given a range of 0 to 100 , and minBlock is 15 , and 10 threads . Blocks will be size 16 and 20 . < / li > * < li > Given a range of 0 to 100 , and minBlock is 80 , and 10 threads . Blocks will be size 100 . < / li > * < / ul > * @ param start First index , inclusive * @ param endExclusive Last index , exclusive * @ param minBlock Minimum size of a block * @ param consumer The consumer */ public static void loopBlocks ( int start , int endExclusive , int minBlock , IntRangeConsumer consumer ) { } }
// Submits a single IntRangeTask to the shared fork-join pool and blocks until done.
// NOTE(review): InterruptedException/ExecutionException are only printed, so callers
// cannot observe failures — confirm this best-effort behavior is intended.
final ForkJoinPool pool = BoofConcurrency . pool ; int numThreads = pool . getParallelism ( ) ; int range = endExclusive - start ; if ( range == 0 ) // nothing to do here ! return ; if ( range < 0 ) throw new IllegalArgumentException ( "end must be more than start. " + start + " -> " + endExclusive ) ; int block = selectBlockSize ( range , minBlock , numThreads ) ; try { pool . submit ( new IntRangeTask ( start , endExclusive , block , consumer ) ) . get ( ) ; } catch ( InterruptedException | ExecutionException e ) { e . printStackTrace ( ) ; }
public class Property_Builder { /** * Copies values from { @ code template } , appending to collections , and skipping empty optionals and * unset properties . For each scalar property , the value is copied only if it is set on the * template AND differs from (or is unset in) a freshly-constructed defaults builder . * @ return this { @ code Builder } object */ public org . inferred . freebuilder . processor . property . Property . Builder mergeFrom ( org . inferred . freebuilder . processor . property . Property . Builder template ) { } }
// Generated builder code: each property follows the identical
// "set-and-differs-from-default" copy pattern; collections are appended at the end.
// Upcast to access private fields ; otherwise , oddly , we get an access violation . Property_Builder base = template ; Property_Builder defaults = new org . inferred . freebuilder . processor . property . Property . Builder ( ) ; if ( ! base . _unsetProperties . contains ( Property . TYPE ) && ( defaults . _unsetProperties . contains ( Property . TYPE ) || ! Objects . equals ( template . getType ( ) , defaults . getType ( ) ) ) ) { setType ( template . getType ( ) ) ; } template . getBoxedType ( ) . ifPresent ( this :: setBoxedType ) ; if ( ! base . _unsetProperties . contains ( Property . NAME ) && ( defaults . _unsetProperties . contains ( Property . NAME ) || ! Objects . equals ( template . getName ( ) , defaults . getName ( ) ) ) ) { setName ( template . getName ( ) ) ; } if ( ! base . _unsetProperties . contains ( Property . CAPITALIZED_NAME ) && ( defaults . _unsetProperties . contains ( Property . CAPITALIZED_NAME ) || ! Objects . equals ( template . getCapitalizedName ( ) , defaults . getCapitalizedName ( ) ) ) ) { setCapitalizedName ( template . getCapitalizedName ( ) ) ; } if ( ! base . _unsetProperties . contains ( Property . ALL_CAPS_NAME ) && ( defaults . _unsetProperties . contains ( Property . ALL_CAPS_NAME ) || ! Objects . equals ( template . getAllCapsName ( ) , defaults . getAllCapsName ( ) ) ) ) { setAllCapsName ( template . getAllCapsName ( ) ) ; } if ( ! base . _unsetProperties . contains ( Property . USING_BEAN_CONVENTION ) && ( defaults . _unsetProperties . contains ( Property . USING_BEAN_CONVENTION ) || template . isUsingBeanConvention ( ) != defaults . isUsingBeanConvention ( ) ) ) { setUsingBeanConvention ( template . isUsingBeanConvention ( ) ) ; } if ( ! base . _unsetProperties . contains ( Property . GETTER_NAME ) && ( defaults . _unsetProperties . contains ( Property . GETTER_NAME ) || ! Objects . equals ( template . getGetterName ( ) , defaults . getGetterName ( ) ) ) ) { setGetterName ( template . getGetterName ( ) ) ; } if ( ! 
base . _unsetProperties . contains ( Property . FULLY_CHECKED_CAST ) && ( defaults . _unsetProperties . contains ( Property . FULLY_CHECKED_CAST ) || template . isFullyCheckedCast ( ) != defaults . isFullyCheckedCast ( ) ) ) { setFullyCheckedCast ( template . isFullyCheckedCast ( ) ) ; } addAllAccessorAnnotations ( base . accessorAnnotations ) ; return ( org . inferred . freebuilder . processor . property . Property . Builder ) this ;
public class ContinuousExportDescription { /** * An object which describes how the data is stored . * < ul > * < li > * < code > databaseName < / code > - the name of the Glue database used to store the schema . * < / li > * < / ul > * @ param schemaStorageConfig * An object which describes how the data is stored . < / p > * < ul > * < li > * < code > databaseName < / code > - the name of the Glue database used to store the schema . * < / li > * @ return Returns a reference to this object so that method calls can be chained together . */ public ContinuousExportDescription withSchemaStorageConfig ( java . util . Map < String , String > schemaStorageConfig ) { } }
// Fluent variant of the setter: delegate, then return this for chaining.
setSchemaStorageConfig ( schemaStorageConfig ) ; return this ;
public class CodeChunk { /** * Temporary method to ease migration to the CodeChunk DSL . * < p > Converts this chunk to a JsExpr , asserting that doing so loses no goog.require * statements ; if any requires would be dropped , an IllegalStateException is thrown . * < p > TODO ( b / 32224284 ) : remove . */ public final JsExpr assertExpr ( ) { } }
// Collect requires while converting; a non-empty set means callers would silently
// lose dependency information, so fail loudly instead.
RequiresCollector . IntoImmutableSet collector = new RequiresCollector . IntoImmutableSet ( ) ; JsExpr expr = assertExprAndCollectRequires ( collector ) ; ImmutableSet < GoogRequire > requires = collector . get ( ) ; if ( ! requires . isEmpty ( ) ) { throw new IllegalStateException ( "calling assertExpr() would drop requires!: " + requires ) ; } return expr ;
public class Algorithm { /** * Calls the Algorithmia API for a given input . * Attempts to automatically serialize the input to JSON . * @ param input algorithm input , will automatically be converted into JSON * @ return algorithm result ( AlgoSuccess or AlgoFailure ) * @ throws APIException if there is a problem communication with the Algorithmia API . */ public AlgoResponse pipe ( Object input ) throws APIException { } }
if ( input instanceof String ) { return pipeRequest ( ( String ) input , ContentType . Text ) ; } else if ( input instanceof byte [ ] ) { return pipeBinaryRequest ( ( byte [ ] ) input ) ; } else { return pipeRequest ( gson . toJsonTree ( input ) . toString ( ) , ContentType . Json ) ; }
public class Path { /** * Initializes a path object given the scheme , authority and path string . * @ param scheme * the scheme string . * @ param authority * the authority string . * @ param path * the path string . */ private void initialize ( String scheme , String authority , String path ) { } }
// Build and normalize the URI; a syntactically invalid combination is surfaced
// as an unchecked IllegalArgumentException wrapping the URISyntaxException.
try { this . uri = new URI ( scheme , authority , normalizePath ( path ) , null , null ) . normalize ( ) ; } catch ( URISyntaxException e ) { throw new IllegalArgumentException ( e ) ; }
public class RestCallbackBuilder { /** * create rest callback implementation . * @ param pcallbackOnSuccess on success callback * @ param pcallbackOnFailure on failure callback * @ param < R > rest result type * @ return RestCallbackImpl */ public static < R > AsyncCallback < R > build ( final AsyncCallbackOnSuccess < R > pcallbackOnSuccess , final AsyncCallbackOnFailure pcallbackOnFailure ) { } }
return new AsyncCallback < R > ( ) { @ Override public void onFailure ( final Throwable pcaught ) { pcallbackOnFailure . onFailure ( pcaught ) ; } @ Override public void onSuccess ( final R presult ) { pcallbackOnSuccess . onSuccess ( presult ) ; } } ;
public class MultiChangeBuilder { /** * Inserts the given rich - text content at the given position . * @ param position The position to insert the text . * @ param document The rich - text content to insert . */ public MultiChangeBuilder < PS , SEG , S > insert ( int position , StyledDocument < PS , SEG , S > document ) { } }
// An insert is a zero-length replace at the same position.
return replace ( position , position , document ) ;
public class CookieUtil { /** * US - ASCII characters excluding CTLs , whitespace , DQUOTE , comma , semicolon , and backslash */ private static BitSet validCookieValueOctets ( ) { } }
BitSet bits = new BitSet ( ) ; bits . set ( 0x21 ) ; for ( int i = 0x23 ; i <= 0x2B ; i ++ ) { bits . set ( i ) ; } for ( int i = 0x2D ; i <= 0x3A ; i ++ ) { bits . set ( i ) ; } for ( int i = 0x3C ; i <= 0x5B ; i ++ ) { bits . set ( i ) ; } for ( int i = 0x5D ; i <= 0x7E ; i ++ ) { bits . set ( i ) ; } return bits ;
public class PrimitiveIntegerArrayJsonDeserializer { /** * Reads a JSON array of ints into a primitive int[]. JSON nulls are skipped on * the reader and replaced by the DEFAULT value so array positions are preserved . * { @ inheritDoc } */ @ Override public int [ ] doDeserializeArray ( JsonReader reader , JsonDeserializationContext ctx , JsonDeserializerParameters params ) { } }
// FastArrayInteger accumulates values; reinterpretCast() yields the int[] view.
FastArrayInteger jsArray = new FastArrayInteger ( ) ; reader . beginArray ( ) ; while ( JsonToken . END_ARRAY != reader . peek ( ) ) { if ( JsonToken . NULL == reader . peek ( ) ) { reader . skipValue ( ) ; jsArray . push ( DEFAULT ) ; } else { jsArray . push ( reader . nextInt ( ) ) ; } } reader . endArray ( ) ; return jsArray . reinterpretCast ( ) ;
public class PdfBoxGraphics2DFontTextDrawerDefaultFonts { /** * Get a PDType1Font . TIMES - variant , which matches the given font * @ param font * Font to get the styles from * @ return a PDFont Times variant which matches the style in the given Font * object . */ public static PDFont chooseMatchingTimes ( Font font ) { } }
if ( ( font . getStyle ( ) & ( Font . ITALIC | Font . BOLD ) ) == ( Font . ITALIC | Font . BOLD ) ) return PDType1Font . TIMES_BOLD_ITALIC ; if ( ( font . getStyle ( ) & Font . ITALIC ) == Font . ITALIC ) return PDType1Font . TIMES_ITALIC ; if ( ( font . getStyle ( ) & Font . BOLD ) == Font . BOLD ) return PDType1Font . TIMES_BOLD ; return PDType1Font . TIMES_ROMAN ;
public class RemoteAdministrationThread { /** * Main loop of the remote administration thread: consumes admin commands from the * ADM_REQUEST_QUEUE, processes each, and replies on ADM_REPLY_QUEUE with the request's * message ID as correlation ID (plus an error-message header when processing failed). * Both queues are purged on startup; every JMS resource is closed on the way out. * @ see net . timewalker . ffmq4 . utils . concurrent . SynchronizableThread # run ( ) */ @ Override public void run ( ) { } }
// A null receive() result means the receiver was interrupted -> exit the loop.
// session.commit() in the finally of the per-message block acknowledges the request
// even when reply construction failed. notifyStartup() is also called on fatal
// failure so a waiting starter is never left blocked.
log . info ( "Starting remote administration thread ..." ) ; try { LocalQueue inputQueue = engine . getLocalQueue ( FFMQConstants . ADM_REQUEST_QUEUE ) ; LocalQueue outputQueue = engine . getLocalQueue ( FFMQConstants . ADM_REPLY_QUEUE ) ; conn = new LocalQueueConnection ( engine , null , null ) ; session = conn . createQueueSession ( true , Session . SESSION_TRANSACTED ) ; receiver = session . createReceiver ( inputQueue ) ; sender = session . createSender ( outputQueue ) ; conn . start ( ) ; // Flush input queue on startup inputQueue . purge ( null ) ; outputQueue . purge ( null ) ; // Enter listening loop notifyStartup ( ) ; while ( ! stopRequired ) { Message message = receiver . receive ( ) ; if ( message == null ) break ; // Interrupted log . debug ( "Received message " + message ) ; try { // Process the command String errorMsg = process ( message ) ; // Build response message Message response = session . createMessage ( ) ; response . setJMSCorrelationID ( message . getJMSMessageID ( ) ) ; if ( errorMsg != null ) response . setStringProperty ( FFMQAdminConstants . ADM_HEADER_ERRMSG , errorMsg ) ; sender . send ( response , DeliveryMode . NON_PERSISTENT , Message . DEFAULT_PRIORITY , Message . DEFAULT_TIME_TO_LIVE ) ; } catch ( JMSException e ) { log . error ( "Cannot process admin command" , e ) ; } finally { session . commit ( ) ; } } log . debug ( "Remote administration thread has stopped" ) ; } catch ( Throwable e ) { log . fatal ( "Administration thread failed" , e ) ; notifyStartup ( ) ; } finally { try { if ( sender != null ) sender . close ( ) ; } catch ( JMSException e ) { ErrorTools . log ( e , log ) ; } try { if ( receiver != null ) receiver . close ( ) ; } catch ( JMSException e ) { ErrorTools . log ( e , log ) ; } try { if ( session != null ) session . close ( ) ; } catch ( JMSException e ) { ErrorTools . log ( e , log ) ; } try { if ( conn != null ) conn . close ( ) ; } catch ( JMSException e ) { ErrorTools . log ( e , log ) ; } }
public class CommercePriceListUserSegmentEntryRelPersistenceImpl { /** * Returns all the commerce price list user segment entry rels where commercePriceListId = & # 63 ; . * @ param commercePriceListId the commerce price list ID * @ return the matching commerce price list user segment entry rels */ @ Override public List < CommercePriceListUserSegmentEntryRel > findByCommercePriceListId ( long commercePriceListId ) { } }
// Delegate to the ranged finder with no pagination (ALL_POS) and default ordering.
return findByCommercePriceListId ( commercePriceListId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ;
public class CommercePriceListAccountRelServiceBaseImpl { /** * Sets the commerce tier price entry remote service . * @ param commerceTierPriceEntryService the commerce tier price entry remote service */ public void setCommerceTierPriceEntryService ( com . liferay . commerce . price . list . service . CommerceTierPriceEntryService commerceTierPriceEntryService ) { } }
// Plain dependency-injection setter.
this . commerceTierPriceEntryService = commerceTierPriceEntryService ;
public class PhpWebAppSshDriver { /** * Installs git on the target machine via apt-get and returns the command's exit code. * A non-zero result is logged as a warning but still returned to the caller. * TODO modify this script using new script functionality */ private int installGit ( ) { } }
// NOTE(review): apt-get limits this to Debian/Ubuntu targets — confirm acceptable.
int resultOfCommand ; log . info ( "Installing git {}" , new Object [ ] { this } ) ; resultOfCommand = getMachine ( ) . execCommands ( "install Git" , ImmutableList . of ( "sudo apt-get -y install git" ) ) ; if ( resultOfCommand != 0 ) log . warn ( "Installing problem installing result {}" , resultOfCommand ) ; return resultOfCommand ;
public class CommonOps_DSCC { /** * This computes the trace of the matrix : < br > * < br > * trace = & sum ; < sub > i = 1 : n < / sub > { a < sub > ii < / sub > } < br > * where n = min ( numRows , numCols ) * @ param A ( Input ) Matrix . Not modified . */ public static double trace ( DMatrixSparseCSC A ) { } }
double output = 0 ; int o = Math . min ( A . numCols , A . numRows ) ; for ( int col = 0 ; col < o ; col ++ ) { int idx0 = A . col_idx [ col ] ; int idx1 = A . col_idx [ col + 1 ] ; for ( int i = idx0 ; i < idx1 ; i ++ ) { if ( A . nz_rows [ i ] == col ) { output += A . nz_values [ i ] ; break ; } } } return output ;
public class GetMapConfigurationCommand { /** * Clone a { @ link org . geomajas . configuration . LayerInfo # extraInfo } considering what may be copied to the client . * @ param extraInfoMap map of extra info * @ return cloned copy including only records which are not { @ link ServerSideOnlyInfo } */ public Map < String , LayerExtraInfo > securityCloneLayerExtraInfo ( Map < String , LayerExtraInfo > extraInfoMap ) { } }
Map < String , LayerExtraInfo > res = new HashMap < String , LayerExtraInfo > ( ) ; for ( Map . Entry < String , LayerExtraInfo > entry : extraInfoMap . entrySet ( ) ) { LayerExtraInfo value = entry . getValue ( ) ; if ( ! ( value instanceof ServerSideOnlyInfo ) ) { res . put ( entry . getKey ( ) , value ) ; } } return res ;
public class PcTargetHostsUtils { /** * Gets the node list from string line seperate or space seperate . * @ param listStr * the list str * @ param removeDuplicate * the remove duplicate * @ return the node list from string line seperate or space seperate */ public static List < String > getNodeListFromStringLineSeperateOrSpaceSeperate ( String listStr , boolean removeDuplicate ) { } }
List < String > nodes = new ArrayList < String > ( ) ; for ( String token : listStr . split ( "[\\r?\\n| +]+" ) ) { // 20131025 : fix if fqdn has space in the end . if ( token != null && ! token . trim ( ) . isEmpty ( ) ) { nodes . add ( token . trim ( ) ) ; } } if ( removeDuplicate ) { removeDuplicateNodeList ( nodes ) ; } logger . info ( "Target hosts size : " + nodes . size ( ) ) ; return nodes ;
public class HungarianAlgorithm { /** * Compute an initial feasible solution by assigning zero labels to the * workers and by assigning to each job a label equal to the minimum cost * among its incident edges . */ protected void computeInitialFeasibleSolution ( ) { } }
for ( int j = 0 ; j < dim ; j ++ ) { labelByJob [ j ] = Double . POSITIVE_INFINITY ; } for ( int w = 0 ; w < dim ; w ++ ) { for ( int j = 0 ; j < dim ; j ++ ) { if ( costMatrix [ w ] [ j ] < labelByJob [ j ] ) { labelByJob [ j ] = costMatrix [ w ] [ j ] ; } } }
public class QueryContext { /** * Matches an index for the given pattern and match hint . * @ param pattern the pattern to match an index for . May be either an * attribute name or an exact index name . * @ param matchHint the match hint . * @ return the matched index or { @ code null } if nothing matched . * @ see QueryContext . IndexMatchHint */ public Index matchIndex ( String pattern , IndexMatchHint matchHint ) { } }
return indexes . matchIndex ( pattern , matchHint ) ;
public class DefaultBitmapLruCache { /** * Measure item size in kilobytes rather than units which is more practical * for a bitmap cache */ @ Override protected int sizeOf ( String key , Bitmap value ) { } }
final int bitmapSize = BitmapLruPool . getBitmapSize ( value ) ; return bitmapSize == 0 ? 1 : bitmapSize ;
public class VisibleBufferedInputStream { /** * {@inheritDoc} * Reads up to len bytes into to[off..off+len): drains the internal buffer first, then reads directly from the wrapped stream. Reads smaller than MINIMUM_READ are funnelled through the buffer to avoid tiny reads on the underlying stream. */ public int read ( byte [ ] to , int off , int len ) throws IOException { } }
// Overflow-safe argument validation: if any of off, len, off+len, or to.length-(off+len)
// is negative, the bitwise OR is negative (standard InputStream bounds-check idiom).
if ( ( off | len | ( off + len ) | ( to . length - ( off + len ) ) ) < 0 ) { throw new IndexOutOfBoundsException ( ) ; } else if ( len == 0 ) { return 0 ; }
// if the read would go to wrapped stream , but would result
// in a small read then try read to the buffer instead
int avail = endIndex - index ;
if ( len - avail < MINIMUM_READ ) { ensureBytes ( len ) ; avail = endIndex - index ; }
// first copy from buffer
if ( avail > 0 ) { if ( len <= avail ) { System . arraycopy ( buffer , index , to , off , len ) ; index += len ; return len ; } System . arraycopy ( buffer , index , to , off , avail ) ; len -= avail ; off += avail ; }
// 'read' tracks the total bytes delivered so far (everything that came from the buffer).
int read = avail ;
// good place to reset index because the buffer is fully drained
index = 0 ; endIndex = 0 ;
// then directly from wrapped stream
// If the wrapped stream hits EOF (r <= 0) after we already delivered bytes, report the
// partial count; only report EOF/0 when nothing was delivered at all.
do { int r = wrapped . read ( to , off , len ) ; if ( r <= 0 ) { return ( read == 0 ) ? r : read ; } read += r ; off += r ; len -= r ; } while ( len > 0 ) ;
return read ;
public class InternalCallContextFactory {
    /**
     * Creates an internal call context from a user-facing {@link CallContext}, retrieving the
     * account_record_id from the table pointed to by the given object type.
     *
     * @param objectId the id of the row in the table pointed by object type where to look for account_record_id
     * @param objectType the object type pointed by this objectId
     * @param context original call context
     * @return internal call context with a non-null account_record_id (if found)
     */
    public InternalCallContext createInternalCallContext(final UUID objectId, final ObjectType objectType, final CallContext context) {
        // The context may come from a user API - for security, a cross-tenant check used to
        // live here (currently disabled).
        final Long tenantRecordId = getTenantRecordIdSafe(context);
        final Long accountRecordId = getAccountRecordIdSafe(objectId, objectType, context);
        return createInternalCallContext(tenantRecordId,
                                         accountRecordId,
                                         context.getUserName(),
                                         context.getCallOrigin(),
                                         context.getUserType(),
                                         context.getUserToken(),
                                         context.getReasonCode(),
                                         context.getComments(),
                                         context.getCreatedDate(),
                                         context.getUpdatedDate());
    }
}
public class FrameworkManager { /** * Create and launch the OSGi framework. Blocks until the framework stops: starts the framework, installs optional Java 2 security managers, runs the server (or client) lifecycle, and tears everything down via a nested finally cascade so latches are always counted down and the log provider is always stopped. * @ param config * BootstrapConfig object encapsulating active initial framework * properties * @ param logProvider * The initialized / active log provider that must be included in * framework management activities ( start / stop / . . ) , or null */ public void launchFramework ( BootstrapConfig config , LogProvider logProvider ) { } }
if ( config == null ) throw new IllegalArgumentException ( "bootstrap config must not be null" ) ;
boolean isClient = config . getProcessType ( ) . equals ( BootstrapConstants . LOC_PROCESS_TYPE_CLIENT ) ;
try {
// Prefer an externally supplied launch time so elapsed-time audit messages stay accurate.
String nTime = config . get ( BootstrapConstants . LAUNCH_TIME ) ; startTime = nTime == null ? System . nanoTime ( ) : Long . parseLong ( nTime ) ;
if ( isClient ) { Tr . audit ( tc , "audit.launchTime.client" , config . getProcessName ( ) ) ; } else { Tr . audit ( tc , "audit.launchTime" , config . getProcessName ( ) ) ; }
outputLicenseRestrictionMessage ( ) ; outputEmbeddedProductExtensions ( ) ; outputEnvironmentVariableProductExtensions ( ) ;
// Save the bootstrap config locally
this . config = config ;
// Java 2 security: install either the real SecurityManager or, with NORETHROW set,
// the doPriv-detection variant that logs missing doPrivileged blocks instead of failing.
boolean j2secManager = false ; if ( config . get ( BootstrapConstants . JAVA_2_SECURITY_PROPERTY ) != null ) { j2secManager = true ; }
String j2secNoRethrow = config . get ( BootstrapConstants . JAVA_2_SECURITY_NORETHROW ) ;
if ( j2secManager ) { if ( j2secNoRethrow == null || j2secNoRethrow . equals ( "false" ) ) { try { AccessController . doPrivileged ( new java . security . PrivilegedExceptionAction < Void > ( ) { @ Override public Void run ( ) throws Exception { System . setSecurityManager ( new SecurityManager ( ) ) ; return null ; } } ) ; } catch ( Exception ex ) { Tr . error ( tc , "error.set.securitymanager" , ex . getMessage ( ) ) ; } } else { if ( "true" . equals ( config . get ( BootstrapConstants . JAVA_2_SECURITY_UNIQUE ) ) ) MissingDoPrivDetectionSecurityManager . setUniqueOnly ( true ) ; try { AccessController . doPrivileged ( new java . security . PrivilegedExceptionAction < Void > ( ) { @ Override public Void run ( ) throws Exception { System . setSecurityManager ( new MissingDoPrivDetectionSecurityManager ( ) ) ; return null ; } } ) ; } catch ( Exception ex ) { Tr . error ( tc , "error.set.trace.securitymanager" , ex . getMessage ( ) ) ; } } Tr . info ( tc , "info.java2security.started" , config . getProcessName ( ) ) ; }
// Start the framework .
Framework fwk = startFramework ( config ) ; if ( fwk == null ) { Tr . error ( tc , "error.unableToLaunch" ) ; return ; }
// Set the framework variables only if everything succeeded .
systemBundleCtx = fwk . getBundleContext ( ) ; framework = fwk ; } catch ( BundleException e ) { throw new RuntimeException ( e ) ; } finally {
// If an error occurred , notify anyone that was waiting for the
// framework so they know it ' s not coming .
frameworkLatch . countDown ( ) ;
try { if ( framework != null ) { try { addShutdownHook ( isClient ) ; startServerCommandListener ( ) ; innerLaunchFramework ( isClient ) ;
// Indicate that kernel has been started
Tr . info ( tc , "audit.kernelStartTime" , getElapsedTime ( false ) ) ; frameworkLaunchSuccess = true ; } finally {
// If an error occurred , notify anyone that was waiting
// for launch so they know we ' re done .
frameworkLaunched . countDown ( ) ;
try { if ( ! frameworkLaunchSuccess ) { stopFramework ( ) ; } else if ( isClient ) { try { if ( waitForReady ( ) ) { launchClient ( ) ; } } catch ( InterruptedException e ) {
// Ignore
} catch ( Throwable t ) { throw new ClientRunnerException ( "Error while executing running the application" , BootstrapConstants . messages . getString ( "error.client.runner" ) , t ) ; } finally { stopFramework ( ) ; } } } finally {
// Run the server : wait indefinitely until framework stop .
// ( It might have been stopped above if an error occurred . )
waitForFrameworkStop ( ) ;
// Remove the shutdown hook in case someone stopped the OSGi
// framework without calling our shutdownFramework ( ) method .
removeShutdownHook ( ) ;
// Close the command listener port , and stop any of its threads .
if ( sc != null ) { sc . close ( ) ; }
if ( frameworkLaunchSuccess ) { if ( isClient ) { Tr . audit ( tc , "audit.kernelUpTime.client" , config . getProcessName ( ) , getElapsedTime ( true ) ) ; } else { Tr . audit ( tc , "audit.kernelUpTime" , config . getProcessName ( ) , getElapsedTime ( true ) ) ; } } } } } } finally {
// Stop the log provider .
if ( logProvider != null ) { logProvider . stop ( ) ; }
// Finally , notify any waiters that the kernel has been shutdown .
// This is done after stopping the log provider so that logs are
// flushed in case the shutdown hook will immediately exit the JVM .
frameworkShutdownLatch . countDown ( ) ; } }
public class SocketFactory { /** * On - demand creation of an SSL server socket factory for an ssl alias * @ return The SSLServerSocketFactory this connection should be using to create * secure connections . * @ throws java . io . IOException if we can ' t get a server socket factory */ private SSLServerSocketFactory getServerSocketFactory ( String id ) throws IOException { } }
// first use ? SSLServerSocketFactory serverSocketFactory = serverSocketFactoryMap . get ( id ) ; if ( serverSocketFactory == null ) { // the SSLConfig is optional , so if it ' s not there , use the default SSLSocketFactory . if ( id == null ) { serverSocketFactory = ( SSLServerSocketFactory ) SSLServerSocketFactory . getDefault ( ) ; } else { try { serverSocketFactory = sslConfig . createSSLServerFactory ( id ) ; } catch ( Exception e ) { Tr . error ( tc , "Unable to create server SSL socket factory" , e ) ; throw ( IOException ) new IOException ( "Unable to create server SSL socket factory: " + e . getMessage ( ) ) . initCause ( e ) ; } serverSocketFactoryMap . put ( id , serverSocketFactory ) ; } // There ' s a bit of a timing problem with server - side ORBs . Part of the ORB shutdown is to // establish a self - connection to shutdown the acceptor threads . This requires a client // SSL socket factory . Unfortunately , if this is occurring during server shutdown , the // FileKeystoreManager will get a NullPointerException because some name queries fail because // things are getting shutdown . Therefore , if we need the server factory , assume we ' ll also // need the client factory to shutdown , and request it now . getSocketFactory ( id ) ; } return serverSocketFactory ;
public class SemanticHeadFinder { /** * Determine which daughter of the current parse tree is the * head . It assumes that the daughters already have had their * heads determined . Uses special rule for VP heads : auxiliaries and copulas pass headship to their complement ( case 1 and case 2 ) ; otherwise falls back to the superclass and heuristically repairs punctuation heads ( case 3 ) . * @ param t The parse tree to examine the daughters of . * This is assumed to never be a leaf * @ return The parse tree that is the head */ @ Override protected Tree determineNonTrivialHead ( Tree t , Tree parent ) { } }
String motherCat = tlp . basicCategory ( t . label ( ) . value ( ) ) ;
if ( DEBUG ) { System . err . println ( "At " + motherCat + ", my parent is " + parent ) ; }
// do VPs with auxiliary as special case
if ( ( motherCat . equals ( "VP" ) || motherCat . equals ( "SQ" ) || motherCat . equals ( "SINV" ) ) ) {
Tree [ ] kids = t . children ( ) ;
// try to find if there is an auxiliary verb
if ( DEBUG ) { System . err . println ( "Semantic head finder: at VP" ) ; System . err . println ( "Class is " + t . getClass ( ) . getName ( ) ) ; t . pennPrint ( System . err ) ; }
// looks for auxiliaries
// Case 1: an auxiliary (or passive/progressive auxiliary) is present, so the semantic
// head is the complement found by scanning left for VP/ADJP.
if ( hasVerbalAuxiliary ( kids , verbalAuxiliaries ) || hasPassiveProgressiveAuxiliary ( kids , passiveAuxiliaries ) ) {
// Including NP etc seems okay for copular sentences but is
// problematic for other auxiliaries , like ' he has an answer '
// But maybe doing ADJP is fine !
String [ ] how = { "left" , "VP" , "ADJP" } ;
Tree pti = traverseLocate ( kids , how , false ) ;
if ( DEBUG ) { System . err . println ( "Determined head (case 1) for " + t . value ( ) + " is: " + pti ) ; }
if ( pti != null ) { return pti ; } else {
// fall through to the copula check / superclass when no complement was found
} }
// looks for copular verbs
// Case 2: a copula heads the phrase (and it is not existential or a WH-question),
// so headship goes to the predicate (VP/ADJP/NP/WHADJP/WHNP).
if ( hasVerbalAuxiliary ( kids , copulars ) && ! isExistential ( t , parent ) && ! isWHQ ( t , parent ) ) {
String [ ] how ;
if ( motherCat . equals ( "SQ" ) ) { how = new String [ ] { "right" , "VP" , "ADJP" , "NP" , "WHADJP" , "WHNP" } ; } else { how = new String [ ] { "left" , "VP" , "ADJP" , "NP" , "WHADJP" , "WHNP" } ; }
Tree pti = traverseLocate ( kids , how , false ) ;
// don ' t allow a temporal to become head
if ( pti != null && pti . label ( ) != null && pti . label ( ) . value ( ) . contains ( "-TMP" ) ) { pti = null ; }
// In SQ , only allow an NP to become head if there is another one to the left ( then it ' s probably predicative )
if ( motherCat . equals ( "SQ" ) && pti != null && pti . label ( ) != null && pti . label ( ) . value ( ) . startsWith ( "NP" ) ) {
boolean foundAnotherNp = false ;
for ( Tree kid : kids ) { if ( kid == pti ) { break ; } else if ( kid . label ( ) != null && kid . label ( ) . value ( ) . startsWith ( "NP" ) ) { foundAnotherNp = true ; break ; } }
if ( ! foundAnotherNp ) { pti = null ; } }
if ( DEBUG ) { System . err . println ( "Determined head (case 2) for " + t . value ( ) + " is: " + pti ) ; }
if ( pti != null ) { return pti ; } else { if ( DEBUG ) { System . err . println ( "------" ) ; System . err . println ( "SemanticHeadFinder failed to reassign head for" ) ; t . pennPrint ( System . err ) ; System . err . println ( "------" ) ; } } } }
// Case 3: defer to the syntactic head finder, then repair punctuation heads.
Tree hd = super . determineNonTrivialHead ( t , parent ) ;
// Heuristically repair punctuation heads
Tree [ ] hdChildren = hd . children ( ) ;
if ( hdChildren != null && hdChildren . length > 0 && hdChildren [ 0 ] . isLeaf ( ) ) {
if ( tlp . isPunctuationWord ( hdChildren [ 0 ] . label ( ) . value ( ) ) ) {
Tree [ ] tChildren = t . children ( ) ;
if ( DEBUG ) { System . err . printf ( "head is punct: %s\n" , hdChildren [ 0 ] . label ( ) ) ; }
// Scan right-to-left for the last non-punctuation daughter and make it the head instead.
for ( int i = tChildren . length - 1 ; i >= 0 ; i -- ) {
if ( ! tlp . isPunctuationWord ( tChildren [ i ] . children ( ) [ 0 ] . label ( ) . value ( ) ) ) {
hd = tChildren [ i ] ;
if ( DEBUG ) { System . err . printf ( "New head: %s %s" , hd . label ( ) , hd . children ( ) [ 0 ] . label ( ) ) ; }
break ; } } } }
if ( DEBUG ) { System . err . println ( "Determined head (case 3) for " + t . value ( ) + " is: " + hd ) ; }
return hd ;
public class DescribeDirectoriesRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeDirectoriesRequest describeDirectoriesRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeDirectoriesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeDirectoriesRequest . getDirectoryIds ( ) , DIRECTORYIDS_BINDING ) ; protocolMarshaller . marshall ( describeDirectoriesRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( describeDirectoriesRequest . getLimit ( ) , LIMIT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Initialization { /** * Find all the hadoop dependencies that should be loaded by druid * @ param hadoopDependencyCoordinates e . g . [ " org . apache . hadoop : hadoop - client : 2.3.0 " ] * @ param extensionsConfig ExtensionsConfig configured by druid . extensions . xxx * @ return an array of hadoop dependency files that will be loaded by druid process */ public static File [ ] getHadoopDependencyFilesToLoad ( List < String > hadoopDependencyCoordinates , ExtensionsConfig extensionsConfig ) { } }
final File rootHadoopDependenciesDir = new File ( extensionsConfig . getHadoopDependenciesDir ( ) ) ; if ( rootHadoopDependenciesDir . exists ( ) && ! rootHadoopDependenciesDir . isDirectory ( ) ) { throw new ISE ( "Root Hadoop dependencies directory [%s] is not a directory!?" , rootHadoopDependenciesDir ) ; } final File [ ] hadoopDependenciesToLoad = new File [ hadoopDependencyCoordinates . size ( ) ] ; int i = 0 ; for ( final String coordinate : hadoopDependencyCoordinates ) { final DefaultArtifact artifact = new DefaultArtifact ( coordinate ) ; final File hadoopDependencyDir = new File ( rootHadoopDependenciesDir , artifact . getArtifactId ( ) ) ; final File versionDir = new File ( hadoopDependencyDir , artifact . getVersion ( ) ) ; // find the hadoop dependency with the version specified in coordinate if ( ! hadoopDependencyDir . isDirectory ( ) || ! versionDir . isDirectory ( ) ) { throw new ISE ( "Hadoop dependency [%s] didn't exist!?" , versionDir . getAbsolutePath ( ) ) ; } hadoopDependenciesToLoad [ i ++ ] = versionDir ; } return hadoopDependenciesToLoad ;
public class SdkUtils { /** * Utility method to write given inputStream to given outputStream and update the messageDigest while transferring the bytes * @ param inputStream the inputStream to copy from . * @ param outputStream the outputStream to write to . * @ param messageDigest messageDigest to update with the outpu * @ throws IOException thrown if there was a problem reading from inputStream or writing to outputStream . * @ throws InterruptedException thrown if the thread is interrupted which indicates cancelling . */ private static void copyStream ( final InputStream inputStream , final OutputStream outputStream , MessageDigest messageDigest ) throws IOException , InterruptedException { } }
// Read the rest of the stream and write to the destination OutputStream . final byte [ ] buffer = new byte [ BUFFER_SIZE ] ; int bufferLength = 0 ; Exception exception = null ; try { while ( ( bufferLength = inputStream . read ( buffer ) ) > 0 ) { if ( Thread . currentThread ( ) . isInterrupted ( ) ) { InterruptedException e = new InterruptedException ( ) ; throw e ; } outputStream . write ( buffer , 0 , bufferLength ) ; if ( messageDigest != null ) { messageDigest . update ( buffer , 0 , bufferLength ) ; } } } catch ( Exception e ) { exception = e ; if ( exception instanceof IOException ) { throw ( IOException ) e ; } if ( exception instanceof InterruptedException ) { throw ( InterruptedException ) e ; } } finally { // Try to flush the OutputStream if ( exception == null ) { outputStream . flush ( ) ; } }
public class Codec { /** * Initializes the { @ code Codec } with a dictionary that may contain specific * codec parameters . * @ param avDictionary dictionary with codec parameters . * @ return zero on success , a negative value on error . * @ throws JavaAVException if the codec could not be opened . */ int open ( AVDictionary avDictionary ) throws JavaAVException { } }
AVCodecContext avContext = avcodec_alloc_context3 ( avCodec ) ; return open ( avDictionary , avContext ) ;
public class SimpleElement { /** * @ param tagName the tag name * @ param value the value * @ return the current { @ link Builder } * @ deprecated will be removed in next version . Use { @ link ExtensionElements # simpleElement ( String , String ) } instead . */ @ Deprecated public static Builder builder ( String tagName , String value ) { } }
return ExtensionElements . simpleElement ( tagName , value ) ;
public class NodeReferenceFactory { /** * Returns a valid { @ link Noderef } based on the specified arguments or * < code > null < / null > if that ' s not possible . < br / > * < strong > This method returns < code > null < / code > if the pathref cannot be * resolved to a node on the specified layout . < / strong > It is the * responsibility of calling code to handle this case appropriately . Resolution is two-phase : references into the owner's own layout are resolved against layoutElement ; references into a DLM fragment are resolved by loading the fragment owner's layout and evaluating the XPath . * @ return a valid { @ link Noderef } or < code > null < / null > */ public Noderef getNoderefFromPathref ( String layoutOwner , String pathref , String fname , boolean isStructRef , org . dom4j . Element layoutElement ) { } }
Validate . notNull ( layoutOwner , "Argument 'layoutOwner' cannot be null." ) ;
Validate . notNull ( pathref , "Argument 'pathref' cannot be null." ) ;
if ( log . isTraceEnabled ( ) ) { StringBuilder msg = new StringBuilder ( ) ; msg . append ( "getDlmNoderef: [layoutOwner='" ) . append ( layoutOwner ) . append ( "', pathref='" ) . append ( pathref ) . append ( "', fname='" ) . append ( fname ) . append ( "', isStructRef='" ) . append ( isStructRef ) . append ( "']" ) ; log . trace ( msg . toString ( ) ) ; log . trace ( "getDlmNoderef: user layout document follows...\n" + layoutElement . asXML ( ) ) ; }
// A pathref is "<ownerName><delim><xpath>"; anything without the delimiter is malformed.
final String [ ] pathTokens = DLM_PATH_REF_DELIM . split ( pathref ) ;
if ( pathTokens . length <= 1 ) { this . log . warn ( "Invalid DLM PathRef, no delimiter: " + pathref ) ; return null ; }
if ( pathTokens [ 0 ] . equals ( layoutOwner ) ) {
// This an internal reference ( our own layout ) ; we have to
// use the layoutExment ( instead of load - limited - layout ) b / c
// our layout may not be in the db . . .
final org . dom4j . Element target = ( org . dom4j . Element ) layoutElement . selectSingleNode ( pathTokens [ 1 ] ) ;
if ( target != null ) { return new Noderef ( target . valueOf ( "@ID" ) ) ; }
this . log . warn ( "Unable to resolve pathref '" + pathref + "' for layoutOwner '" + layoutOwner + "'" ) ;
return null ; }
/* * We know this Noderef refers to a node on a DLM fragment */
final String layoutOwnerName = pathTokens [ 0 ] ;
final String layoutPath = pathTokens [ 1 ] ;
final Integer layoutOwnerUserId = this . userIdentityStore . getPortalUserId ( layoutOwnerName ) ;
if ( layoutOwnerUserId == null ) { this . log . warn ( "Unable to resolve pathref '" + pathref + "' for layoutOwner '" + layoutOwner + "', no userId found for userName: " + layoutOwnerName ) ; return null ; }
final Tuple < String , DistributedUserLayout > userLayoutInfo = getUserLayoutTuple ( layoutOwnerName , layoutOwnerUserId ) ;
final Document userLayout = userLayoutInfo . second . getLayout ( ) ;
final Node targetNode = this . xPathOperations . evaluate ( layoutPath , userLayout , XPathConstants . NODE ) ;
if ( targetNode == null ) { this . log . warn ( "No layout node found for pathref: " + pathref ) ; return null ; }
final NamedNodeMap attributes = targetNode . getAttributes ( ) ;
// When an fname was supplied, the resolved node must carry a matching fname attribute.
if ( fname != null ) { final Node fnameAttr = attributes . getNamedItem ( "fname" ) ; if ( fnameAttr == null ) { this . log . warn ( "Layout node for pathref does not have fname attribute: " + pathref ) ; return null ; } final String nodeFname = fnameAttr . getTextContent ( ) ; if ( ! fname . equals ( nodeFname ) ) { this . log . warn ( "fname '" + nodeFname + "' on layout node not match specified fname '" + fname + "' for pathref: " + pathref ) ; return null ; } }
// Prefer the struct-id attribute ("s"/"n" prefix chooses struct vs node reference);
// otherwise fall back to the plain ID attribute.
final Node structIdAttr = attributes . getNamedItem ( "struct-id" ) ;
if ( structIdAttr != null ) { final String structId = structIdAttr . getTextContent ( ) ; if ( isStructRef ) { return new Noderef ( layoutOwnerUserId , 1 /* TODO : remove hard - coded layoutId = 1 */ , "s" + structId ) ; } return new Noderef ( layoutOwnerUserId , 1 /* TODO : remove hard - coded layoutId = 1 */ , "n" + structId ) ; }
final Node idAttr = attributes . getNamedItem ( "ID" ) ;
return new Noderef ( layoutOwnerUserId , 1 /* TODO : remove hard - coded layoutId = 1 */ , idAttr . getTextContent ( ) ) ;
public class RxLifecycleAndroid { /** * Binds the given source to a Fragment lifecycle . * This helper automatically determines ( based on the lifecycle sequence itself ) when the source * should stop emitting items . In the case that the lifecycle sequence is in the * creation phase ( CREATE , START , etc ) it will choose the equivalent destructive phase ( DESTROY , * STOP , etc ) . If used in the destructive phase , the notifications will cease at the next event ; * for example , if used in PAUSE , it will unsubscribe in STOP . * Due to the differences between the Activity and Fragment lifecycles , this method should only * be used for a Fragment lifecycle . * @ param lifecycle the lifecycle sequence of a Fragment * @ return a reusable { @ link LifecycleTransformer } that unsubscribes the source during the Fragment lifecycle */ @ NonNull @ CheckResult public static < T > LifecycleTransformer < T > bindFragment ( @ NonNull final Observable < FragmentEvent > lifecycle ) { } }
return bind ( lifecycle , FRAGMENT_LIFECYCLE ) ;
public class TableUtils { /** * Creates a new { @ link ImmutableTable } from the provided { @ link com . google . common . collect . Table . Cell } s . * The iteration order of the resulting table will respect the iteration order of the input cells . * Null keys and values are forbidden . */ public static < R , C , V > ImmutableTable < R , C , V > copyOf ( Iterable < Table . Cell < R , C , V > > cells ) { } }
final ImmutableTable . Builder < R , C , V > ret = ImmutableTable . builder ( ) ; for ( final Table . Cell < R , C , V > cell : cells ) { ret . put ( cell . getRowKey ( ) , cell . getColumnKey ( ) , cell . getValue ( ) ) ; } return ret . build ( ) ;
public class FileLog { /** * Write a pre - formated record . This is used for when we have * signed and / or encrypted audit records . * @ param record */ public synchronized void writeRecord ( byte [ ] record , String header ) { } }
long length = record . length + nlen ; PrintStream ps = getPrintStream ( length , header ) ; if ( isWindows ) ps . write ( record , 0 , ( int ) length - 2 ) ; else ps . write ( record , 0 , ( int ) length - 1 ) ; ps . println ( ) ;
public class Interface { /** * Use this API to clear Interface resources . */ public static base_responses clear ( nitro_service client , Interface resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { Interface clearresources [ ] = new Interface [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { clearresources [ i ] = new Interface ( ) ; clearresources [ i ] . id = resources [ i ] . id ; } result = perform_operation_bulk_request ( client , clearresources , "clear" ) ; } return result ;
public class ConfigSiftOrientation { /** * Creates a configuration similar to how it was originally described in the paper */ public static ConfigSiftOrientation createPaper ( ) { } }
ConfigSiftOrientation config = new ConfigSiftOrientation ( ) ; config . histogramSize = 36 ; config . sigmaEnlarge = 1.5 ; return config ;
public class JarProbeOption { /** * Adds the given resources from the current class path to the JAR . * @ param resourcePaths * list of resource paths , relative to the class path root * @ return { @ code this } for fluent syntax */ public JarProbeOption resources ( String ... resourcePaths ) { } }
for ( String resource : resourcePaths ) { resources . add ( resource ) ; } return this ;
public class ResolvableType { /** * Return a { @ link ResolvableType } for the specified { @ link Class } , doing assignability checks against the raw class only * ( analogous to { @ link Class # isAssignableFrom } , which this serves as a wrapper for . For example : * { @ code ResolvableType . forClass ( MyArrayList . class ) } . * @ param sourceClass the source class ( { @ code null } is semantically equivalent to { @ code Object . class } for typical use * cases here } * @ return a { @ link ResolvableType } for the specified class * @ see # forClass ( Class ) * @ see # getRawClass ( ) */ public static ResolvableType forRawClass ( Class < ? > sourceClass ) { } }
return new ResolvableType ( sourceClass ) { @ Override public boolean isAssignableFrom ( Class < ? > other ) { return LettuceClassUtils . isAssignable ( getRawClass ( ) , other ) ; } } ;
public class Master { /** * Returns an iterator over the generators specified in the master file ; that * is , the parsed contents of $ GENERATE statements . * @ see Generator */ public Iterator generators ( ) { } }
if ( generators != null ) return Collections . unmodifiableList ( generators ) . iterator ( ) ; else return Collections . EMPTY_LIST . iterator ( ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcConnectionPointEccentricity ( ) { } }
if ( ifcConnectionPointEccentricityEClass == null ) { ifcConnectionPointEccentricityEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 103 ) ; } return ifcConnectionPointEccentricityEClass ;
public class Dot { /** * Setting up layout dimensional parameters for the view */ private void setLayoutParameters ( ) { } }
final int dotDiameter = styledAttributes . getDimensionPixelOffset ( R . styleable . PinLock_statusDotDiameter , 50 ) ; final int margin = styledAttributes . getDimensionPixelOffset ( R . styleable . PinLock_statusDotSpacing , 30 ) ; final LinearLayout . LayoutParams params = new LinearLayout . LayoutParams ( dotDiameter , dotDiameter ) ; params . setMargins ( margin , 0 , margin , 0 ) ; setLayoutParams ( params ) ;
public class MoreLikeThis { /** * Return a query that will return docs like the passed URL . * @ return a query that will return docs like the passed URL . */ public Query like ( URL u ) throws IOException { } }
return like ( new InputStreamReader ( u . openConnection ( ) . getInputStream ( ) ) ) ;
public class DruidRunner { /** * Starts the Druid instance and returns the server it is running on * @ return server hosting Druid * @ throws Exception */ public Server run ( ) throws Exception { } }
if ( server == null ) { server = new Server ( port ) ; String basePath = "/druid" ; buildSwagger ( basePath ) ; HandlerList handlers = new HandlerList ( ) ; handlers . addHandler ( buildContext ( basePath ) ) ; server . setHandler ( handlers ) ; server . start ( ) ; } else { throw new IllegalStateException ( "Server already running" ) ; } return server ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link String } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.ibm.com/websphere/wim" , name = "displayName" ) public JAXBElement < String > createDisplayName ( String value ) { } }
return new JAXBElement < String > ( _DisplayName_QNAME , String . class , null , value ) ;
public class DefaultHudsonClient { /** * does not save the auth user info and we need to add it back . */ public static String rebuildJobUrl ( String build , String server ) throws URISyntaxException , MalformedURLException , UnsupportedEncodingException { } }
URL instanceUrl = new URL ( server ) ; String userInfo = instanceUrl . getUserInfo ( ) ; String instanceProtocol = instanceUrl . getProtocol ( ) ; // decode to handle spaces in the job name . URL buildUrl = new URL ( URLDecoder . decode ( build , "UTF-8" ) ) ; String buildPath = buildUrl . getPath ( ) ; String host = buildUrl . getHost ( ) ; int port = buildUrl . getPort ( ) ; URI newUri = new URI ( instanceProtocol , userInfo , host , port , buildPath , null , null ) ; return newUri . toString ( ) ;
public class VasSubscriberServiceProxyFactory { /** * Creates a vas subscriber web service proxy based on the delivery address . * @ param deliveryAddress delivery address * @ return subscriber web service proxy */ public VasSubscriberService createVasSubscriberService ( String deliveryAddress ) { } }
JaxWsProxyFactoryBean factory = new JaxWsProxyFactoryBean ( ) ; factory . setServiceClass ( VasSubscriberService . class ) ; factory . setAddress ( deliveryAddress ) ; SoapBindingConfiguration conf = new SoapBindingConfiguration ( ) ; conf . setVersion ( Soap12 . getInstance ( ) ) ; factory . setBindingConfig ( conf ) ; factory . getFeatures ( ) . add ( new WSAddressingFeature ( ) ) ; VasSubscriberService vasSubscriberService = ( VasSubscriberService ) factory . create ( ) ; // Force the use of the Async transport , even for synchronous calls ( ( BindingProvider ) vasSubscriberService ) . getRequestContext ( ) . put ( "use.async.http.conduit" , Boolean . TRUE ) ; return vasSubscriberService ;
public class DeleteIdentitiesRequest { /** * A list of 1-60 identities that you want to delete . * @ param identityIdsToDelete * A list of 1-60 identities that you want to delete . */ public void setIdentityIdsToDelete ( java . util . Collection < String > identityIdsToDelete ) { } }
if ( identityIdsToDelete == null ) { this . identityIdsToDelete = null ; return ; } this . identityIdsToDelete = new java . util . ArrayList < String > ( identityIdsToDelete ) ;
public class SibRaManagedConnection {

    /**
     * Destroys this managed connection. Invalidates any current connection
     * handles and closes the core SPI connection.
     *
     * <p>Remote-connection-loss exceptions during close are deliberately
     * suppressed so that destroy always completes; other SPI failures are
     * recorded via FFDC and surfaced as a {@link ResourceException}.
     *
     * @throws ResourceException if closing the core connection fails for a
     *         reason other than a lost or dropped remote ME connection
     */
    public void destroy() throws ResourceException {

        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.entry(this, TRACE, "destroy");
        }

        // In the event of an error the connection manager may call destroy
        // without calling cleanup. We need to do the cleanup logic without
        // calling cleanup as we could throw an exception from cleanup.

        // Invalidate any currently associated connections
        for (Iterator iterator = _connections.iterator(); iterator.hasNext();) {

            final SibRaConnection connection = (SibRaConnection) iterator.next();
            connection.invalidate();
        }

        try {

            // Close the core connection.
            // When destroy calls close, connections cannot be reset for future
            // usage, so force close by passing boolean true. - PM39926
            _coreConnection.close(true);

        } catch (final SIConnectionLostException exception) {
            // No FFDC code needed
            // d352473
            // We are remote to the ME and the ME connection has been lost;
            // we shall suppress this exception as we want the destroy to complete
        } catch (final SIConnectionDroppedException exception) {
            // No FFDC code needed
            // d352473
            // We are remote to the ME and the ME connection has been dropped;
            // we shall suppress this exception as we want the destroy to complete
        } catch (SIException exception) {

            // Any other SPI failure is a real error: capture FFDC, trace it,
            // and wrap it in a ResourceException for the connection manager.
            FFDCFilter.processException(exception,
                    "com.ibm.ws.sib.ra.impl.SibRaManagedConnection.destroy",
                    FFDC_PROBE_2, this);
            if (TraceComponent.isAnyTracingEnabled() && TRACE.isEventEnabled()) {
                SibTr.exception(this, TRACE, exception);
            }
            throw new ResourceException(NLS.getFormattedMessage(
                    "CONNECTION_CLOSE_CWSIV0402",
                    new Object[] { exception }, null), exception);

        } catch (SIErrorException exception) {

            // Unchecked SPI error: handled the same way but with its own
            // FFDC probe id so the two paths are distinguishable in dumps.
            FFDCFilter.processException(exception,
                    "com.ibm.ws.sib.ra.impl.SibRaManagedConnection.destroy",
                    FFDC_PROBE_8, this);
            if (TraceComponent.isAnyTracingEnabled() && TRACE.isEventEnabled()) {
                SibTr.exception(this, TRACE, exception);
            }
            throw new ResourceException(NLS.getFormattedMessage(
                    "CONNECTION_CLOSE_CWSIV0402",
                    new Object[] { exception }, null), exception);
        }

        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.exit(this, TRACE, "destroy");
        }
    }
}
public class CmsLoginManager { /** * Checks whether the given user has been inactive for longer than the configured limit . < p > * If no max inactivity time is configured , always returns false . * @ param user the user to check * @ return true if the user has been inactive for longer than the configured limit */ public boolean checkInactive ( CmsUser user ) { } }
if ( m_maxInactive == null ) { return false ; } try { long maxInactive = CmsStringUtil . parseDuration ( m_maxInactive , Long . MAX_VALUE ) ; return ( System . currentTimeMillis ( ) - user . getLastlogin ( ) ) > maxInactive ; } catch ( Exception e ) { LOG . warn ( e . getLocalizedMessage ( ) , e ) ; return false ; }
public class JsUtils {

    /**
     * Returns a QueryString representation of a JavascriptObject.
     *
     * Array-valued properties are emitted as repeated {@code key[]=value}
     * pairs; nested objects are serialized to JSON; plain scalar values are
    * emitted as {@code key=value} (empty and "null" strings are skipped).
     *
     * TODO: jquery implementation accepts a second parameter (traditional)
     */
    public static String param(JavaScriptObject js) {

        Properties prop = js.cast();
        String ret = "";
        for (String k : prop.keys()) {
            // Separate pairs with '&' after the first one.
            ret += ret.isEmpty() ? "" : "&";
            // NOTE(review): getArray(k) presumably yields null for non-array
            // values, which is what routes scalars/objects to the else branch
            // — confirm against the gwtquery Properties API.
            JsCache o = prop.getArray(k).cast();
            if (o != null) {
                for (int i = 0, l = o.length(); i < l; i++) {
                    ret += i > 0 ? "&" : "";
                    Properties p = o.<JsCache>cast().getJavaScriptObject(i);
                    if (p != null) {
                        // Array element is itself an object: serialize as JSON.
                        ret += k + "[]=" + p.toJsonString();
                    } else {
                        ret += k + "[]=" + o.getString(i);
                    }
                }
            } else {
                Properties p = prop.getJavaScriptObject(k);
                if (p != null) {
                    // NOTE(review): nested objects use tostring() here but
                    // toJsonString() in the array branch — verify whether the
                    // asymmetry is intentional.
                    ret += k + "=" + p.tostring();
                } else {
                    String v = prop.getStr(k);
                    // Skip empty values and the literal string "null".
                    if (v != null && !v.isEmpty() && !"null".equalsIgnoreCase(v)) {
                        ret += k + "=" + v;
                    }
                }
            }
        }
        return ret;
    }
}
public class Span { /** * Returns a Span that covers all rows beginning with a prefix String parameters will be encoded * as UTF - 8 */ public static Span prefix ( CharSequence rowPrefix ) { } }
Objects . requireNonNull ( rowPrefix ) ; return prefix ( Bytes . of ( rowPrefix ) ) ;
public class IdentifierType { /** * Sets the value of the provided property to the provided value . * @ param propName * allowed object is { @ link String } * @ param value * allowed object is { @ link Object } */ public void set ( String propName , Object value ) { } }
if ( propName . equals ( "uniqueId" ) ) { setUniqueId ( ( ( String ) value ) ) ; } if ( propName . equals ( "uniqueName" ) ) { setUniqueName ( ( ( String ) value ) ) ; } if ( propName . equals ( "externalId" ) ) { setExternalId ( ( ( String ) value ) ) ; } if ( propName . equals ( "externalName" ) ) { setExternalName ( ( ( String ) value ) ) ; } if ( propName . equals ( "repositoryId" ) ) { setRepositoryId ( ( ( String ) value ) ) ; }
public class CacheHeader { /** * Set headers to disallow caching in browser , proxy servers and dispatcher for the current response . * @ param response Current response */ public static void setNonCachingHeaders ( @ NotNull HttpServletResponse response ) { } }
response . setHeader ( HEADER_PRAGMA , "no-cache" ) ; response . setHeader ( HEADER_CACHE_CONTROL , "no-cache" ) ; response . setHeader ( HEADER_EXPIRES , "0" ) ; response . setHeader ( HEADER_DISPATCHER , "no-cache" ) ;
public class Matrices { /** * Returns a copied version of a given matrix . The returned matrix will * have the same dimensionality , values , and sparsity , but it may not have * the same exact sub - type . * @ param matrix the matrix to be copied * @ throws IllegalArgumentException when the dimensionality of matrix and * output do not match * @ return a copied version of matrix */ public static Matrix copy ( Matrix matrix ) { } }
Matrix copiedMatrix = null ; if ( matrix instanceof SparseMatrix ) copiedMatrix = Matrices . create ( matrix . rows ( ) , matrix . columns ( ) , Type . SPARSE_IN_MEMORY ) ; else copiedMatrix = Matrices . create ( matrix . rows ( ) , matrix . columns ( ) , Type . DENSE_IN_MEMORY ) ; return copyTo ( matrix , copiedMatrix ) ;