signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class GetCampaignTargetingCriteria {

  /**
   * Runs the example: pages through all KEYWORD, LANGUAGE, LOCATION and PLATFORM
   * campaign criteria and prints each one.
   *
   * @param adWordsServices the services factory.
   * @param session the session.
   * @throws ApiException if the API request failed with one or more service errors.
   * @throws RemoteException if the API request failed due to other errors.
   */
  public static void runExample(AdWordsServicesInterface adWordsServices, AdWordsSession session)
      throws RemoteException {
    // Get the CampaignCriterionService.
    CampaignCriterionServiceInterface criterionService =
        adWordsServices.get(session, CampaignCriterionServiceInterface.class);

    // Build a selector restricted to the criterion types we want to display.
    SelectorBuilder selectorBuilder = new SelectorBuilder();
    Selector selector =
        selectorBuilder
            .fields(
                CampaignCriterionField.CampaignId,
                CampaignCriterionField.Id,
                CampaignCriterionField.CriteriaType,
                CampaignCriterionField.PlatformName,
                CampaignCriterionField.LanguageName,
                CampaignCriterionField.LocationName,
                CampaignCriterionField.KeywordText)
            .in(CampaignCriterionField.CriteriaType, "KEYWORD", "LANGUAGE", "LOCATION", "PLATFORM")
            .offset(0)
            .limit(PAGE_SIZE)
            .build();

    int offset = 0;
    CampaignCriterionPage page;
    do {
      page = criterionService.get(selector);
      if (page.getEntries() == null) {
        System.out.println("No campaign criteria were found.");
      } else {
        // Display the criteria on the current page.
        for (CampaignCriterion campaignCriterion : page.getEntries()) {
          System.out.printf(
              "Campaign criterion with campaign ID %d, criterion ID %d, "
                  + "and type '%s' was found.%n",
              campaignCriterion.getCampaignId(),
              campaignCriterion.getCriterion().getId(),
              campaignCriterion.getCriterion().getCriterionType());
        }
      }
      // Advance the selector to the next page.
      offset += PAGE_SIZE;
      selector = selectorBuilder.increaseOffsetBy(PAGE_SIZE).build();
    } while (offset < page.getTotalNumEntries());
  }
}
public class JavaTranscoder { /** * Deserialize given stream using java deserialization . * @ param in * data to deserialize * @ return deserialized object */ protected Object deserialize ( final byte [ ] in ) { } }
Object o = null ; ByteArrayInputStream bis = null ; ConfigurableObjectInputStream is = null ; try { if ( in != null ) { bis = new ByteArrayInputStream ( in ) ; is = new ConfigurableObjectInputStream ( bis , Thread . currentThread ( ) . getContextClassLoader ( ) ) ; o = is . readObject ( ) ; is . close ( ) ; bis . close ( ) ; } } catch ( IOException e ) { LOGGER . warn ( String . format ( "Caught IOException decoding %d bytes of data" , in . length ) , e ) ; } catch ( ClassNotFoundException e ) { LOGGER . warn ( String . format ( "Caught CNFE decoding %d bytes of data" , in . length ) , e ) ; } finally { close ( is ) ; close ( bis ) ; } return o ;
public class QueryResult {

  /**
   * Parses the value of a link field from a query-result node, recursively building
   * a {@link DBObject} for each linked "doc" child, adding it to the linked-object
   * cache for the owning object and link, and appending each linked object's ID to
   * the owner's field values.
   *
   * @param owningObj    object that owns the link field; must not be null
   * @param linkNode     node holding the link field's value; must be a collection
   * @param linkFieldDef definition of the link field; must satisfy isLinkField()
   */
  private void parseLinkValue(DBObject owningObj, UNode linkNode, FieldDefinition linkFieldDef) {
    // Prerequisites:
    assert owningObj != null;
    assert linkNode != null;
    assert linkFieldDef != null;
    assert linkFieldDef.isLinkField();
    TableDefinition tableDef = linkFieldDef.getTableDef();

    // Value should be an array, though it could be a map with one child.
    Utils.require(linkNode.isCollection(),
        "Value of link field should be a collection: " + linkNode);

    // Iterate through child nodes. Linked objects belong to the link's extent table.
    TableDefinition extentTableDef =
        tableDef.getAppDef().getTableDef(linkFieldDef.getLinkExtent());
    for (UNode childNode : linkNode.getMemberList()) {
      // Ensure this element is "doc" node.
      Utils.require(childNode.getName().equals("doc"),
          "link field array values should be 'doc' objects: " + childNode);

      // Recurse and build a DBObject from the doc node.
      DBObject linkedObject = parseObject(extentTableDef, childNode);

      // Add the linked object to the cache and add its object ID to the set.
      String objID = linkedObject.getObjectID();
      cacheLinkedObject(owningObj.getObjectID(), linkFieldDef.getName(), linkedObject);
      owningObj.addFieldValues(linkFieldDef.getName(), Arrays.asList(objID));
    }
  }
}
public class PtrCLog { /** * Send a VERBOSE log message . * @ param tag * @ param msg * @ param throwable */ public static void v ( String tag , String msg , Throwable throwable ) { } }
if ( sLevel > LEVEL_VERBOSE ) { return ; } Log . v ( tag , msg , throwable ) ;
public class DigitalSignature { /** * Verifies whether the given content matches the given signature . * @ param content The content to be verified . * @ param publicKey The public key to use in the verification process . * @ param signature The signature with which the content is to be verified . This can be obtained via * { @ link Keys # newKeyPair ( ) } . * @ return If the content matches the given signature , using the given key , true . */ public boolean verify ( String content , PublicKey publicKey , String signature ) { } }
byte [ ] bytes = content . getBytes ( StandardCharsets . UTF_8 ) ; InputStream input = new ByteArrayInputStream ( bytes ) ; return verify ( input , publicKey , signature ) ; // ByteArrayInputStream does not need to be closed .
public class RtfField {

  /**
   * Writes the end of the field result area: the delimiter followed by the
   * group-closing token.
   *
   * @param result the {@code OutputStream} to write to.
   * @throws IOException on i/o errors.
   */
  private void writeFieldResultEnd(final OutputStream result) throws IOException {
    result.write(DELIMITER);
    result.write(CLOSE_GROUP);
  }
}
public class GuiceDynamicProxyProvider { /** * A MethodHandler that proxies the Method invocation through to a Guice - acquired instance */ @ Override public Object invoke ( Object self , Method thisMethod , Method proceed , Object [ ] args ) throws Throwable { } }
// Get an instance of the implementing class via Guice final Object instance = registry . getInjector ( ) . getInstance ( clazz ) ; return thisMethod . invoke ( instance , args ) ;
public class OauthAPI { /** * 生成回调url , 这个结果要求用户在微信中打开 , 即可获得token , 并指向redirectUrl * @ param redirectUrl 用户自己设置的回调地址 * @ param scope 授权作用域 * @ param state 用户自带参数 * @ return 回调url , 用户在微信中打开即可开始授权 */ public String getOauthPageUrl ( String redirectUrl , OauthScope scope , String state ) { } }
BeanUtil . requireNonNull ( redirectUrl , "redirectUrl is null" ) ; BeanUtil . requireNonNull ( scope , "scope is null" ) ; String userState = StrUtil . isBlank ( state ) ? "STATE" : state ; String url = null ; try { url = URLEncoder . encode ( redirectUrl , "UTF-8" ) ; } catch ( UnsupportedEncodingException e ) { LOG . error ( "异常" , e ) ; } StringBuilder stringBuilder = new StringBuilder ( "https://open.weixin.qq.com/connect/oauth2/authorize?" ) ; stringBuilder . append ( "appid=" ) . append ( this . config . getAppid ( ) ) . append ( "&redirect_uri=" ) . append ( url ) . append ( "&response_type=code&scope=" ) . append ( scope . toString ( ) ) . append ( "&state=" ) . append ( userState ) . append ( "#wechat_redirect" ) ; return stringBuilder . toString ( ) ;
public class RequestParam { /** * Returns a request parameter . < br > * In addition the method fixes problems with incorrect UTF - 8 characters returned by the servlet engine . * All character data is converted from ISO - 8859-1 to UTF - 8 if not ' _ charset _ ' parameter is provided . * @ param request Request . * @ param param Parameter name . * @ param defaultValue Default value . * @ return Parameter value or the default value if it is not set . */ public static @ Nullable String get ( @ NotNull ServletRequest request , @ NotNull String param , @ Nullable String defaultValue ) { } }
String value = request . getParameter ( param ) ; if ( value != null ) { // convert encoding to UTF - 8 if not form encoding parameter is set if ( ! hasFormEncodingParam ( request ) ) { value = convertISO88591toUTF8 ( value ) ; } return value ; } else { return defaultValue ; }
public class Vector3d { /** * Adds vector v1 to v2 and places the result in this vector . * @ param v1 * left - hand vector * @ param v2 * right - hand vector */ public void add ( Vector3d v1 , Vector3d v2 ) { } }
x = v1 . x + v2 . x ; y = v1 . y + v2 . y ; z = v1 . z + v2 . z ;
public class Etag { /** * 计算二进制数据的etag * @ param data 二进制数据 * @ param offset 起始字节索引 * @ param length 需要计算的字节长度 * @ return 二进制数据的etag */ public static String data ( byte [ ] data , int offset , int length ) { } }
InputStream in = null ; try { in = new ByteArrayInputStream ( data , offset , length ) ; return stream ( in , length ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } finally { if ( in != null ) { try { in . close ( ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } } } // never reach return null ;
public class JwkDefinitionSource {

  /**
   * Fetches the JWK Set from the provided <code>URL</code> and returns a
   * <code>Map</code> keyed by the JWK keyId (&quot;kid&quot;) and mapped to an
   * association of the {@link JwkDefinition} and {@link SignatureVerifier}.
   * Uses a {@link JwkSetConverter} to convert the JWK Set URL source to a set of
   * {@link JwkDefinition}(s) followed by the instantiation of a
   * {@link SignatureVerifier} which is associated to its {@link JwkDefinition}.
   *
   * <p>Only RSA and EC keys are retained; definitions of any other key type are
   * silently skipped.
   *
   * <p>NOTE(review): {@code jwkSetSource} is never closed in this method —
   * presumably {@code jwkSetConverter.convert()} consumes and closes it; confirm.
   *
   * @param jwkSetUrl the JWK Set URL
   * @return a <code>Map</code> keyed by the JWK keyId and mapped to an association
   *     of {@link JwkDefinition} and {@link SignatureVerifier}
   * @see JwkSetConverter
   */
  static Map<String, JwkDefinitionHolder> loadJwkDefinitions(URL jwkSetUrl) {
    InputStream jwkSetSource;
    try {
      jwkSetSource = jwkSetUrl.openStream();
    } catch (IOException ex) {
      throw new JwkException(
          "An I/O error occurred while reading from the JWK Set source: " + ex.getMessage(), ex);
    }
    Set<JwkDefinition> jwkDefinitionSet = jwkSetConverter.convert(jwkSetSource);
    // LinkedHashMap preserves the order in which definitions were converted.
    Map<String, JwkDefinitionHolder> jwkDefinitions =
        new LinkedHashMap<String, JwkDefinitionHolder>();
    for (JwkDefinition jwkDefinition : jwkDefinitionSet) {
      if (JwkDefinition.KeyType.RSA.equals(jwkDefinition.getKeyType())) {
        jwkDefinitions.put(jwkDefinition.getKeyId(),
            new JwkDefinitionHolder(jwkDefinition,
                createRsaVerifier((RsaJwkDefinition) jwkDefinition)));
      } else if (JwkDefinition.KeyType.EC.equals(jwkDefinition.getKeyType())) {
        jwkDefinitions.put(jwkDefinition.getKeyId(),
            new JwkDefinitionHolder(jwkDefinition,
                createEcVerifier((EllipticCurveJwkDefinition) jwkDefinition)));
      }
    }
    return jwkDefinitions;
  }
}
public class PhotosApi { /** * Set the meta information for a photo . * < br > * This method requires authentication with ' write ' permission . * @ param photoId Required . The id of the photo to set metadata for . * @ param title Required . Title for the photo . * @ param description Required . Description for the photo . * @ return response object with the result of the requested operation . * @ throws JinxException if required parameters are null or empty , or if there are any errors . * @ see < a href = " https : / / www . flickr . com / services / api / flickr . photos . setMeta . html " > flickr . photos . setMeta < / a > */ public Response setMeta ( String photoId , String title , String description ) throws JinxException { } }
JinxUtils . validateParams ( photoId , title , description ) ; Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.photos.setMeta" ) ; params . put ( "photo_id" , photoId ) ; params . put ( "title" , title ) ; params . put ( "description" , description ) ; return jinx . flickrPost ( params , Response . class ) ;
public class CFG { /** * Get the first predecessor reachable from given edge type . * @ param target * the target block * @ param edgeType * the edge type leading from the predecessor * @ return the predecessor , or null if there is no incoming edge with the * specified edge type */ public BasicBlock getPredecessorWithEdgeType ( BasicBlock target , @ Type int edgeType ) { } }
Edge edge = getIncomingEdgeWithType ( target , edgeType ) ; return edge != null ? edge . getSource ( ) : null ;
public class DependencyGraph { /** * Removes an element and its dependencies from the graph . */ public void remove ( T element ) { } }
DependencyNode < T > node = _nodes . remove ( element ) ; _orphans . remove ( element ) ; // Remove ourselves as a child of our parents . for ( DependencyNode < T > parent : node . parents ) { parent . children . remove ( node ) ; } // Remove ourselves as a parent of our children , possibly orphaning them . for ( DependencyNode < T > child : node . children ) { child . parents . remove ( node ) ; if ( child . parents . isEmpty ( ) ) { _orphans . add ( child . content ) ; } }
public class Adler32 { /** * Updates the checksum with the specified array of bytes . */ public void update ( byte [ ] b , int off , int len ) { } }
if ( b == null ) { throw new NullPointerException ( ) ; } if ( off < 0 || len < 0 || off > b . length - len ) { throw new ArrayIndexOutOfBoundsException ( ) ; } adler = updateBytes ( adler , b , off , len ) ;
public class JavaUtils {

  /**
   * Erases a type according to the JLS type-erasure rules.
   *
   * <p>Fix: the javadoc named the parameter {@code t}, which did not match the
   * actual parameter name {@code type}.
   *
   * @param type the type to erase
   * @return the erased raw {@code Class}
   */
  public static Class<?> erasure(Type type) {
    if (type instanceof ParameterizedType) {
      // A parameterized type erases to the erasure of its raw type.
      return erasure(((ParameterizedType) type).getRawType());
    }
    if (type instanceof TypeVariable<?>) {
      // A type variable erases to the erasure of its leftmost bound.
      return erasure(((TypeVariable<?>) type).getBounds()[0]);
    }
    if (type instanceof WildcardType) {
      // A wildcard erases to the erasure of its (leftmost) upper bound.
      return erasure(((WildcardType) type).getUpperBounds()[0]);
    }
    if (type instanceof GenericArrayType) {
      // Build a zero-length array of the erased component type to get its class.
      return Array.newInstance(
          erasure(((GenericArrayType) type).getGenericComponentType()), 0).getClass();
    }
    // Only type left is a plain class.
    return (Class<?>) type;
  }
}
public class RemotePtoPSupport {

  /**
   * Reconstitutes the local queue point: scans the message store for
   * {@code AOContainerItemStream}s (remote-get state), re-attaches each one and
   * re-creates the anycast output handler for the destination.
   *
   * <p>NOTE(review): the durable-pseudo-destination sanity check is guarded by
   * {@code tc.isDebugEnabled()}, so the {@code SIErrorException} can only be
   * thrown when debug tracing is on — confirm this gating is intentional.
   *
   * @param startMode ME start-mode flags (see {@code JsConstants.ME_START_*})
   * @return the number of AO container item streams found
   * @throws MessageStoreException if cursor access to the message store fails
   * @see com.ibm.ws.sib.processor.impl.destination.AbstractRemoteSupport#reconstituteLocalQueuePoint(int,
   *     com.ibm.ws.sib.processor.impl.ConsumerDispatcher)
   */
  public int reconstituteLocalQueuePoint(int startMode) throws MessageStoreException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
      SibTr.entry(tc, "reconstituteLocalQueuePoint", new Object[] { Integer.valueOf(startMode) });

    // also check if there is an AOContainerItemStream for Remote Get
    NonLockingCursor cursor = _baseDestinationHandler.newNonLockingItemStreamCursor(
        new ClassEqualsFilter(AOContainerItemStream.class));
    int aoCount = 0;
    AOContainerItemStream aoTempItemStream = null;
    do {
      aoTempItemStream = (AOContainerItemStream) cursor.next();
      if (aoTempItemStream != null) {
        // NOTE: since this destination is PtoP it should NOT be possible to end up
        // recovering an aostream used for durable. Still, bugs happen, so here's a
        // sanity check
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()
            && (aoTempItemStream.getDurablePseudoDestID() != null)) {
          if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "reconstituteLocalQueuePoint", "SIResourceException");
          throw new SIErrorException(nls.getFormattedMessage(
              "INTERNAL_MESSAGING_ERROR_CWSIP0003",
              new Object[] { "com.ibm.ws.sib.processor.impl.destination.RemotePtoPSupport",
                  "1:138:1.8.1.5", null },
              null));
        }
        aoCount++;
        _aoContainerItemStream = aoTempItemStream;
        // Don't do flush if we are asked to start in recovery mode
        if (((startMode & JsConstants.ME_START_FLUSH) == JsConstants.ME_START_FLUSH)
            && ((startMode & JsConstants.ME_START_RECOVERY) == 0)) {
          getAnycastOutputHandler(
              (DestinationDefinition) _baseDestinationHandler.getDefinition(), true);
        } else {
          getAnycastOutputHandler(
              (DestinationDefinition) _baseDestinationHandler.getDefinition(), false);
        }
      }
    } while (aoTempItemStream != null);
    cursor.finished();

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
      SibTr.exit(tc, "reconstituteLocalQueuePoint", Integer.valueOf(aoCount));
    return aoCount;
  }
}
public class ReEncryptRequest { /** * Encryption context used to encrypt and decrypt the data specified in the < code > CiphertextBlob < / code > parameter . * @ param sourceEncryptionContext * Encryption context used to encrypt and decrypt the data specified in the < code > CiphertextBlob < / code > * parameter . */ public void setSourceEncryptionContext ( java . util . Map < String , String > sourceEncryptionContext ) { } }
this . sourceEncryptionContext = sourceEncryptionContext == null ? null : new com . amazonaws . internal . SdkInternalMap < String , String > ( sourceEncryptionContext ) ;
public class VerificationConditionGenerator {

  /**
   * Translates the sequence of invariant expressions which constitute the loop
   * invariant of a loop into one macro per clause, allocating each macro into the
   * given WyAL file.
   *
   * @param stmt the loop whose invariant clauses are translated
   * @param declaration the enclosing function or method declaration
   * @param wyalFile the WyAL file into which the macros are allocated
   */
  private void translateLoopInvariantMacros(Stmt.Loop stmt,
      WyilFile.Decl.FunctionOrMethod declaration, WyalFile wyalFile) {
    // NOTE(review): prefix is computed but never used in this method — confirm
    // whether it is dead code.
    Identifier[] prefix = declaration.getQualifiedName().toName().getAll();
    Tuple<WyilFile.Expr> loopInvariant = stmt.getInvariant();
    for (int i = 0; i != loopInvariant.size(); ++i) {
      WyilFile.Expr clause = loopInvariant.get(i);
      // Macro name is derived from the clause's syntactic index, keeping names
      // unique per clause.
      Name ident = convert(declaration.getQualifiedName(),
          "_loopinvariant_" + clause.getIndex(), declaration.getName());
      // Construct fresh environment for this macro. This is necessary to
      // avoid name clashes with subsequent macros.
      GlobalEnvironment globalEnvironment = new GlobalEnvironment(declaration);
      LocalEnvironment localEnvironment = new LocalEnvironment(globalEnvironment);
      WyalFile.VariableDeclaration[] vars =
          generateLoopInvariantParameterDeclarations(stmt, localEnvironment);
      WyalFile.Stmt.Block e = translateAsBlock(clause, localEnvironment.clone());
      Named.Macro macro = new Named.Macro(ident, vars, e);
      wyalFile.allocate(macro);
    }
  }
}
public class VectorClock {

  /**
   * Increments the version info associated with the given node and records the
   * supplied timestamp.
   *
   * <p>NOTE(review): the capacity check runs AFTER the map is updated, so when
   * {@code MAX_NUMBER_OF_VERSIONS} is reached the clock has already been mutated
   * by the time the {@code IllegalStateException} is thrown — confirm this is
   * intentional.
   *
   * @param node the node (must fit in a short: 0..Short.MAX_VALUE)
   * @param time timestamp to record on this clock
   */
  public void incrementVersion(int node, long time) {
    if (node < 0 || node > Short.MAX_VALUE)
      throw new IllegalArgumentException(node
          + " is outside the acceptable range of node ids.");
    this.timestamp = time;
    // Bump the node's entry, starting at 1 for a previously unseen node.
    Long version = versionMap.get((short) node);
    if (version == null) {
      version = 1L;
    } else {
      version = version + 1L;
    }
    versionMap.put((short) node, version);
    if (versionMap.size() >= MAX_NUMBER_OF_VERSIONS) {
      throw new IllegalStateException("Vector clock is full!");
    }
  }
}
public class JsonConfig { /** * Registers a PropertyNameProcessor . < br > * [ Java - & gt ; JSON ] * @ param target the class to use as key * @ param propertyNameProcessor the processor to register */ public void registerJsonPropertyNameProcessor ( Class target , PropertyNameProcessor propertyNameProcessor ) { } }
if ( target != null && propertyNameProcessor != null ) { jsonPropertyNameProcessorMap . put ( target , propertyNameProcessor ) ; }
public class XmlParser { /** * This method pushes a string back onto input . * It is useful either as the expansion of an internal entity , or for * backtracking during the parse . * Call pushCharArray ( ) to do the actual work . * @ param s * The string to push back onto input . * @ see # pushCharArray */ private void pushString ( String ename , String s ) throws SAXException { } }
char [ ] ch = s . toCharArray ( ) ; pushCharArray ( ename , ch , 0 , ch . length ) ;
public class ConcurrentLinkedQueue {

  /**
   * Removes a single instance of the specified element from this queue, if it is
   * present. More formally, removes an element {@code e} such that
   * {@code o.equals(e)}, if this queue contains one or more such elements.
   * Returns {@code true} if this queue contained the specified element (or
   * equivalently, if this queue changed as a result of the call).
   *
   * <p>Lock-free: logical removal is a CAS of the matched node's item to null;
   * physical unlinking of the dead node is a best-effort CAS of the
   * predecessor's next pointer.
   *
   * @param o element to be removed from this queue, if present
   * @return {@code true} if this queue changed as a result of the call
   */
  public boolean remove(Object o) {
    if (o != null) {
      Node<E> next, pred = null;
      for (Node<E> p = first(); p != null; pred = p, p = next) {
        boolean removed = false;
        E item = p.item;
        if (item != null) {
          if (!o.equals(item)) {
            next = succ(p);
            continue;
          }
          // Logical removal: null out the item. Another thread may win the race,
          // in which case removed stays false and the scan continues.
          removed = casItem(p, item, null);
        }
        next = succ(p);
        if (pred != null && next != null) // unlink
          casNext(pred, p, next);
        if (removed)
          return true;
      }
    }
    return false;
  }
}
public class CitrusRemoteServer {

  /**
   * Starts the server instance and listens for incoming requests: configures the
   * listen port, initializes the remote application, optionally runs the
   * configured tests, and stops again immediately when time-to-live is zero.
   */
  public void start() {
    application = new CitrusRemoteApplication(configuration);
    // Configure the listen port before initializing the application.
    port(configuration.getPort());
    application.init();

    if (!configuration.isSkipTests()) {
      new RunController(configuration).run();
    }

    // A zero time-to-live means run once and shut down right away.
    if (configuration.getTimeToLive() == 0) {
      stop();
    }
  }
}
public class AptControlInterface { /** * Build a list of properties defined by getter / setter methods on this control interface . */ private ArrayList < AptControlInterfaceProperty > initIntfProperties ( ) { } }
HashMap < String , AptControlInterfaceProperty > intfPropMap = new HashMap < String , AptControlInterfaceProperty > ( ) ; Collection < AptOperation > ops = getOperations ( ) ; for ( AptOperation op : ops ) { String opName = op . getName ( ) ; if ( ! op . isPublic ( ) ) { continue ; } if ( isGetter ( op ) ) { String propertyName = getIntfPropertyName ( op ) ; if ( intfPropMap . containsKey ( propertyName ) ) { intfPropMap . get ( propertyName ) . setGetterName ( opName ) ; } else { intfPropMap . put ( propertyName , new AptControlInterfaceProperty ( propertyName , opName , null ) ) ; } } else if ( isSetter ( op ) ) { String propertyName = getIntfPropertyName ( op ) ; if ( intfPropMap . containsKey ( propertyName ) ) { intfPropMap . get ( propertyName ) . setSetterName ( opName ) ; } else { intfPropMap . put ( propertyName , new AptControlInterfaceProperty ( propertyName , null , opName ) ) ; } } else if ( isIsGetter ( op ) ) { String propertyName = getIntfPropertyName ( op ) ; if ( intfPropMap . containsKey ( propertyName ) ) { intfPropMap . get ( propertyName ) . setGetterName ( opName ) ; } else { intfPropMap . put ( propertyName , new AptControlInterfaceProperty ( propertyName , opName , null ) ) ; } } } return new ArrayList < AptControlInterfaceProperty > ( intfPropMap . values ( ) ) ;
public class World { /** * Gets the robot in the reference field * @ param robot the robot to use as centre * @ return the robot in front , or null if field is empty */ public Robot getNeighbour ( Robot robot ) { } }
Point neighbourPos = getReferenceField ( robot , 1 ) ; return getRobotAt ( neighbourPos ) ;
public class TemplateParser { /** * Find all the parameters this expression depends on . This is either the local variables ( from a * v - for loop ) or the $ event variable . * @ param expression An expression from the Template * @ param parameters The parameters this expression depends on */ private void findExpressionParameters ( Expression expression , List < VariableInfo > parameters ) { } }
if ( expression instanceof NameExpr ) { NameExpr nameExpr = ( ( NameExpr ) expression ) ; if ( "$event" . equals ( nameExpr . getNameAsString ( ) ) ) { processEventParameter ( expression , nameExpr , parameters ) ; } else { processNameExpression ( nameExpr , parameters ) ; } } expression . getChildNodes ( ) . stream ( ) . filter ( Expression . class :: isInstance ) . map ( Expression . class :: cast ) . forEach ( exp -> findExpressionParameters ( exp , parameters ) ) ;
public class ClassPathMapperScanner { /** * 从环境变量中获取 mapper 配置信息 * @ param environment */ public void setMapperProperties ( Environment environment ) { } }
Config config = SpringBootBindUtil . bind ( environment , Config . class , Config . PREFIX ) ; if ( mapperHelper == null ) { mapperHelper = new MapperHelper ( ) ; } if ( config != null ) { mapperHelper . setConfig ( config ) ; }
public class ReactiveTypes { /** * Returns a collection of Single - Value wrapper types . * @ return a collection of Single - Value wrapper types . */ public static Collection < Class < ? > > getSingleValueTypes ( ) { } }
return REACTIVE_WRAPPERS . entrySet ( ) . stream ( ) . filter ( entry -> ! entry . getValue ( ) . isMultiValue ( ) ) . map ( Entry :: getKey ) . collect ( Collectors . toList ( ) ) ;
public class KeyConverter {

  /**
   * Converts the specified value to
   * {@code com.google.appengine.api.datastore.Key} by parsing its string form.
   *
   * @see org.apache.commons.beanutils.converters.AbstractConverter#convertToType(java.lang.Class,
   *     java.lang.Object)
   */
  @SuppressWarnings("rawtypes")
  @Override
  protected Object convertToType(Class type, Object value) throws Throwable {
    // Parse the web-safe string representation back into a datastore Key.
    return KeyFactory.stringToKey(value.toString());
  }
}
public class Messenger {

  /**
   * Signs up a new user by delegating to the auth module.
   *
   * @param name name
   * @param sex sex of user
   * @param transactionHash transaction hash
   * @return promise of AuthRes
   */
  @NotNull
  @ObjectiveCName("doSignupWithName:withSex:withTransaction:")
  public Promise<AuthRes> doSignup(String name, Sex sex, String transactionHash) {
    return modules.getAuthModule().doSignup(name, sex, transactionHash);
  }
}
public class SetTopBoxCreative {

  /**
   * Gets the licenseWindowEndDateTime value for this SetTopBoxCreative.
   *
   * @return licenseWindowEndDateTime the date and time that this creative can no
   *     longer be served from a local cable video-on-demand server. This
   *     attribute is optional.
   */
  public com.google.api.ads.admanager.axis.v201808.DateTime getLicenseWindowEndDateTime() {
    return licenseWindowEndDateTime;
  }
}
public class PebbleKit { /** * Get the version information of the firmware running on a connected watch . * @ param context * The Android context used to perform the query . * < em > Protip : < / em > You probably want to use your ApplicationContext here . * @ return null if the watch is disconnected or we can ' t get the version . Otherwise , * a FirmwareVersionObject containing info on the watch FW version */ public static FirmwareVersionInfo getWatchFWVersion ( final Context context ) { } }
Cursor c = null ; try { c = queryProvider ( context ) ; if ( c == null || ! c . moveToNext ( ) ) { return null ; } int majorVersion = c . getInt ( KIT_STATE_COLUMN_VERSION_MAJOR ) ; int minorVersion = c . getInt ( KIT_STATE_COLUMN_VERSION_MINOR ) ; int pointVersion = c . getInt ( KIT_STATE_COLUMN_VERSION_POINT ) ; String versionTag = c . getString ( KIT_STATE_COLUMN_VERSION_TAG ) ; return new FirmwareVersionInfo ( majorVersion , minorVersion , pointVersion , versionTag ) ; } finally { if ( c != null ) { c . close ( ) ; } }
public class DateTimePickerBase { /** * { @ inheritDoc } */ @ Override public Date getValue ( ) { } }
try { return dateTimeFormat != null && textBox . getValue ( ) != null ? dateTimeFormat . parse ( textBox . getValue ( ) ) : null ; } catch ( final Exception e ) { return null ; }
public class FileSmoosher { /** * Merges temporary files created by delegated SmooshedWriters on to the main * smoosh file . * @ throws IOException */ private void mergeWithSmoosher ( ) throws IOException { } }
// Get processed elements from the stack and write . List < File > fileToProcess = new ArrayList < > ( completedFiles ) ; completedFiles = new ArrayList < > ( ) ; for ( File file : fileToProcess ) { add ( file ) ; if ( ! file . delete ( ) ) { LOG . warn ( "Unable to delete file [%s]" , file ) ; } }
public class RuntimeDataServiceImpl { /** * end * predicates for collection filtering */ protected void applyQueryFilter ( Map < String , Object > params , QueryFilter queryFilter ) { } }
if ( queryFilter != null ) { applyQueryContext ( params , queryFilter ) ; if ( queryFilter . getFilterParams ( ) != null && ! queryFilter . getFilterParams ( ) . isEmpty ( ) ) { params . put ( FILTER , queryFilter . getFilterParams ( ) ) ; for ( String key : queryFilter . getParams ( ) . keySet ( ) ) { params . put ( key , queryFilter . getParams ( ) . get ( key ) ) ; } } }
public class JUnit3FloatingPointComparisonWithoutDelta { /** * Gets the unboxed type , or the original type if it is not unboxable . */ private Type unboxedTypeOrType ( VisitorState state , Type type ) { } }
Types types = state . getTypes ( ) ; return types . unboxedTypeOrType ( type ) ;
public class Session {

  /**
   * Adds a new connection to this session with the next free connection ID (if
   * the maximum number is not reached).
   *
   * <p>NOTE(review): when the connection limit is already reached, the method
   * logs a warning and returns the current {@code nextFreeConnectionID} without
   * creating a connection — callers cannot distinguish this from a successful
   * add; confirm this is intentional.
   *
   * @return the connection ID of the newly created connection.
   * @throws Exception if any error occurs.
   */
  protected final short addNewConnection() throws Exception {
    if (connections.size() < maxConnections) {
      final Connection connection =
          factory.getConnection(this, configuration, inetSocketAddress, nextFreeConnectionID);
      // Drive the new connection through the login request state machine first.
      connection.nextState(new LoginRequestState(connection, LoginStage.FULL_FEATURE_PHASE));
      // login phase successful, so we can add a new connection
      connections.add(connection);
      // only needed on the leading login connection
      if (connections.size() == 1) {
        phase.getCapacity(this, capacityInformations);
        if (connection.getSettingAsInt(OperationalTextKey.MAX_CONNECTIONS) > 1) {
          phase.login(this);
        }
      }
      return nextFreeConnectionID++;
    } else {
      LOGGER.warn("Unused new connection -> ignored!");
      return nextFreeConnectionID;
    }
  }
}
public class ManagedProperties { /** * Load the given property from the given file . If the property ' s value does * not currently equal the value in the file , the value will be changed . If * the property does not exist in the file or the defaults , it will be * removed . Also , after this call completes , { @ link # isModified ( String ) } * will return < code > false < / code > for this property . * @ param file * the file containing the current property values ( this file * does not have to exist , in which case the default value is * assumed ) * @ param propertyName * the property to update from the file ( this can be * < code > null < / code > , in which case all properties will be * updated ) * @ throws IOException * if there is a file system error while attempting to read the * file */ public void load ( File file , String propertyName ) throws IOException { } }
Properties tmpProperties = new Properties ( defaults ) ; /* * Do not throw a FileNotFoundException here because it is OK if the * file does not exist . In this case , default values will be used . */ if ( file . isFile ( ) ) { InputStream inputStream = new FileInputStream ( file ) ; try { tmpProperties . load ( inputStream ) ; } finally { inputStream . close ( ) ; } } /* * If a property name was specified , only load that property and leave * the rest alone . */ if ( propertyName != null ) { String value = tmpProperties . getProperty ( propertyName ) ; if ( value == null ) { properties . remove ( propertyName ) ; } else { setValue ( propertyName , value , true ) ; } return ; } Set < String > tmpPropertyNames = tmpProperties . stringPropertyNames ( ) ; gatekeeper . signIn ( ) ; try { /* * Throw away any property that is not in the file or in the * defaults . */ properties . keySet ( ) . retainAll ( tmpPropertyNames ) ; /* * Set every value to either the value read from the file or the * default . */ for ( String tmpPropertyName : tmpPropertyNames ) { setValue ( tmpPropertyName , tmpProperties . getProperty ( tmpPropertyName ) , true ) ; } } finally { gatekeeper . signOut ( ) ; }
public class LocationCriterionServiceLocator { /** * For the given interface , get the stub implementation . * If this service has no port for the given interface , * then ServiceException is thrown . */ public java . rmi . Remote getPort ( Class serviceEndpointInterface ) throws javax . xml . rpc . ServiceException { } }
try { if ( com . google . api . ads . adwords . axis . v201809 . cm . LocationCriterionServiceInterface . class . isAssignableFrom ( serviceEndpointInterface ) ) { com . google . api . ads . adwords . axis . v201809 . cm . LocationCriterionServiceSoapBindingStub _stub = new com . google . api . ads . adwords . axis . v201809 . cm . LocationCriterionServiceSoapBindingStub ( new java . net . URL ( LocationCriterionServiceInterfacePort_address ) , this ) ; _stub . setPortName ( getLocationCriterionServiceInterfacePortWSDDServiceName ( ) ) ; return _stub ; } } catch ( java . lang . Throwable t ) { throw new javax . xml . rpc . ServiceException ( t ) ; } throw new javax . xml . rpc . ServiceException ( "There is no stub implementation for the interface: " + ( serviceEndpointInterface == null ? "null" : serviceEndpointInterface . getName ( ) ) ) ;
public class Mappings { /** * Safely retrieves a value from a mapping . If the mapping is < code > null < / code > or returns a < code > null < / code > * value , the given fallback value is returned . * @ param mapping * the mapping . * @ param key * the key . * @ param fallback * the fallback value to return if either the mapping or the originally returned value are * < code > null < / code > . * @ return the value returned by the specified mapping , or the fallback value . */ public static < D , R > R safeGet ( Mapping < ? super D , R > mapping , D key , R fallback ) { } }
if ( mapping == null ) { return fallback ; } R val = mapping . get ( key ) ; if ( val == null ) { return fallback ; } return val ;
public class ForestDBViewStore {
    /**
     * Queries the view, with reducing or grouping as per the options.
     * Port of CBL_ForestDBViewStorage.m
     * - (CBLQueryIteratorBlock) reducedQueryWithOptions:(CBLQueryOptions*)options
     *                                            status:(CBLStatus*)outStatus
     *
     * @param options query options controlling grouping, reducing and filtering
     * @return the reduced/grouped rows
     * @throws CouchbaseLiteException on BAD_PARAM (reduce requested but no reducer),
     *         on index-open failure, or on I/O errors while iterating
     */
    @Override
    public List<QueryRow> reducedQuery(QueryOptions options) throws CouchbaseLiteException {
        Predicate<QueryRow> postFilter = options.getPostFilter();
        int groupLevel = options.getGroupLevel();
        boolean group = options.isGroup() || (groupLevel > 0);
        Reducer reduce = delegate.getReduce();
        if (options.isReduceSpecified()) {
            // Reduce was explicitly requested but this view has no reduce block.
            if (options.isReduce() && reduce == null) {
                Log.w(TAG, "Cannot use reduce option in view %s which has no reduce block defined", name);
                throw new CouchbaseLiteException(new Status(Status.BAD_PARAM));
            }
        }
        final List<Object> keysToReduce = new ArrayList<Object>(REDUCE_BATCH_SIZE);
        final List<Object> valuesToReduce = new ArrayList<Object>(REDUCE_BATCH_SIZE);
        // Single-element array so the "previous group key" can be mutated; only
        // lastKeys[0] is ever used.
        final Object[] lastKeys = new Object[1];
        lastKeys[0] = null;
        // NOTE(review): 'that' appears unused in this block — candidate for removal.
        final ForestDBViewStore that = this;
        final List<QueryRow> rows = new ArrayList<QueryRow>();
        try {
            openIndex();
        } catch (ForestException e) {
            throw new CouchbaseLiteException(e.code);
        }
        QueryIterator itr;
        try {
            itr = forestQuery(options);
            while (itr.next()) {
                byte[] bKey = itr.keyJSON();
                byte[] bValue = itr.valueJSON();
                Object keyObject = fromJSON(bKey, Object.class);
                Object valueObject = fromJSON(bValue, Object.class);
                if (group && !groupTogether(keyObject, lastKeys[0], groupLevel)) {
                    if (lastKeys[0] != null) {
                        // This pair starts a new group, so reduce & record the last one:
                        Object key = groupKey(lastKeys[0], groupLevel);
                        Object reduced = (reduce != null) ? reduce.reduce(keysToReduce, valuesToReduce, false) : null;
                        QueryRow row = new QueryRow(null, 0, key, reduced, null);
                        if (postFilter == null || postFilter.apply(row))
                            rows.add(row);
                        keysToReduce.clear();
                        valuesToReduce.clear();
                    }
                    lastKeys[0] = keyObject;
                }
                keysToReduce.add(keyObject);
                valuesToReduce.add(valueObject);
            }
        } catch (ForestException e) {
            // NOTE(review): this error is only logged, not rethrown — a partial result
            // is returned to the caller. Confirm whether that is intentional (the
            // IOException branch below DOES rethrow).
            Log.e(TAG, "Error in reducedQuery()", e);
        } catch (IOException e) {
            Log.e(TAG, "Error in reducedQuery()", e);
            throw new CouchbaseLiteException(Status.UNKNOWN);
        }
        // NOTE(review): keysToReduce is a final local, so the null check is redundant.
        if (keysToReduce != null && keysToReduce.size() > 0) {
            // Finish the last group (or the entire list, if no grouping):
            Object key = group ? groupKey(lastKeys[0], groupLevel) : null;
            Object reduced = (reduce != null) ? reduce.reduce(keysToReduce, valuesToReduce, false) : null;
            Log.v(TAG, String.format(Locale.ENGLISH, "Query %s: Reduced to key=%s, value=%s", name, key, reduced));
            QueryRow row = new QueryRow(null, 0, key, reduced, null);
            if (postFilter == null || postFilter.apply(row))
                rows.add(row);
        }
        return rows;
    }
}
public class Operand { /** * Gets the campaignSharedSet value for this Operand . * @ return campaignSharedSet */ public com . google . api . ads . adwords . axis . v201809 . cm . CampaignSharedSet getCampaignSharedSet ( ) { } }
return campaignSharedSet ;
public class AmazonECSClient { /** * Create a task set in the specified cluster and service . This is used when a service uses the * < code > EXTERNAL < / code > deployment controller type . For more information , see < a * href = " http : / / docs . aws . amazon . com / AmazonECS / latest / developerguide / deployment - types . html " > Amazon ECS Deployment * Types < / a > in the < i > Amazon Elastic Container Service Developer Guide < / i > . * @ param createTaskSetRequest * @ return Result of the CreateTaskSet operation returned by the service . * @ throws ServerException * These errors are usually caused by a server issue . * @ throws ClientException * These errors are usually caused by a client action , such as using an action or resource on behalf of a * user that doesn ' t have permissions to use the action or resource , or specifying an identifier that is not * valid . * @ throws InvalidParameterException * The specified parameter is invalid . Review the available parameters for the API request . * @ throws ClusterNotFoundException * The specified cluster could not be found . You can view your available clusters with < a > ListClusters < / a > . * Amazon ECS clusters are Region - specific . * @ throws UnsupportedFeatureException * The specified task is not supported in this Region . * @ throws PlatformUnknownException * The specified platform version does not exist . * @ throws PlatformTaskDefinitionIncompatibilityException * The specified platform version does not satisfy the task definition ' s required capabilities . * @ throws AccessDeniedException * You do not have authorization to perform the requested action . * @ throws ServiceNotFoundException * The specified service could not be found . You can view your available services with < a > ListServices < / a > . * Amazon ECS services are cluster - specific and Region - specific . * @ throws ServiceNotActiveException * The specified service is not active . You can ' t update a service that is inactive . 
If you have previously * deleted a service , you can re - create it with < a > CreateService < / a > . * @ sample AmazonECS . CreateTaskSet * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ecs - 2014-11-13 / CreateTaskSet " target = " _ top " > AWS API * Documentation < / a > */ @ Override public CreateTaskSetResult createTaskSet ( CreateTaskSetRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateTaskSet ( request ) ;
public class OpenstackMachineConfigurator { /** * Checks whether a VM is created . * @ return true if it is online , false otherwise */ private boolean checkVmIsOnline ( ) { } }
String zoneName = OpenstackIaasHandler . findZoneName ( this . novaApi , this . targetProperties ) ; Server server = this . novaApi . getServerApiForZone ( zoneName ) . get ( this . machineId ) ; return Status . ACTIVE . equals ( server . getStatus ( ) ) ;
public class MethodKit { /** * 获取方法用于缓存的 key */ private static Long getMethodKey ( Class < ? > targetClass , String methodName , Class < ? > [ ] argTypes ) { } }
return MethodKeyBuilder . instance . getMethodKey ( targetClass , methodName , argTypes ) ;
public class Cursor {
    /**
     * Synchronized. Get the next entry in the list (but do not move the cursor to it).
     *
     * @return the next entry in the list, or {@code null} if the cursor is at the
     *         bottom of the list
     */
    public synchronized Entry getNextEntry() {
        if (tc.isEntryEnabled())
            SibTr.entry(tc, "getNextEntry");
        checkEntryParent();
        Entry nextEntry = null;
        synchronized (parentList) {
            if (atTop) {
                // if the cursor is at the top of the list, return the first entry in the list
                nextEntry = parentList.first;
            } else if (!atBottom) {
                // otherwise just return the next entry in the list
                nextEntry = current.getNext();
            }
            // if the cursor is at the bottom of the list then we should
            // just drop through and the next entry is null
        }
        if (tc.isEntryEnabled())
            SibTr.exit(tc, "getNextEntry", nextEntry);
        return nextEntry;
    }
}
public class MicroMetaDao {
    /**
     * Updates rows of {@code tableName}, applying {@code setStr} to every row matching
     * {@code condition}. (The original comment was mojibake; presumably "update object
     * by condition" — TODO confirm intent with the author.)
     *
     * NOTE(review): the SQL statement is built by string concatenation. If any of the
     * arguments can contain untrusted input, this is vulnerable to SQL injection;
     * consider a parameterized form.
     *
     * @param tableName the table to update
     * @param condition SQL WHERE-clause body (without the "where" keyword)
     * @param setStr SQL SET-clause body
     * @return the number of rows affected, as reported by JdbcTemplate.update
     */
    public int updateObjByCondition(String tableName, String condition, String setStr) {
        JdbcTemplate jdbcTemplate = getMicroJdbcTemplate();
        String timeName = getTimeName();
        // When automatic operation timestamps are enabled, prepend an update_time assignment.
        if (autoOperTime) {
            setStr = "update_time=" + timeName + "," + setStr;
        }
        String sql = "update " + tableName + " set " + setStr + " where " + condition;
        logger.debug(sql);
        Integer retStatus = jdbcTemplate.update(sql);
        return retStatus;
    }
}
public class DynamicByteBuffer { /** * Writes a range of bytes from the specified array . * @ param bytes Array containing the bytes to be written . * @ param start Start of the range ( inclusive ) . * @ param end End of the range ( exclusive ) . */ public void put ( byte [ ] bytes , int start , int end ) { } }
byte [ ] buffer = chunk ( ) ; for ( int i = start ; i < end ; i ++ ) { if ( position == buffer . length ) { buffer = alloc ( ) ; } buffer [ position ++ ] = bytes [ i ] ; size ++ ; }
public class BeadledomClientModule { /** * Creates a new instance of { @ code BeadledomClientModule } for the specified binding * annotation . */ public static BeadledomClientModule with ( Class < ? extends Annotation > clientBindingAnnotation ) { } }
BindingAnnotations . checkIsBindingAnnotation ( clientBindingAnnotation ) ; return new BeadledomClientModule ( clientBindingAnnotation ) ;
public class PersistableTransfer { /** * Writes the serialized representation of the paused transfer state to the * given < code > OutputStream < / code > . Caller of this method should explicitly * close the < code > OutputStream < / code > . */ public final void serialize ( OutputStream out ) throws IOException { } }
out . write ( Jackson . toJsonString ( this ) . getBytes ( UTF8 ) ) ; out . flush ( ) ;
public class StringReplacer { /** * Applies string replacements using the pattern - replacement pairs provided * by the given map ( associative array ) . The longest matching pattern is * used for selecting an appropriate replacement . * @ param source the source string * @ param patternReplacements pattern - replacement pairs */ public static String replace ( String source , Map < String , String > patternReplacements ) { } }
if ( source == null ) { return null ; } int mapSize = patternReplacements . size ( ) ; String [ ] patterns = new String [ mapSize ] ; String [ ] replacements = new String [ mapSize ] ; Iterator < Map . Entry < String , String > > it = patternReplacements . entrySet ( ) . iterator ( ) ; for ( int i = 0 ; it . hasNext ( ) ; i ++ ) { Map . Entry < String , String > entry = it . next ( ) ; patterns [ i ] = entry . getKey ( ) ; replacements [ i ] = entry . getValue ( ) ; } return replace ( source , patterns , replacements ) ;
public class CmsFlexController { /** * Provides access to URI of a VFS resource that caused an exception that might have occurred in a complex include scenario . < p > * @ param req the current request * @ return to URI of a VFS resource that caused an exception , or < code > null < / code > * @ see # getThrowableResourceUri ( ) */ public static String getThrowableResourceUri ( ServletRequest req ) { } }
CmsFlexController controller = ( CmsFlexController ) req . getAttribute ( ATTRIBUTE_NAME ) ; if ( controller != null ) { return controller . getThrowableResourceUri ( ) ; } else { return null ; }
public class Dictionary { /** * 返回词典标签 * @ param word * @ return 词典列表 */ public boolean contains ( String word ) { } }
if ( word . length ( ) > maxLen ) return false ; return dict . contains ( word ) ;
public class SelectQuery { /** * Returns an < tt > SelectQuery < / tt > for the algebraic expression and optional dataset * specified . * @ param expression * the algebraic expression for the query * @ param dataset * the dataset optionally associated to the query * @ return the corresponding < tt > SelectQuery < / tt > object * @ throws ParseException * in case the supplied algebraic expression does not denote a valid SPARQL SELECT * query */ public static SelectQuery from ( final TupleExpr expression , @ Nullable final Dataset dataset ) throws ParseException { } }
Preconditions . checkNotNull ( expression ) ; try { // Sesame rendering facilities are definitely broken , so we use our own final String string = new SPARQLRenderer ( null , true ) . render ( expression , dataset ) ; SelectQuery query = CACHE . getIfPresent ( string ) ; if ( query == null ) { query = new SelectQuery ( string , expression , dataset ) ; CACHE . put ( string , query ) ; } return query ; } catch ( final Exception ex ) { throw new ParseException ( expression . toString ( ) , "The supplied algebraic expression does not denote a valid SPARQL query" , ex ) ; }
public class ListManagementImagesImpl { /** * Add an image to the list with list Id equal to list Id passed . * @ param listId List Id of the image list . * @ param addImageOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws APIErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the Image object if successful . */ public Image addImage ( String listId , AddImageOptionalParameter addImageOptionalParameter ) { } }
return addImageWithServiceResponseAsync ( listId , addImageOptionalParameter ) . toBlocking ( ) . single ( ) . body ( ) ;
public class Serializer { /** * Resizes original byte array if the object turns out to be bigger than expected . * @ param bytesAmount amount of bytes that needs to be added to the array . */ void ensureCapacity ( final int bytesAmount ) { } }
if ( currentByteArrayIndex + bytesAmount >= serializedData . length ) { // + 1 ensures proper behavior for 0 estimate . int newSerializedDataArrayLength = ( serializedData . length + 1 ) * 2 ; while ( currentByteArrayIndex + bytesAmount >= newSerializedDataArrayLength ) { newSerializedDataArrayLength = newSerializedDataArrayLength * 2 ; } final byte [ ] newSerializedDataArray = new byte [ newSerializedDataArrayLength ] ; // Arrays . copyOf is not emulated on GWT . System . arraycopy ( serializedData , 0 , newSerializedDataArray , 0 , currentByteArrayIndex ) ; serializedData = newSerializedDataArray ; }
public class Node {
    /**
     * Atomically compare-and-set the {@code value} field.
     *
     * @param cmp the expected current value
     * @param val the new value
     * @return true if the field held {@code cmp} and was updated to {@code val}
     */
    boolean casValue(Object cmp, Object val) {
        // U presumably is an Unsafe instance and VALUE the field offset of 'value'
        // — declared elsewhere in this class; confirm against the full source.
        return U.compareAndSwapObject(this, VALUE, cmp, val);
    }
}
public class TileSetBundle { /** * documentation inherited from interface */ public BufferedImage loadImage ( String path ) throws IOException { } }
return _bundle . getImageResource ( path , path . endsWith ( FastImageIO . FILE_SUFFIX ) ) ;
public class CrnkBoot { /** * Set the { @ link QuerySpecUrlMapper } to use to parse and handle query parameters . * When invoked , overwrites previous QuerySpecDeserializers . */ public void setUrlMapper ( QuerySpecUrlMapper urlMapper ) { } }
checkNotConfiguredYet ( ) ; PreconditionUtil . verify ( urlMapper != null , "urlMapper parameter must not be null" ) ; moduleRegistry . setUrlMapper ( urlMapper ) ;
public class VisualDepthOps { /** * Creates a point cloud from a depth image . * @ param param Intrinsic camera parameters for depth image * @ param depth depth image . each value is in millimeters . * @ param cloud Output point cloud */ public static void depthTo3D ( CameraPinholeBrown param , GrayU16 depth , FastQueue < Point3D_F64 > cloud ) { } }
cloud . reset ( ) ; Point2Transform2_F64 p2n = LensDistortionFactory . narrow ( param ) . undistort_F64 ( true , false ) ; Point2D_F64 n = new Point2D_F64 ( ) ; for ( int y = 0 ; y < depth . height ; y ++ ) { int index = depth . startIndex + y * depth . stride ; for ( int x = 0 ; x < depth . width ; x ++ ) { int mm = depth . data [ index ++ ] & 0xFFFF ; // skip pixels with no depth information if ( mm == 0 ) continue ; // this could all be precomputed to speed it up p2n . compute ( x , y , n ) ; Point3D_F64 p = cloud . grow ( ) ; p . z = mm ; p . x = n . x * p . z ; p . y = n . y * p . z ; } }
public class LocationExtensionOperand {
    /**
     * Sets the radius value for this LocationExtensionOperand.
     *
     * @param radius distance in units specifying the radius around targeted locations.
     *        Only long and double are supported constant types. This field is required
     *        and should not be {@code null}.
     */
    public void setRadius(com.google.api.ads.adwords.axis.v201809.cm.ConstantOperand radius) {
        this.radius = radius;
    }
}
public class UserGroupInformation { /** * Read a { @ link UserGroupInformation } from conf */ public static UserGroupInformation readFrom ( Configuration conf ) throws IOException { } }
try { return UnixUserGroupInformation . readFromConf ( conf , UnixUserGroupInformation . UGI_PROPERTY_NAME ) ; } catch ( LoginException e ) { throw ( IOException ) new IOException ( ) . initCause ( e ) ; }
public class HttpClientFactory { /** * Closes messaging clients or requests a replacement to the reconfigurable client . * @ param shutdown true to close , false to request . . . */ private void resetClients ( boolean shutdown ) { } }
// Only agent clients need to be reconfigured . // Make fresh snapshots of the CLIENTS , as we don ' t want to reconfigure them while holding the lock . final List < HttpAgentClient > clients ; synchronized ( this ) { // Get the snapshot . clients = new ArrayList < > ( this . agentClients ) ; // Remove the clients , new ones will be created if necessary . this . agentClients . clear ( ) ; } // Now reconfigure all the CLIENTS . for ( HttpAgentClient client : clients ) { try { final ReconfigurableClient < ? > reconfigurable = client . getReconfigurableClient ( ) ; if ( shutdown ) reconfigurable . closeConnection ( ) ; else reconfigurable . switchMessagingType ( HttpConstants . FACTORY_HTTP ) ; } catch ( Throwable t ) { // Warn but continue to reconfigure the next CLIENTS ! this . logger . warning ( "A client has thrown an exception on reconfiguration: " + client ) ; Utils . logException ( this . logger , new RuntimeException ( t ) ) ; } }
public class BundleMonitorExtension { /** * Uninstalls the given bundle . * @ param id the bundle ' s id * @ return OK if the bundle is updated , BAD _ REQUEST if an error occurs when the bundle is uninstalled , * NOT _ FOUND if there are no bundles with the given id . */ @ Route ( method = HttpMethod . DELETE , uri = "/{id}" ) public Result uninstallBundle ( @ Parameter ( "id" ) long id ) { } }
final Bundle bundle = context . getBundle ( id ) ; if ( bundle == null ) { return notFound ( "Bundle " + id + " not found" ) ; } else { return async ( new Callable < Result > ( ) { @ Override public Result call ( ) throws Exception { try { logger ( ) . info ( "Uninstalling bundle {}" , bundle . getSymbolicName ( ) ) ; bundle . uninstall ( ) ; return ok ( ) ; } catch ( BundleException e ) { logger ( ) . error ( "Cannot uninstall bundle {}" , bundle . getSymbolicName ( ) , e ) ; return badRequest ( e ) ; } } } ) ; }
public class QueueDeliverBuilderImpl { /** * @ Override * public OutboxDeliver < M > createOutbox ( Deliver < M > deliver ) * return _ outboxFactory . createOutbox ( deliver ) ; */ @ Override public QueueDeliver < M > build ( Deliver < M > deliver ) { } }
validateBuilder ( ) ; if ( deliver == null ) { throw new IllegalArgumentException ( L . l ( "'processors' is required" ) ) ; } QueueRing < M > queue = buildQueue ( ) ; Executor executor = createExecutor ( ) ; ClassLoader loader = getClassLoader ( ) ; // OutboxDeliver < M > outbox = _ outboxFactory . createOutbox ( deliver ) ; WorkerDeliverSingleThread < M > worker = new WorkerDeliverSingleThread < M > ( deliver , _outboxContext , executor , loader , queue ) ; return new QueueDeliverImpl < M > ( queue , worker ) ;
public class AbstractCacheableLockManager { /** * Remove lock , used by Lock remover . * @ param nodeIdentifier String */ protected void removeLock ( String nodeIdentifier ) { } }
try { NodeData nData = ( NodeData ) dataManager . getItemData ( nodeIdentifier ) ; // Skip removing , because that node was removed in other node of cluster . if ( nData == null ) { return ; } PlainChangesLog changesLog = new PlainChangesLogImpl ( new ArrayList < ItemState > ( ) , IdentityConstants . SYSTEM , ExtendedEvent . UNLOCK ) ; ItemData lockOwner = copyItemData ( ( PropertyData ) dataManager . getItemData ( nData , new QPathEntry ( Constants . JCR_LOCKOWNER , 1 ) , ItemType . PROPERTY ) ) ; // Skip removing , because that lock was removed in other node of cluster . if ( lockOwner == null ) { return ; } changesLog . add ( ItemState . createDeletedState ( lockOwner ) ) ; ItemData lockIsDeep = copyItemData ( ( PropertyData ) dataManager . getItemData ( nData , new QPathEntry ( Constants . JCR_LOCKISDEEP , 1 ) , ItemType . PROPERTY ) ) ; // Skip removing , because that lock was removed in other node of cluster . if ( lockIsDeep == null ) { return ; } changesLog . add ( ItemState . createDeletedState ( lockIsDeep ) ) ; // lock probably removed by other thread if ( lockOwner == null && lockIsDeep == null ) { return ; } dataManager . save ( new TransactionChangesLog ( changesLog ) ) ; } catch ( JCRInvalidItemStateException e ) { // Skip property not found in DB , because that lock property was removed in other node of cluster . if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "The propperty was removed in other node of cluster." , e ) ; } } catch ( RepositoryException e ) { LOG . error ( "Error occur during removing lock" + e . getLocalizedMessage ( ) , e ) ; }
public class FilterJoinVisitor { /** * Executes the pipeline of async actions to compute the terms for this node . */ protected void executeAsyncOperation ( final FilterJoinNode node ) { } }
logger . debug ( "Executing async actions" ) ; node . setState ( FilterJoinNode . State . RUNNING ) ; // set state before execution to avoid race conditions with listener NodePipelineManager pipeline = new NodePipelineManager ( ) ; pipeline . addListener ( new NodePipelineListener ( ) { @ Override public void onSuccess ( ) { node . setState ( FilterJoinNode . State . COMPLETED ) ; // set state before unblocking the queue to avoid race conditions FilterJoinVisitor . this . unblock ( ) ; } @ Override public void onFailure ( Throwable e ) { node . setFailure ( e ) ; node . setState ( FilterJoinNode . State . COMPLETED ) ; // set state before unblocking the queue to avoid race conditions FilterJoinVisitor . this . unblock ( ) ; } } ) ; // Adds the list of tasks to be executed pipeline . addTask ( new IndicesVersionTask ( ) ) ; pipeline . addTask ( new CacheLookupTask ( ) ) ; pipeline . addTask ( new CardinalityEstimationTask ( ) ) ; pipeline . addTask ( new TermsByQueryTask ( ) ) ; // Starts the execution of the pipeline pipeline . execute ( new NodeTaskContext ( client , node , this ) ) ;
public class AbstractChartRenderer { /** * Draws label text and label background if isValueLabelBackgroundEnabled is true . */ protected void drawLabelTextAndBackground ( Canvas canvas , char [ ] labelBuffer , int startIndex , int numChars , int autoBackgroundColor ) { } }
final float textX ; final float textY ; if ( isValueLabelBackgroundEnabled ) { if ( isValueLabelBackgroundAuto ) { labelBackgroundPaint . setColor ( autoBackgroundColor ) ; } canvas . drawRect ( labelBackgroundRect , labelBackgroundPaint ) ; textX = labelBackgroundRect . left + labelMargin ; textY = labelBackgroundRect . bottom - labelMargin ; } else { textX = labelBackgroundRect . left ; textY = labelBackgroundRect . bottom ; } canvas . drawText ( labelBuffer , startIndex , numChars , textX , textY , labelPaint ) ;
public class Hdf5Archive { /** * Get list of data sets from group path . * @ param groups Array of zero or more ancestor groups from root to parent . * @ return List of HDF5 data set names */ public List < String > getDataSets ( String ... groups ) { } }
synchronized ( Hdf5Archive . LOCK_OBJECT ) { if ( groups . length == 0 ) return getObjects ( this . file , H5O_TYPE_DATASET ) ; Group [ ] groupArray = openGroups ( groups ) ; List < String > ls = getObjects ( groupArray [ groupArray . length - 1 ] , H5O_TYPE_DATASET ) ; closeGroups ( groupArray ) ; return ls ; }
public class NearestVertexSnapAlgorithm { /** * Return a new and sorted list of coordinates . They should be sorted by their Y values . */ private List < Coordinate > sortY ( List < Coordinate > coordinates ) { } }
List < Coordinate > sorted = new ArrayList < Coordinate > ( coordinates ) ; Collections . sort ( sorted , new YComparator ( ) ) ; return sorted ;
public class CmsGalleryService { /** * Returns the workplace locale from the current user ' s settings . < p > * @ return the workplace locale */ Locale getWorkplaceLocale ( ) { } }
if ( m_wpLocale == null ) { m_wpLocale = OpenCms . getWorkplaceManager ( ) . getWorkplaceLocale ( getCmsObject ( ) ) ; } return m_wpLocale ;
public class ApiOvhMe { /** * Add a partition in this partitioning scheme * REST : POST / me / installationTemplate / { templateName } / partitionScheme / { schemeName } / partition * @ param type [ required ] * @ param volumeName [ required ] The volume name needed for proxmox distribution * @ param raid [ required ] * @ param mountpoint [ required ] partition mount point * @ param size [ required ] size of partition in Mb , 0 = > rest of the space * @ param step [ required ] * @ param filesystem [ required ] Partition filesytem * @ param templateName [ required ] This template name * @ param schemeName [ required ] name of this partitioning scheme */ public void installationTemplate_templateName_partitionScheme_schemeName_partition_POST ( String templateName , String schemeName , OvhTemplateOsFileSystemEnum filesystem , String mountpoint , Long raid , Long size , Long step , OvhTemplatePartitionTypeEnum type , String volumeName ) throws IOException { } }
String qPath = "/me/installationTemplate/{templateName}/partitionScheme/{schemeName}/partition" ; StringBuilder sb = path ( qPath , templateName , schemeName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "filesystem" , filesystem ) ; addBody ( o , "mountpoint" , mountpoint ) ; addBody ( o , "raid" , raid ) ; addBody ( o , "size" , size ) ; addBody ( o , "step" , step ) ; addBody ( o , "type" , type ) ; addBody ( o , "volumeName" , volumeName ) ; exec ( qPath , "POST" , sb . toString ( ) , o ) ;
public class SDBaseOps { /** * TODO doc string * @ param name * @ param x * @ param y * @ param dimensions * @ return */ public SDVariable dot ( String name , SDVariable x , SDVariable y , int ... dimensions ) { } }
SDValidation . validateNumerical ( "dot" , x , y ) ; SDVariable ret = f ( ) . dot ( x , y , dimensions ) ; return updateVariableNameAndReference ( ret , name ) ;
public class EmailApi { /** * Save email information to UCS * Save email information of interaction specified in the id path parameter * @ param id id of interaction to save ( required ) * @ param saveData Request parameters . ( optional ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > saveEmailWithHttpInfo ( String id , SaveData saveData ) throws ApiException { } }
com . squareup . okhttp . Call call = saveEmailValidateBeforeCall ( id , saveData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class StandardScheduler { /** * Starts scheduler thread that pages pageables . */ public void start ( ) { } }
if ( ( schedulerThread == null || ! schedulerThread . isAlive ( ) ) && interval > 0 ) { schedulerThread = new Thread ( this ) ; // act as internal request to be able to bind internal process // Request currentRequest = application . getCurrentRequest ( ) ; // currentRequest . startRepresentingRequest ( initialRequest ) ; // application . bindInternalProcess ( schedulerThread , " scheduler " ) ; // currentRequest . stopRepresentingRequest ( ) ; schedulerThread . start ( ) ; System . out . println ( new LogEntry ( "scheduler thread started..." ) ) ; }
public class LookupDeveloperIdentityRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( LookupDeveloperIdentityRequest lookupDeveloperIdentityRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( lookupDeveloperIdentityRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( lookupDeveloperIdentityRequest . getIdentityPoolId ( ) , IDENTITYPOOLID_BINDING ) ; protocolMarshaller . marshall ( lookupDeveloperIdentityRequest . getIdentityId ( ) , IDENTITYID_BINDING ) ; protocolMarshaller . marshall ( lookupDeveloperIdentityRequest . getDeveloperUserIdentifier ( ) , DEVELOPERUSERIDENTIFIER_BINDING ) ; protocolMarshaller . marshall ( lookupDeveloperIdentityRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( lookupDeveloperIdentityRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DetachedMockFactory { /** * Creates a spy with the specified options and type . The mock name will be the types simple name . * Example : * < pre > * def person = Spy ( Person , name : " myPerson " ) / / type is Person . class , name is " myPerson " * < / pre > * @ param options optional options for creating the spy * @ param type the class type of the spy * @ param < T > the class type of the spy * @ return a spy with the specified options and type */ @ Override public < T > T Spy ( Map < String , Object > options , Class < T > type ) { } }
return createMock ( inferNameFromType ( type ) , type , MockNature . SPY , options ) ;
public class TodoActivity { /** * Called by TasksModel when it receives a replication complete callback . * TasksModel takes care of calling this on the main thread . */ void replicationComplete ( ) { } }
reloadTasksFromModel ( ) ; Toast . makeText ( getApplicationContext ( ) , R . string . replication_completed , Toast . LENGTH_LONG ) . show ( ) ; dismissDialog ( DIALOG_PROGRESS ) ;
public class Launch { /** * Main method that runs the example . * @ param args command line parameters . */ public static void main ( final String [ ] args ) { } }
try { final Configuration config = getClientConfiguration ( args ) ; LOG . log ( Level . INFO , "Configuration:\n--\n{0}--" , Configurations . toString ( config , true ) ) ; final Injector injector = Tang . Factory . getTang ( ) . newInjector ( config ) ; final SuspendClient client = injector . getInstance ( SuspendClient . class ) ; client . submit ( ) ; client . waitForCompletion ( ) ; LOG . info ( "Done!" ) ; } catch ( final BindException | IOException | InjectionException ex ) { LOG . log ( Level . SEVERE , "Cannot launch: configuration error" , ex ) ; } catch ( final Exception ex ) { LOG . log ( Level . SEVERE , "Cleanup error" , ex ) ; }
public class KxReactiveStreams { /** * exposes a publisher on the network via kontraktor ' s generic remoting . Usually not called directly ( see EventSink + RxPublisher ) * @ param source * @ param networRxPublisher - the appropriate network publisher ( TCP , TCPNIO , WS ) * @ param disconCB - called once a client disconnects / stops . can be null * @ param < OUT > * @ return */ public < OUT > IPromise serve ( Publisher < OUT > source , ActorPublisher networRxPublisher , boolean closeConnectionOnCompleteOrError , Consumer < Actor > disconCB ) { } }
if ( networRxPublisher . getClass ( ) . getSimpleName ( ) . equals ( "HttpPublisher" ) ) { throw new RuntimeException ( "Http long poll cannot be supported. Use WebSockets instead." ) ; } if ( source instanceof KxPublisherActor == false || source instanceof ActorProxy == false ) { Processor < OUT , OUT > proc = newAsyncProcessor ( a -> a ) ; // we need a queue before going to network source . subscribe ( proc ) ; source = proc ; } ( ( KxPublisherActor ) source ) . setCloseOnComplete ( closeConnectionOnCompleteOrError ) ; return networRxPublisher . facade ( ( Actor ) source ) . publish ( disconCB ) ;
public class AutoFormTaskValuesProvider {
    /**
     * Builds the list of values defined by the task's VARIABLES attribute
     * (i.e. the statically declared values, minus runtime values).
     * <p>
     * The attribute is parsed as a 5-column table; from the setter calls the
     * columns are: name, label, display, sequence, indexKey.
     * @param runtimeContext context used to read the task attribute and look up variable types
     * @return the declared values (empty list if the attribute is unset)
     */
    protected List<Value> getDefinedValues(TaskRuntimeContext runtimeContext) {
        List<Value> values = new ArrayList<Value>();
        String varAttr = runtimeContext.getTaskAttribute(TaskAttributeConstant.VARIABLES);
        if (!StringHelper.isEmpty(varAttr)) {
            // Rows separated by ';', columns by ',', 5 columns per row.
            List<String[]> parsed = StringHelper.parseTable(varAttr, ',', ';', 5);
            for (String[] one : parsed) {
                String name = one[0];
                Value value = new Value(name);
                // Column 1: optional label.
                if (one[1] != null && !one[1].isEmpty())
                    value.setLabel(one[1]);
                // Column 2: display mode (decoded by Value).
                value.setDisplay(Value.getDisplay(one[2]));
                // Column 3: optional ordering sequence.
                if (one[3] != null && !one[3].isEmpty())
                    value.setSequence(Integer.parseInt(one[3]));
                // Column 4: optional index key.
                if (one[4] != null && !one[4].isEmpty())
                    value.setIndexKey(one[4]);
                if (value.isExpression()) {
                    // Expressions always evaluate to strings.
                    value.setType(String.class.getName());
                } else {
                    // Otherwise take the type from the process variable definition, if any.
                    Variable var = runtimeContext.getProcess().getVariable(name);
                    if (var != null)
                        value.setType(var.getType());
                }
                values.add(value);
            }
        }
        return values;
    }
}
public class TextUtil {
    /**
     * Replies a base 26 encoding string for the given number.
     * <p>
     * The encoding is bijective base-26 over 'A'..'Z' (spreadsheet-column
     * style): 0 -> "A", 25 -> "Z", 26 -> "AA", 27 -> "AB", 701 -> "ZZ".
     * @param number the non-negative number to encode.
     * @return the base 26 encoding.
     * @throws IllegalArgumentException if {@code number} is negative
     *     (previously this silently produced characters below {@code 'A'}).
     * @since 4.0
     */
    public static String encodeBase26(int number) {
        if (number < 0) {
            // Guard: a negative remainder would emit characters below 'A'.
            throw new IllegalArgumentException("number must be non-negative: " + number);
        }
        final int radix = 26;
        final StringBuilder value = new StringBuilder();
        int code = number;
        do {
            final int rest = code % radix;
            // Prepend because we generate least-significant digit first.
            value.insert(0, (char) ('A' + rest));
            // The "- 1" makes the encoding bijective (26 -> "AA", not "BA").
            code = code / radix - 1;
        } while (code >= 0);
        return value.toString();
    }
}
public class Barrier { /** * Adds multiple slots to this barrier ' s waiting - on list representing a single * Job argument of type list . * @ param slotList A list of slots that will be added to the barrier and used * as the elements of the list Job argument . * @ throws IllegalArgumentException if intialSlot is not filled . */ public void addListArgumentSlots ( Slot initialSlot , List < Slot > slotList ) { } }
if ( ! initialSlot . isFilled ( ) ) { throw new IllegalArgumentException ( "initialSlot must be filled" ) ; } verifyStateBeforAdd ( initialSlot ) ; int groupSize = slotList . size ( ) + 1 ; addSlotDescriptor ( new SlotDescriptor ( initialSlot , groupSize ) ) ; for ( Slot slot : slotList ) { addSlotDescriptor ( new SlotDescriptor ( slot , groupSize ) ) ; }
public class MD5MD5CRC32FileChecksum {
    /**
     * Serializes this checksum to the given output.
     * <p>
     * Field order (int, long, then the MD5 digest) is the wire format;
     * the matching read side must consume fields in the same order.
     * {@inheritDoc}
     */
    public void write(DataOutput out) throws IOException {
        out.writeInt(bytesPerCRC);   // bytes covered by each CRC
        out.writeLong(crcPerBlock);  // CRC count per block
        md5.write(out);              // the aggregated MD5 digest
    }
}
public class SourceStreamManager {
    /**
     * Wait for the current stream to quiesce, remove it
     * from storage, then create a replacement stream ID.
     * We assume that all production has already been
     * stopped and that no new production will occur until
     * after the flush has been completed. The reference
     * passed to this method contains the callback for
     * signalling when the flush has completed.
     * @param complete An instance of the FlushComplete interface
     *     which we'll invoke when the flush of the current stream
     *     has completed.
     * @throws FlushAlreadyInProgressException if someone calls
     *     this method but a flush is already in progress.
     */
    public void startFlush(FlushComplete complete)
        throws FlushAlreadyInProgressException, SIRollbackException,
               SIConnectionLostException, SIResourceException, SIErrorException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "startFlush", new Object[] { complete });
        // Synchronize here to get a consistent view of "flushInProgress"
        synchronized (this) {
            if (flushInProgress != null) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                    SibTr.exit(tc, "startFlush", "FlushAlreadyInProgressException");
                // Somebody already called us so bail
                throw new FlushAlreadyInProgressException();
            }
            // Otherwise, update flush in progress and
            // flush immediately if possible. Note that
            // this means the caller may receive the callback
            // on their own thread.
            flushInProgress = complete;
        }
        // NOTE(review): attemptFlush is deliberately outside the synchronized
        // block — presumably so the completion callback is not invoked while
        // holding this object's monitor; confirm before changing.
        attemptFlush();
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "startFlush");
    }
}
public class Client { /** * Reset the state of the environment and return an initial observation . * @ return initial observation */ public O reset ( ) { } }
JsonNode resetRep = ClientUtils . post ( url + ENVS_ROOT + instanceId + RESET , new JSONObject ( ) ) ; return observationSpace . getValue ( resetRep . getObject ( ) , "observation" ) ;
public class DataBuilder { /** * Appends a copy of the given { @ code Device . Builder } to the internal data structure . * @ param deviceBuilder * { @ code Device . Builder } to be copied and appended * @ return this { @ code Builder } , for chaining * @ throws net . sf . qualitycheck . exception . IllegalNullArgumentException * if the given argument is { @ code null } * @ throws net . sf . qualitycheck . exception . IllegalStateOfArgumentException * if the ID of the given builder is invalid * @ throws net . sf . qualitycheck . exception . IllegalStateOfArgumentException * if a builder with the same ID already exists */ @ Nonnull public DataBuilder appendDeviceBuilder ( @ Nonnull final Device . Builder deviceBuilder ) { } }
Check . notNull ( deviceBuilder , "deviceBuilder" ) ; Check . notNegative ( deviceBuilder . getId ( ) , "deviceBuilder.getId()" ) ; if ( deviceBuilders . containsKey ( deviceBuilder . getId ( ) ) ) { throw new IllegalStateOfArgumentException ( "The device builder '" + deviceBuilder . getName ( ) + "' is already in the map." ) ; } final Device . Builder builder = deviceBuilder . copy ( ) ; deviceBuilders . put ( builder . getId ( ) , builder ) ; return this ;
public class IfcGridImpl {
    /**
     * Returns the W axes of this grid via the EMF reflective getter
     * (resolving proxies).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<IfcGridAxis> getWAxes() {
        // Generated EMF accessor: delegates to eGet with proxy resolution enabled.
        return (EList<IfcGridAxis>) eGet(Ifc4Package.Literals.IFC_GRID__WAXES, true);
    }
}
public class Logging { /** * Checks to see within the flags if a reload , i . e . not a full restart , is required . * @ param flags the flags to check * @ return { @ code true } if a reload is required , otherwise { @ code false } */ public static boolean requiresReload ( final Set < Flag > flags ) { } }
return flags . contains ( Flag . RESTART_ALL_SERVICES ) || flags . contains ( Flag . RESTART_RESOURCE_SERVICES ) ;
public class DelegationSet {
    /**
     * A complex type that contains a list of the authoritative name servers for a hosted zone or for a reusable
     * delegation set.
     * @return A complex type that contains a list of the authoritative name servers for a hosted zone or for a reusable
     *     delegation set. Never {@code null}: lazily initialized to an empty list on first access.
     */
    public java.util.List<String> getNameServers() {
        // Generated SDK accessor: lazy-init so callers never see null.
        if (nameServers == null) {
            nameServers = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return nameServers;
    }
}
public class Agent { /** * Helper to load config files specified by the user . */ static Config loadConfig ( String userResources ) { } }
Config config = ConfigFactory . load ( "agent" ) ; if ( userResources != null && ! "" . equals ( userResources ) ) { for ( String userResource : userResources . split ( "[,\\s]+" ) ) { if ( userResource . startsWith ( "file:" ) ) { File file = new File ( userResource . substring ( "file:" . length ( ) ) ) ; LOGGER . info ( "loading configuration from file: {}" , file ) ; Config user = ConfigFactory . parseFile ( file ) ; config = user . withFallback ( config ) ; } else { LOGGER . info ( "loading configuration from resource: {}" , userResource ) ; Config user = ConfigFactory . parseResourcesAnySyntax ( userResource ) ; config = user . withFallback ( config ) ; } } } return config . resolve ( ) . getConfig ( "netflix.spectator.agent" ) ;
public class ReflectionDatabaseFactory { /** * Try to create the database using a static create ( ) method . * @ return the database , or null if there is no static create ( ) method * @ throws CheckedAnalysisException */ private E createUsingStaticCreateMethod ( ) throws CheckedAnalysisException { } }
Method createMethod ; try { createMethod = databaseClass . getMethod ( "create" , new Class [ 0 ] ) ; } catch ( NoSuchMethodException e ) { return null ; } if ( ! Modifier . isStatic ( createMethod . getModifiers ( ) ) ) { return null ; } if ( createMethod . getReturnType ( ) != databaseClass ) { return null ; } try { return databaseClass . cast ( createMethod . invoke ( null , new Object [ 0 ] ) ) ; } catch ( InvocationTargetException e ) { throw new CheckedAnalysisException ( "Could not create " + databaseClass . getName ( ) , e ) ; } catch ( IllegalAccessException e ) { throw new CheckedAnalysisException ( "Could not create " + databaseClass . getName ( ) , e ) ; }
public class BatchGetOnPremisesInstancesResult {
    /**
     * Information about the on-premises instances.
     * @return Information about the on-premises instances. Never {@code null}:
     *     lazily initialized to an empty list on first access.
     */
    public java.util.List<InstanceInfo> getInstanceInfos() {
        // Generated SDK accessor: lazy-init so callers never see null.
        if (instanceInfos == null) {
            instanceInfos = new com.amazonaws.internal.SdkInternalList<InstanceInfo>();
        }
        return instanceInfos;
    }
}
public class Examples {
    /**
     * Modify order: demonstrates changing an existing regular order.
     * @param kiteConnect authenticated API client
     * @throws KiteException on API errors
     * @throws IOException on network errors
     */
    public void modifyOrder(KiteConnect kiteConnect) throws KiteException, IOException {
        // Order modify request will return order model which will contain only order_id.
        final OrderParams params = new OrderParams();
        // Instrument and side.
        params.exchange = Constants.EXCHANGE_NSE;
        params.tradingsymbol = "ASHOKLEY";
        params.transactionType = Constants.TRANSACTION_TYPE_BUY;
        // Quantity and pricing.
        params.quantity = 1;
        params.price = 122.25;
        params.orderType = Constants.ORDER_TYPE_LIMIT;
        // Product and validity.
        params.product = Constants.PRODUCT_CNC;
        params.validity = Constants.VALIDITY_DAY;
        final Order modified = kiteConnect.modifyOrder("180116000984900", params, Constants.VARIETY_REGULAR);
        System.out.println(modified.orderId);
    }
}
public class DriverFiles { /** * Fills out a ConfigurationModule . * @ param input The ConfigurationModule to start with . * @ param globalFileField the field on which to set ( ) the global files . * @ param globalLibField the field on which to set ( ) the global libraries . * @ param localFileField the field on which to set ( ) the local files . * @ param localLibField the field on which to set ( ) the local libraries . * @ return a copy of input with files and libraries added to the given fields . */ public ConfigurationModule addNamesTo ( final ConfigurationModule input , final OptionalParameter < String > globalFileField , final OptionalParameter < String > globalLibField , final OptionalParameter < String > localFileField , final OptionalParameter < String > localLibField ) { } }
ConfigurationModule result = input ; result = this . globalFiles . addNamesTo ( result , globalFileField ) ; result = this . globalLibs . addNamesTo ( result , globalLibField ) ; result = this . localFiles . addNamesTo ( result , localFileField ) ; result = this . localLibs . addNamesTo ( result , localLibField ) ; return result ;
public class Boxing {
    /**
     * Transforms any array into an array of {@code boolean}.
     * @param src source array
     * @param srcPos start position
     * @param len length
     * @return boolean array
     */
    public static boolean[] unboxBooleans(Object src, int srcPos, int len) {
        // Normalize the untyped source via array(), then delegate to the typed overload.
        return unboxBooleans(array(src), srcPos, len);
    }
}
public class CmsHistoryResourceHandler { /** * Appends the < code > version < / code > parameter to the URI if needed . < p > * @ param uri the resource URI * @ param req the current request * @ return the same URI , with additional parameters in case of a historical request */ public static String getHistoryResourceURI ( String uri , ServletRequest req ) { } }
String histUri = uri ; if ( CmsHistoryResourceHandler . isHistoryRequest ( req ) ) { String version = req . getParameter ( CmsHistoryResourceHandler . PARAM_VERSION ) ; histUri = CmsRequestUtil . appendParameter ( uri , CmsHistoryResourceHandler . PARAM_VERSION , version ) ; } return histUri ;
public class Log { /** * Simple method for logging a single error exception . */ public void error ( Throwable t ) { } }
if ( isEnabled ( ) && isErrorEnabled ( ) ) { dispatchLogException ( new LogEvent ( this , LogEvent . ERROR_TYPE , t ) ) ; }