signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class BaseDestinationHandler { /** * Removes the subscription from the MatchSpace and removes the associated itemstream * if non durable . * @ param cd The consumer dispatcher * @ param isNonDurable If a nondurable subscription . * @ param callProxyCode If we need to call the proxy code at all . */ public void dereferenceSubscriptionConsumerDispatcher ( ConsumerDispatcher cd , boolean isNonDurable , boolean callProxyCode ) throws SIResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "dereferenceSubscriptionConsumerDispatcher" , new Object [ ] { cd , Boolean . valueOf ( isNonDurable ) , Boolean . valueOf ( callProxyCode ) } ) ; _pubSubRealization . dereferenceSubscriptionConsumerDispatcher ( cd , isNonDurable , callProxyCode ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "dereferenceSubscriptionConsumerDispatcher" ) ;
public class BucketSnippets {
    /**
     * Example of getting a blob whose stored generation matches the given one.
     *
     * @param blobName   name of the blob to fetch
     * @param generation generation the stored blob must match
     * @return the matching {@link Blob}
     */
    public Blob getBlob(String blobName, long generation) {
        // [START getBlob]
        Blob result = bucket.get(blobName, BlobGetOption.generationMatch(generation));
        // [END getBlob]
        return result;
    }
}
public class IpCamDevice { /** * Put image RGB data into the { @ link ByteBuffer } . Please note that data from { @ link ByteBuffer } * consumed by this method does not contain original JPEG data bytes , but bytes representing RGB * data of the image constructed from JPEG data . */ @ Override public void getImageBytes ( ByteBuffer buffer ) { } }
final BufferedImage bi = getImage ( ) ; if ( bi != null ) { buffer . put ( ImageUtils . imageToBytes ( bi ) ) ; }
public class LazyOffHeapValueHolder { /** * Must be called under offheap lock , may read invalid memory content otherwise */ @ Override void detach ( ) { } }
if ( mode == Mode . ATTACHED ) { byte [ ] bytes = new byte [ binaryValue . remaining ( ) ] ; binaryValue . get ( bytes ) ; binaryValue = ByteBuffer . wrap ( bytes ) ; mode = Mode . DETACHED ; } else { throw new IllegalStateException ( "OffHeapValueHolder in mode " + mode + " cannot be prepared for delayed deserialization" ) ; }
public class ValidationUtil { /** * " Check " value for validity of usage as a golang identifier . From : * https : / / golang . org / ref / spec # Identifiers * identifier = letter { letter | unicode _ digit } * letter = unicode _ letter | " _ " . * unicode _ letter and unicode _ digit are defined in section 4.5 of the the unicode * standard at http : / / www . unicode . org / versions / Unicode8.0.0 / and * the Java Character and Digit functions are unicode friendly * @ param value to check * @ return true for validity as a golang name . false if not . */ public static boolean isSbeGolangName ( final String value ) { } }
if ( possibleGolangKeyword ( value ) ) { if ( isGolangKeyword ( value ) ) { return false ; } } else { return false ; } return true ;
public class Quaternionf { /** * / * ( non - Javadoc ) * @ see org . joml . Quaternionfc # nlerp ( org . joml . Quaternionfc , float , org . joml . Quaternionf ) */ public Quaternionf nlerp ( Quaternionfc q , float factor , Quaternionf dest ) { } }
float cosom = x * q . x ( ) + y * q . y ( ) + z * q . z ( ) + w * q . w ( ) ; float scale0 = 1.0f - factor ; float scale1 = ( cosom >= 0.0f ) ? factor : - factor ; dest . x = scale0 * x + scale1 * q . x ( ) ; dest . y = scale0 * y + scale1 * q . y ( ) ; dest . z = scale0 * z + scale1 * q . z ( ) ; dest . w = scale0 * w + scale1 * q . w ( ) ; float s = ( float ) ( 1.0 / Math . sqrt ( dest . x * dest . x + dest . y * dest . y + dest . z * dest . z + dest . w * dest . w ) ) ; dest . x *= s ; dest . y *= s ; dest . z *= s ; dest . w *= s ; return dest ;
public class IndexedOsgiResourceLocator {
    /**
     * Removes a leading and a trailing '/' from the given resource path because
     * they carry no meaning (paths with and without them point to the same resource).
     *
     * @param path the resource path to clean; must not be null
     * @return the resource path without leading or trailing '/'
     * @throws IllegalArgumentException if path is null
     */
    private String cleanSlashesFromPath(final String path) {
        if (path == null) {
            throw new IllegalArgumentException("createResource must be called with non-null resourceName!");
        }
        String workPath = path;
        // Guard against the empty string, which the original would charAt(0) on.
        if (!workPath.isEmpty() && workPath.charAt(0) == '/') {
            workPath = workPath.substring(1);
        }
        // BUG FIX: the original indexed and substring'd the ORIGINAL path here;
        // after a leading slash was stripped that threw
        // StringIndexOutOfBoundsException, and the substring on `path` would
        // re-introduce the leading slash it had just removed.
        if (!workPath.isEmpty() && workPath.charAt(workPath.length() - 1) == '/') {
            workPath = workPath.substring(0, workPath.length() - 1);
        }
        return workPath;
    }
}
public class DictionaryDatabase { /** * Returns a String [ ] with the id ' s of all entries in the specified database . * @ return The entry names for the specified dictionary * @ param dictionaryName The name of the dictionary */ public String [ ] getDictionaryEntries ( String dictionaryName ) { } }
Dictionary dictionary = getDictionary ( dictionaryName ) ; if ( dictionary == null ) { logger . error ( "Cannot find requested dictionary" ) ; return new String [ 0 ] ; } else { // FIXME : dummy method that needs an implementation Entry [ ] entries = dictionary . getEntries ( ) ; String [ ] entryNames = new String [ entries . length ] ; logger . info ( "Found " , "" + entryNames . length , " entries in dictionary " , dictionaryName ) ; for ( int i = 0 ; i < entries . length ; i ++ ) { entryNames [ i ] = entries [ i ] . getLabel ( ) ; } return entryNames ; }
public class AbstractToString { /** * Tests if the given expression is converted to a String by its parent ( i . e . its parent is a * string concat expression , { @ code String . format } , or { @ code println ( Object ) } ) . */ private Description checkToString ( ExpressionTree tree , VisitorState state ) { } }
Symbol sym = ASTHelpers . getSymbol ( tree ) ; if ( ! ( sym instanceof VarSymbol || sym instanceof MethodSymbol ) ) { return NO_MATCH ; } Type type = ASTHelpers . getType ( tree ) ; if ( type instanceof MethodType ) { type = type . getReturnType ( ) ; } Tree parent = state . getPath ( ) . getParentPath ( ) . getLeaf ( ) ; ToStringKind toStringKind = isToString ( parent , tree , state ) ; if ( toStringKind == ToStringKind . NONE ) { return NO_MATCH ; } if ( ! typePredicate ( ) . apply ( type , state ) ) { return NO_MATCH ; } Optional < Fix > fix ; switch ( toStringKind ) { case IMPLICIT : fix = implicitToStringFix ( tree , state ) ; break ; case EXPLICIT : fix = toStringFix ( parent , tree , state ) ; break ; default : throw new AssertionError ( toStringKind ) ; } return maybeFix ( tree , state , type , fix ) ;
public class HelpUtil { /** * Returns an instance of the viewer for the current page . If no instance yet exists and * forceCreate is true , one is created . * @ param forceCreate If true , a viewer instance will be created if it does not exist . * @ return The help viewer ( may be null ) . */ public static IHelpViewer getViewer ( boolean forceCreate ) { } }
Page page = getPage ( ) ; IHelpViewer viewer = ( IHelpViewer ) page . getAttribute ( VIEWER_ATTRIB ) ; return viewer != null ? viewer : forceCreate ? createViewer ( page ) : null ;
public class PageSourceImpl { /** * throws only an exception when compilation fails * @ param pc * @ param page * @ return * @ throws PageException */ private Page loadPhysical ( PageContext pc , Page page ) throws TemplateException { } }
if ( ! mapping . hasPhysical ( ) ) return null ; ConfigWeb config = pc . getConfig ( ) ; PageContextImpl pci = ( PageContextImpl ) pc ; if ( ( mapping . getInspectTemplate ( ) == Config . INSPECT_NEVER || pci . isTrusted ( page ) ) && isLoad ( LOAD_PHYSICAL ) ) return page ; Resource srcFile = getPhyscalFile ( ) ; long srcLastModified = srcFile . lastModified ( ) ; if ( srcLastModified == 0L ) return null ; // Page exists if ( page != null ) { // if ( page ! = null & & ! recompileAlways ) { if ( srcLastModified != page . getSourceLastModified ( ) ) { // same size , maybe the content has not changed ? boolean same = false ; if ( page instanceof PagePro && ( ( PagePro ) page ) . getSourceLength ( ) == srcFile . length ( ) ) { PagePro pp = ( PagePro ) page ; try { same = pp . getHash ( ) == PageSourceCode . toString ( this , config . getTemplateCharset ( ) ) . hashCode ( ) ; } catch ( IOException e ) { /* * in case this exception happen , the following compile process will fail as well and report the * error */ } } if ( ! same ) { this . page = page = compile ( config , mapping . getClassRootDirectory ( ) , page , false , pc . ignoreScopes ( ) ) ; page . setPageSource ( this ) ; page . setLoadType ( LOAD_PHYSICAL ) ; } } } // page doesn ' t exist else { Resource classRootDir = mapping . getClassRootDirectory ( ) ; Resource classFile = classRootDir . getRealResource ( getJavaName ( ) + ".class" ) ; boolean isNew = false ; // new class if ( flush || ! classFile . exists ( ) ) { this . page = page = compile ( config , classRootDir , null , false , pc . ignoreScopes ( ) ) ; flush = false ; isNew = true ; } // load page else { try { this . page = page = newInstance ( mapping . getPhysicalClass ( this . getClassName ( ) ) ) ; } catch ( Throwable t ) { ExceptionUtil . rethrowIfNecessary ( t ) ; this . page = page = null ; } if ( page == null ) this . page = page = compile ( config , classRootDir , null , false , pc . 
ignoreScopes ( ) ) ; } // check if version changed or lasMod if ( ! isNew && ( srcLastModified != page . getSourceLastModified ( ) || page . getVersion ( ) != pc . getConfig ( ) . getFactory ( ) . getEngine ( ) . getInfo ( ) . getFullVersionInfo ( ) ) ) { isNew = true ; this . page = page = compile ( config , classRootDir , page , false , pc . ignoreScopes ( ) ) ; } page . setPageSource ( this ) ; page . setLoadType ( LOAD_PHYSICAL ) ; } pci . setPageUsed ( page ) ; return page ;
public class LevelInequalityRule { /** * Populate list of levels . */ private static void populateLevels ( ) { } }
levelList = new LinkedList ( ) ; levelList . add ( Level . FATAL . toString ( ) ) ; levelList . add ( Level . ERROR . toString ( ) ) ; levelList . add ( Level . WARN . toString ( ) ) ; levelList . add ( Level . INFO . toString ( ) ) ; levelList . add ( Level . DEBUG . toString ( ) ) ; Level trace = Level . toLevel ( 5000 , null ) ; if ( trace != null ) { levelList . add ( trace . toString ( ) ) ; } utilLoggingLevelList = new LinkedList ( ) ; utilLoggingLevelList . add ( UtilLoggingLevel . SEVERE . toString ( ) ) ; utilLoggingLevelList . add ( UtilLoggingLevel . WARNING . toString ( ) ) ; utilLoggingLevelList . add ( UtilLoggingLevel . INFO . toString ( ) ) ; utilLoggingLevelList . add ( UtilLoggingLevel . CONFIG . toString ( ) ) ; utilLoggingLevelList . add ( UtilLoggingLevel . FINE . toString ( ) ) ; utilLoggingLevelList . add ( UtilLoggingLevel . FINER . toString ( ) ) ; utilLoggingLevelList . add ( UtilLoggingLevel . FINEST . toString ( ) ) ;
public class OrderItemUrl { /** * Get Resource Url for UpdateQuoteItem * @ param quoteId * @ param quoteItemId * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ return String Resource Url */ public static MozuUrl updateQuoteItemUrl ( String quoteId , String quoteItemId , String responseFields ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/quotes/{quoteId}/items/{quoteItemId}?responseFields={responseFields}" ) ; formatter . formatUrl ( "quoteId" , quoteId ) ; formatter . formatUrl ( "quoteItemId" , quoteItemId ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class DictionaryMatcher {
    /**
     * Internal function to recursively build the list of un-leet possibilities.
     *
     * @param replacements    TreeMap of replacement index, and the possible characters
     *                        at that index to be replaced
     * @param current_index   index currently being substituted (internal recursion state)
     * @param password        a character array of the original password
     * @param final_passwords output list collecting the fully substituted passwords
     */
    private static void replaceAtIndex(final TreeMap<Integer, Character[]> replacements, Integer current_index, final char[] password, final List<String> final_passwords) {
        for (final char candidate : replacements.get(current_index)) {
            password[current_index] = candidate;
            if (current_index.equals(replacements.lastKey())) {
                // Last substitution position: the password variant is complete.
                final_passwords.add(new String(password));
            } else if (final_passwords.size() > 100) {
                // Give up if we've already made 100 replacements.
                return;
            } else {
                replaceAtIndex(replacements, replacements.higherKey(current_index), password, final_passwords);
            }
        }
    }
}
public class PasswordHashGenerator { /** * perform message digest and then append a salt at the end . */ public static byte [ ] digest ( char [ ] plainBytes , byte [ ] salt , String algorithm , int iteration , int length ) throws InvalidPasswordCipherException { } }
if ( logger . isLoggable ( Level . FINE ) ) { logger . fine ( "algorithm : " + algorithm + " iteration : " + iteration ) ; logger . fine ( "input length: " + plainBytes . length ) ; logger . fine ( "salt length: " + salt . length ) ; logger . fine ( "output length: " + length ) ; } byte [ ] oBytes = null ; if ( plainBytes != null && plainBytes . length > 0 && algorithm != null && algorithm . length ( ) > 0 && iteration > 0 ) { long begin = 0 ; if ( logger . isLoggable ( Level . FINE ) ) { begin = System . nanoTime ( ) ; } try { SecretKeyFactory skf = SecretKeyFactory . getInstance ( algorithm ) ; PBEKeySpec ks = new PBEKeySpec ( plainBytes , salt , iteration , length ) ; SecretKey s = skf . generateSecret ( ks ) ; oBytes = s . getEncoded ( ) ; } catch ( Exception e ) { throw ( InvalidPasswordCipherException ) new InvalidPasswordCipherException ( e . getMessage ( ) ) . initCause ( e ) ; } if ( logger . isLoggable ( Level . FINE ) ) { long elapsed = System . nanoTime ( ) - begin ; logger . fine ( "Elapsed time : " + elapsed + " ns " + ( elapsed / 1000000 ) + " ms" ) ; // debug } } if ( ( logger . isLoggable ( Level . FINE ) ) && oBytes != null ) { logger . fine ( "digest length: " + oBytes . length ) ; logger . fine ( hexDump ( oBytes ) ) ; } return oBytes ;
public class DescribePipelinesResult { /** * An array of descriptions for the specified pipelines . * @ param pipelineDescriptionList * An array of descriptions for the specified pipelines . */ public void setPipelineDescriptionList ( java . util . Collection < PipelineDescription > pipelineDescriptionList ) { } }
if ( pipelineDescriptionList == null ) { this . pipelineDescriptionList = null ; return ; } this . pipelineDescriptionList = new com . amazonaws . internal . SdkInternalList < PipelineDescription > ( pipelineDescriptionList ) ;
public class ChartResources { /** * Return a list of charts owned by a user . Optionally , provide an entityId to filter charts associated * with a given entity . * @ param req The HttpServlet request object . Cannot be null . * @ param ownerName Optional . The username for which to retrieve charts . For non - privileged this must be null * or equal to the logged in user . * @ param entityId Optional . The entity id associated with these charts . * @ return A list of charts filtered using the provided parameters . * @ throws WebApplicationException An exception with 404 NOT _ FOUND will be thrown if the user does not exist . */ @ GET @ Produces ( MediaType . APPLICATION_JSON ) @ Description ( "Return a list of charts owned by a user. Optionally, provide an entityId to filter charts associated " + "with a given entity. " ) public List < ChartDto > getCharts ( @ Context HttpServletRequest req , @ QueryParam ( "ownerName" ) String ownerName , @ QueryParam ( "entityId" ) BigInteger entityId ) { } }
PrincipalUser remoteUser = getRemoteUser ( req ) ; PrincipalUser owner ; owner = validateAndGetOwner ( req , ownerName ) ; List < Chart > result = new ArrayList < > ( ) ; if ( entityId == null ) { result . addAll ( _chartService . getChartsByOwner ( owner ) ) ; } else { Dashboard dashboard = _dService . findDashboardByPrimaryKey ( entityId ) ; if ( dashboard == null ) { throw new WebApplicationException ( entityId + ": Dashboard does not exist." , Status . NOT_FOUND ) ; } else if ( dashboard . isShared ( ) || remoteUser . isPrivileged ( ) ) { result . addAll ( _chartService . getChartsForEntity ( entityId ) ) ; } else { result . addAll ( _chartService . getChartsByOwnerForEntity ( owner , entityId ) ) ; } } return ChartDto . transformToDto ( result ) ;
public class Wills { /** * Creates successful { @ link Will } from provided object */ public static < A > Will < A > of ( A value ) { } }
return forListenableFuture ( Futures . immediateFuture ( value ) ) ;
public class BCWFactory { /** * 获得字节码 * @ param c * @ param name * @ param methodName * @ param returnType * @ return */ public BCW getAttribyteBCW ( Class c , String name , String methodName , Class returnType ) { } }
FieldAccessBCW f = new FieldAccessBCW ( c , name , methodName , returnType ) ; return f ;
public class AWSMigrationHubClient { /** * Provides identifying details of the resource being migrated so that it can be associated in the Application * Discovery Service ( ADS ) ' s repository . This association occurs asynchronously after * < code > PutResourceAttributes < / code > returns . * < important > * < ul > * < li > * Keep in mind that subsequent calls to PutResourceAttributes will override previously stored attributes . For * example , if it is first called with a MAC address , but later , it is desired to < i > add < / i > an IP address , it will * then be required to call it with < i > both < / i > the IP and MAC addresses to prevent overiding the MAC address . * < / li > * < li > * Note the instructions regarding the special use case of the < a href = * " https : / / docs . aws . amazon . com / migrationhub / latest / ug / API _ PutResourceAttributes . html # migrationhub - PutResourceAttributes - request - ResourceAttributeList " * > < code > ResourceAttributeList < / code > < / a > parameter when specifying any " VM " related value . * < / li > * < / ul > * < / important > < note > * Because this is an asynchronous call , it will always return 200 , whether an association occurs or not . To confirm * if an association was found based on the provided details , call < code > ListDiscoveredResources < / code > . * < / note > * @ param putResourceAttributesRequest * @ return Result of the PutResourceAttributes operation returned by the service . * @ throws AccessDeniedException * You do not have sufficient access to perform this action . * @ throws InternalServerErrorException * Exception raised when there is an internal , configuration , or dependency error encountered . * @ throws ServiceUnavailableException * Exception raised when there is an internal , configuration , or dependency error encountered . 
* @ throws DryRunOperationException * Exception raised to indicate a successfully authorized action when the < code > DryRun < / code > flag is set to * " true " . * @ throws UnauthorizedOperationException * Exception raised to indicate a request was not authorized when the < code > DryRun < / code > flag is set to * " true " . * @ throws InvalidInputException * Exception raised when the provided input violates a policy constraint or is entered in the wrong format * or data type . * @ throws ResourceNotFoundException * Exception raised when the request references a resource ( ADS configuration , update stream , migration * task , etc . ) that does not exist in ADS ( Application Discovery Service ) or in Migration Hub ' s repository . * @ sample AWSMigrationHub . PutResourceAttributes * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / AWSMigrationHub - 2017-05-31 / PutResourceAttributes " * target = " _ top " > AWS API Documentation < / a > */ @ Override public PutResourceAttributesResult putResourceAttributes ( PutResourceAttributesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executePutResourceAttributes ( request ) ;
public class DisruptorRunable { /** * This function need to be implements * @ see com . lmax . disruptor . EventHandler # onEvent ( java . lang . Object , long , boolean ) */ @ Override public void onEvent ( Object event , long sequence , boolean endOfBatch ) throws Exception { } }
if ( event == null ) { return ; } handleEvent ( event , endOfBatch ) ;
public class ModbusClient { /** * ParserObject */ public ParserObjectResponse createParserObject ( CreateParserObjectRequest request ) { } }
InternalRequest internalRequest = createRequest ( request , HttpMethodName . POST , PARSER_OBJECT ) ; return this . invokeHttpClient ( internalRequest , ParserObjectResponse . class ) ;
public class JsMessageVisitor { /** * Checks if message already processed . If so - it generates ' message * duplicated ' compiler error . * @ param msgName the name of the message * @ param msgNode the node that represents JS message */ private void checkIfMessageDuplicated ( String msgName , Node msgNode ) { } }
if ( messageNames . containsKey ( msgName ) ) { MessageLocation location = messageNames . get ( msgName ) ; compiler . report ( JSError . make ( msgNode , MESSAGE_DUPLICATE_KEY , msgName , location . messageNode . getSourceFileName ( ) , Integer . toString ( location . messageNode . getLineno ( ) ) ) ) ; }
public class Fade { /** * Fades out the given view and then removes in from its parent . * If the view is not currently parented , the method simply returns without doing anything . * @ param view The view that will be faded and removed . */ public void remove ( final View view ) { } }
if ( view . getParent ( ) instanceof ViewGroup ) { final ViewGroup parent = ( ViewGroup ) view . getParent ( ) ; if ( durationSet ) { view . animate ( ) . setDuration ( duration ) ; } view . animate ( ) . alpha ( 0f ) . setListener ( new AnimatorListenerAdapter ( ) { @ Override public void onAnimationEnd ( Animator animation ) { parent . removeView ( view ) ; view . setAlpha ( 1 ) ; view . animate ( ) . setListener ( null ) ; } } ) ; }
public class CollectionLiterals { /** * Creates a mutable { @ link LinkedList } instance containing the given initial elements . * @ param initial * the initial elements that the list should contain , in order . May not be < code > null < / code > but may * contain < code > null < / code > values . * @ return a new { @ link LinkedList } containing those elements */ @ SafeVarargs @ Pure public static < T > LinkedList < T > newLinkedList ( T ... initial ) { } }
if ( initial . length > 0 ) return Lists . newLinkedList ( Arrays . asList ( initial ) ) ; return newLinkedList ( ) ;
public class StripeJsonUtils { /** * Converts a { @ link List } into a { @ link JSONArray } . A { @ link ClassCastException } will be * thrown if any object in the list ( or any sub - list or sub - map ) is a { @ link Map } whose keys * are not { @ link String Strings } . * @ param values a { @ link List } of values to be put in a { @ link JSONArray } * @ return a { @ link JSONArray } , or { @ code null } if the input was { @ code null } */ @ Nullable @ SuppressWarnings ( "unchecked" ) static JSONArray listToJsonArray ( @ Nullable List values ) { } }
if ( values == null ) { return null ; } JSONArray jsonArray = new JSONArray ( ) ; for ( Object object : values ) { if ( object instanceof Map < ? , ? > ) { try { Map < String , Object > mapObject = ( Map < String , Object > ) object ; jsonArray . put ( mapToJsonObject ( mapObject ) ) ; } catch ( ClassCastException classCastException ) { // We don ' t include the item in the array if the keys are not Strings . } } else if ( object instanceof List < ? > ) { jsonArray . put ( listToJsonArray ( ( List ) object ) ) ; } else if ( object instanceof Number || object instanceof Boolean ) { jsonArray . put ( object ) ; } else { jsonArray . put ( object . toString ( ) ) ; } } return jsonArray ;
public class MediaType { /** * Parse the given list of ( potentially ) comma - separated strings into a list of { @ code MediaType } objects . * < p > This method can be used to parse an Accept or Content - Type header . * @ param mediaTypes the string to parse . * @ return the list of media types . * @ throws InvalidMediaTypeException if the media type value cannot be parsed . */ public static List < MediaType > parseMediaTypes ( List < String > mediaTypes ) { } }
if ( CollectionUtils . isEmpty ( mediaTypes ) ) { return Collections . emptyList ( ) ; } else if ( mediaTypes . size ( ) == 1 ) { return parseMediaTypes ( mediaTypes . get ( 0 ) ) ; } else { List < MediaType > result = new ArrayList < > ( 8 ) ; for ( String mediaType : mediaTypes ) { result . addAll ( parseMediaTypes ( mediaType ) ) ; } return result ; }
public class ExceptionDestinationHandlerImpl { /** * Checks that a message is valid for delivery to an exception * destination * @ param message * @ return */ private UndeliverableReturnCode checkMessage ( SIMPMessage message ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "checkMessage" , message ) ; UndeliverableReturnCode rc = UndeliverableReturnCode . OK ; // F001333 : E3 // If the message ' s reliability equals or is less than the configured ExceptionDiscardReliability // then the message shouldn ' t be sent on to the exception destination , instead it should simply // be thrown away ( the default setting is BestEffort ) . // We ' ll always chuck away BestEffort messages , but if we have the original destination we ' ll base // our decision on its configuration // ( If the _ originalDestination is null then we do not have the original destination ' s config to hand , // for example , in the case of cleaning up a deleted destination ) Reliability discardReliabilityThreshold = Reliability . BEST_EFFORT_NONPERSISTENT ; if ( _originalDestination != null ) discardReliabilityThreshold = _originalDestination . getExceptionDiscardReliability ( ) ; if ( message . getReliability ( ) . compareTo ( discardReliabilityThreshold ) <= 0 ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Message reliability (" + message . getReliability ( ) + ") <= Exception reliability (" + discardReliabilityThreshold + ")" ) ; rc = UndeliverableReturnCode . DISCARD ; } // Discard messages from temporary destinations . else if ( _originalDestination != null && _originalDestination . isTemporary ( ) ) rc = UndeliverableReturnCode . DISCARD ; // If the discardMessage option is set , then we discard the message rather // than send to the exception destination else if ( Boolean . TRUE . equals ( message . getMessage ( ) . getReportDiscardMsg ( ) ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Message discarded at sender's request" ) ; rc = UndeliverableReturnCode . DISCARD ; } // Decide whether we want to block the message or not . 
else if ( isBlockRequired ( message ) ) rc = UndeliverableReturnCode . BLOCK ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "checkMessage" , rc ) ; return rc ;
public class OriginTrackedPropertiesLoader { /** * Load { @ code . properties } data and return a map of { @ code String } - > * { @ link OriginTrackedValue } . * @ param expandLists if list { @ code name [ ] = a , b , c } shortcuts should be expanded * @ return the loaded properties * @ throws IOException on read error */ public Map < String , OriginTrackedValue > load ( boolean expandLists ) throws IOException { } }
try ( CharacterReader reader = new CharacterReader ( this . resource ) ) { Map < String , OriginTrackedValue > result = new LinkedHashMap < > ( ) ; StringBuilder buffer = new StringBuilder ( ) ; while ( reader . read ( ) ) { String key = loadKey ( buffer , reader ) . trim ( ) ; if ( expandLists && key . endsWith ( "[]" ) ) { key = key . substring ( 0 , key . length ( ) - 2 ) ; int index = 0 ; do { OriginTrackedValue value = loadValue ( buffer , reader , true ) ; put ( result , key + "[" + ( index ++ ) + "]" , value ) ; if ( ! reader . isEndOfLine ( ) ) { reader . read ( ) ; } } while ( ! reader . isEndOfLine ( ) ) ; } else { OriginTrackedValue value = loadValue ( buffer , reader , false ) ; put ( result , key , value ) ; } } return result ; }
public class ProxyUtil { /** * 转换classloader * @ param loader 原classloader * @ return 转换后的classloader */ public static ProxyClassLoader convertClassloader ( ClassLoader loader ) { } }
if ( loader == null ) { return ProxyClient . DEFAULT_LOADER ; } else if ( loader instanceof ProxyClassLoader ) { return ( ProxyClassLoader ) loader ; } else { return new ProxyClassLoader ( loader ) ; }
public class XmlUtil { /** * XML格式字符串转换为Map * @ param node XML节点 * @ return XML数据转换后的Map * @ since 4.0.8 */ public static Map < String , Object > xmlToMap ( Node node ) { } }
return xmlToMap ( node , new HashMap < String , Object > ( ) ) ;
public class JsonDumpFileProcessor { /** * Handles a { @ link EntityDocument } that was retrieved by * parsing the JSON input . It will call appropriate processing methods * depending on the type of document . * @ param document * the document to process */ private void handleDocument ( EntityDocument document ) { } }
if ( document instanceof ItemDocument ) { this . entityDocumentProcessor . processItemDocument ( ( ItemDocument ) document ) ; } else if ( document instanceof PropertyDocument ) { this . entityDocumentProcessor . processPropertyDocument ( ( PropertyDocument ) document ) ; } else if ( document instanceof LexemeDocument ) { this . entityDocumentProcessor . processLexemeDocument ( ( LexemeDocument ) document ) ; }
public class MemcachedServerConfigurationBuilder { /** * The encoding to be used by clients of the memcached text protocol . When not specified , " application / octet - stream " is assumed . * When encoding is set , the memcached text server will assume clients will be reading and writing values in that encoding , and * will perform the necessary conversions between this encoding and the storage format . */ public MemcachedServerConfigurationBuilder clientEncoding ( MediaType payloadType ) { } }
attributes . attribute ( MemcachedServerConfiguration . CLIENT_ENCODING ) . set ( payloadType ) ; return this ;
public class GwtQueryBenchModule { /** * Insert the iframes for benchmarking . * Depending on the parameter share , we will generate one iframe * for each benchmark or we will share the same one . */ private void initIFrames ( ) { } }
String i = "<iframe class=ibench id=%ID%bench src=html/%ID%bench.html></iframe>" ; if ( ! shareIframes ) { $ ( i . replaceAll ( "%ID%" , "gwt" ) ) . appendTo ( document ) . hide ( ) ; for ( Benchmark b : benchmarks ) { if ( b instanceof IframeBenchmark ) { $ ( i . replaceAll ( "%ID%" , b . getId ( ) ) ) . appendTo ( document ) . hide ( ) ; } } } else { $ ( i . replaceAll ( "%ID%" , "iframe" ) ) . appendTo ( document ) . hide ( ) ; }
public class JavaScriptBindings { /** * { @ inheritDoc } */ @ Override public Object get ( Object key ) { } }
String keyStr = ( String ) key ; if ( keyStr . startsWith ( "nashorn." ) ) { return super . get ( key ) ; } Expression var = formatter . getVariable ( '@' + keyStr ) ; if ( var == null ) { return null ; } Object obj ; switch ( var . getDataType ( formatter ) ) { case Expression . NUMBER : obj = new Double ( var . doubleValue ( formatter ) ) ; break ; case Expression . BOOLEAN : obj = Boolean . valueOf ( var . booleanValue ( formatter ) ) ; break ; case Expression . STRING : default : obj = var . stringValue ( formatter ) ; } return new JavaScriptObject ( obj ) ;
public class ExtendedAttributeDefinitionImpl { /** * { @ inheritDoc } */ @ Override public Map < String , String > getExtensions ( String extensionUri ) { } }
return delegate instanceof EquinoxAttributeDefinition ? ( ( EquinoxAttributeDefinition ) delegate ) . getExtensionAttributes ( extensionUri ) : Collections . < String , String > emptyMap ( ) ;
public class SqlgUtil { /** * This is called for inserts */ public static int setKeyValuesAsParameterUsingPropertyColumn ( SqlgGraph sqlgGraph , int i , PreparedStatement preparedStatement , Map < String , Pair < PropertyType , Object > > properties ) throws SQLException { } }
i = setKeyValuesAsParameterUsingPropertyColumn ( sqlgGraph , true , i , preparedStatement , properties . values ( ) ) ; return i ;
public class FileStoreDataSet { /** * Find all of the data set directories in a cache root . * @ param cacheRoot Cache root to search . * @ return List of data sets in the specified cache . */ public static List < FileStoreDataSet > getDataSets ( File cacheRoot ) { } }
if ( cacheRoot == null ) { String message = Logging . getMessage ( "nullValue.FileStorePathIsNull" ) ; Logging . logger ( ) . severe ( message ) ; throw new IllegalArgumentException ( message ) ; } ArrayList < FileStoreDataSet > datasets = new ArrayList < FileStoreDataSet > ( ) ; File [ ] cacheDirs = FileStoreDataSet . listDirs ( cacheRoot ) ; for ( File cacheDir : cacheDirs ) { if ( cacheDir . getName ( ) . equals ( "license" ) ) continue ; File [ ] subDirs = FileStoreDataSet . listDirs ( cacheDir ) ; if ( subDirs . length == 0 ) { datasets . add ( new FileStoreDataSet ( cacheDir , cacheRoot . getPath ( ) ) ) ; } else { // If the directory should be treated as a single dataset , add just one entry to the list . if ( isSingleDataSet ( subDirs ) ) { datasets . add ( new FileStoreDataSet ( cacheDir , cacheRoot . getPath ( ) ) ) ; } // Otherwise add each subdirectory as a separate data set . else { for ( File sd : subDirs ) { FileStoreDataSet ds = new FileStoreDataSet ( sd , cacheRoot . getPath ( ) ) ; datasets . add ( ds ) ; } } } } return datasets ;
public class BaseBigtableTableAdminClient { /** * Creates a new table from the specified snapshot . The target table must not exist . The snapshot * and the table must be in the same instance . * < p > Note : This is a private alpha release of Cloud Bigtable snapshots . This feature is not * currently available to most Cloud Bigtable customers . This feature might be changed in * backward - incompatible ways and is not recommended for production use . It is not subject to any * SLA or deprecation policy . * < p > Sample code : * < pre > < code > * try ( BaseBigtableTableAdminClient baseBigtableTableAdminClient = BaseBigtableTableAdminClient . create ( ) ) { * InstanceName parent = InstanceName . of ( " [ PROJECT ] " , " [ INSTANCE ] " ) ; * String tableId = " " ; * SnapshotName sourceSnapshot = SnapshotName . of ( " [ PROJECT ] " , " [ INSTANCE ] " , " [ CLUSTER ] " , " [ SNAPSHOT ] " ) ; * Table response = baseBigtableTableAdminClient . createTableFromSnapshotAsync ( parent , tableId , sourceSnapshot ) . get ( ) ; * < / code > < / pre > * @ param parent The unique name of the instance in which to create the table . Values are of the * form ` projects / & lt ; project & gt ; / instances / & lt ; instance & gt ; ` . * @ param tableId The name by which the new table should be referred to within the parent * instance , e . g . , ` foobar ` rather than ` & lt ; parent & gt ; / tables / foobar ` . * @ param sourceSnapshot The unique name of the snapshot from which to restore the table . The * snapshot and the table must be in the same instance . Values are of the form * ` projects / & lt ; project & gt ; / instances / & lt ; instance & gt ; / clusters / & lt ; cluster & gt ; / snapshots / & lt ; snapshot & gt ; ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." 
) public final OperationFuture < Table , CreateTableFromSnapshotMetadata > createTableFromSnapshotAsync ( InstanceName parent , String tableId , SnapshotName sourceSnapshot ) { } }
CreateTableFromSnapshotRequest request = CreateTableFromSnapshotRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . setTableId ( tableId ) . setSourceSnapshot ( sourceSnapshot == null ? null : sourceSnapshot . toString ( ) ) . build ( ) ; return createTableFromSnapshotAsync ( request ) ;
public class StartWithMatcher { /** * Begin from the specified position , test if the text string can be matched * by any of the regular expression , * return list of the attachments of all the matching regular expressions . < br > * 从指定位置开始判断文本字符串是否可以被任意一个正则表达式所匹配 , * 返回所有能匹配到的正则表达式的附件对象的列表 。 * The test starts from the specified position . * Caution : if the beginning part matches , the whole text is considered to match . * 匹配从指定的位置开始 。 注意 : 只要开头匹配了就算匹配 。 * @ param textText string to be tested for matching . < br > 对这个字符串进行匹配判断 。 * @ param startIndex The starting position of the testing . < br > 从文本的这个位置开始匹配 。 * @ returnList of attachment objects of the regular expressions that matche the text string ; * null is returned if no matching can be found . < br > * 匹配到的那些正则表达式所对应的附件的列表 ; 返回null表示没有匹配到任何一个正则表达式 。 */ public List < Object > matchAll ( CharSequence text , int startIndex ) { } }
List < Integer > states = getAllAcceptedStates ( text , startIndex ) ; if ( states == null ) { return null ; } else { List < Object > result = new ArrayList < Object > ( states . size ( ) ) ; for ( int p : states ) { result . add ( attachments [ p ] ) ; } return result ; }
public class ICUHumanize {
    /**
     * Same as {@link #naturalTime(Date, Date) naturalTime} for the specified locale.
     *
     * @param reference Date to be used as reference
     * @param duration Date to be used as duration from reference
     * @param locale Target locale
     * @return String representing the relative date
     */
    public static String naturalTime(final Date reference, final Date duration, final Locale locale) {
        // Temporarily switch to the target locale, delegate to the locale-free overload,
        // and restore the previous locale afterwards (handled by withinLocale).
        return withinLocale(new Callable<String>() {
            public String call() {
                return naturalTime(reference, duration);
            }
        }, locale);
    }
}
public class DataManager {
    /**
     * Initialise Session Manager.
     *
     * @param context Application context.
     * @param suffix Log tag suffix to extend the SDK details in a tag with any additional SDK module details.
     * @param log Logger instance for logging output.
     */
    public void init(@NonNull final Context context, @Nullable final String suffix, @NonNull final Logger log) {
        deviceDAO = new DeviceDAO(context, suffix);
        // One-time setup runs after the device DAO is created -- presumably it relies on it;
        // NOTE(review): confirm against onetimeDeviceSetup before reordering.
        onetimeDeviceSetup(context);
        logInfo(log);
        sessionDAO = new SessionDAO(context, suffix);
    }
}
public class AbstractWrapAdapter { /** * overwrite the getItemId to correctly return the value from the FastAdapter * @ param position * @ return */ @ Override public long getItemId ( int position ) { } }
if ( shouldInsertItemAtPosition ( position ) ) { return getItem ( position ) . getIdentifier ( ) ; } else { return mAdapter . getItemId ( position - itemInsertedBeforeCount ( position ) ) ; }
public class TextDocument { /** * @ param jspPath * @ param httpRequest * @ param httpResponse * @ param wbRequest * @ param results * @ param result * @ param resource * @ return * @ throws IOException * @ throws ServletException */ public String includeJspString ( String jspPath , HttpServletRequest httpRequest , HttpServletResponse httpResponse , WaybackRequest wbRequest , CaptureSearchResults results , CaptureSearchResult result , Resource resource ) throws ServletException , IOException { } }
if ( wbRequest . isAjaxRequest ( ) ) { return "" ; } UIResults uiResults = new UIResults ( wbRequest , uriConverter , results , result , resource ) ; StringHttpServletResponseWrapper wrappedResponse = new StringHttpServletResponseWrapper ( httpResponse ) ; uiResults . forward ( httpRequest , wrappedResponse , jspPath ) ; return wrappedResponse . getStringResponse ( ) ;
public class TangoEventsAdapter {
    /**
     * Registers a listener for attribute-configuration events on the given attribute.
     *
     * @param listener receiver of attribute-config events
     * @param attrName name of the attribute to listen on
     * @param filters event filters to apply
     * @throws DevFailed if the subscription fails
     */
    public void addTangoAttConfigListener(ITangoAttConfigListener listener, String attrName, String[] filters) throws DevFailed {
        // Delegate to the full overload with the boolean flag set to false.
        // NOTE(review): confirm the flag's meaning (likely "stateless" subscription) against the overload.
        addTangoAttConfigListener(listener, attrName, filters, false);
    }
}
public class SpillDispatcher {
    /**
     * Starts the spill dispatcher: resets the stop/running flags and spins up
     * {@code _maxThreads} daemon worker threads, each running a
     * {@link SpillDispatcherThread}, at a configurable priority.
     *
     * @see com.ibm.ws.sib.msgstore.persistence.Dispatcher#start()
     */
    public void start() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "start");
        // Read the configured thread-priority delta, clamped so the resulting priority
        // stays within [MIN_PRIORITY, MAX_PRIORITY] around NORM_PRIORITY.
        int priorityDelta = obtainIntConfigParameter(_msi, MessageStoreConstants.PROP_JDBC_SPILL_THREAD_PRIORITY_DELTA, MessageStoreConstants.PROP_JDBC_SPILL_THREAD_PRIORITY_DELTA_DEFAULT, Thread.MIN_PRIORITY - Thread.NORM_PRIORITY, Thread.MAX_PRIORITY - Thread.NORM_PRIORITY);
        _threads = new Thread[_maxThreads];
        _workers = new SpillDispatcherThread[_maxThreads];
        // Publish the running state under the monitor before any worker starts.
        synchronized (this) {
            _stopRequested = false;
            _running = true;
        }
        // Get the ME_UUID so that we can tell which ME our threads belong to.
        String meUUID = "";
        if (_msi != null) {
            JsMessagingEngine me = _msi._getMessagingEngine();
            if (me != null) {
                meUUID = me.getUuid().toString() + "-";
            }
        }
        // Create and start each worker as a named daemon thread.
        for (int i = 0; i < _maxThreads; i++) {
            String threadName = "sib.SpillDispatcher-" + meUUID + i;
            _workers[i] = new SpillDispatcherThread(i, threadName);
            _threads[i] = new Thread(_workers[i], threadName);
            _threads[i].setDaemon(true);
            _threads[i].setPriority(Thread.NORM_PRIORITY + priorityDelta);
            _threads[i].start();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "start");
    }
}
public class IndexedCollectionCertStore { /** * Iterate through all the X509CRLs and add matches to the * collection . */ private void matchX509CRLs ( CRLSelector selector , Collection < CRL > matches ) { } }
for ( Object obj : crlIssuers . values ( ) ) { if ( obj instanceof X509CRL ) { X509CRL crl = ( X509CRL ) obj ; if ( selector . match ( crl ) ) { matches . add ( crl ) ; } } else { // See crlIssuers javadoc . @ SuppressWarnings ( "unchecked" ) List < X509CRL > list = ( List < X509CRL > ) obj ; for ( X509CRL crl : list ) { if ( selector . match ( crl ) ) { matches . add ( crl ) ; } } } }
public class DateTimeTableEditor {
    /**
     * getTableCellRendererComponent, Returns the renderer that is used for drawing the cell.
     * This is required by the TableCellRenderer interface.
     *
     * The picker's value, background colors, and borders are updated to reflect the cell's
     * current value and its selected/focused state before the shared picker component is
     * returned. For additional details, see the Javadocs for
     * TableCellRenderer.getTableCellRendererComponent().
     *
     * @param table the table asking the renderer to draw
     * @param value the cell value to render
     * @param isSelected true if the cell is selected
     * @param hasFocus true if the cell has focus
     * @param row the row index of the cell
     * @param column the column index of the cell
     * @return the configured DateTimePicker component
     */
    @Override
    public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
        // Save the supplied value to the DateTimePicker.
        setCellEditorValue(value);
        // Draw the appropriate background colors to indicate a selected or unselected state.
        if (isSelected) {
            if (matchTableSelectionBackgroundColor) {
                Color selectionBackground = table.getSelectionBackground();
                dateTimePicker.setBackground(selectionBackground);
                dateTimePicker.datePicker.setBackground(selectionBackground);
                dateTimePicker.timePicker.setBackground(selectionBackground);
                dateTimePicker.datePicker.getComponentDateTextField().setBackground(selectionBackground);
                dateTimePicker.timePicker.getComponentTimeTextField().setBackground(selectionBackground);
            } else {
                dateTimePicker.datePicker.zDrawTextFieldIndicators();
                dateTimePicker.timePicker.zDrawTextFieldIndicators();
            }
        }
        if (!isSelected) {
            if (matchTableBackgroundColor) {
                Color tableBackground = table.getBackground();
                dateTimePicker.setBackground(tableBackground);
                dateTimePicker.datePicker.setBackground(tableBackground);
                dateTimePicker.timePicker.setBackground(tableBackground);
                dateTimePicker.datePicker.getComponentDateTextField().setBackground(tableBackground);
                dateTimePicker.timePicker.getComponentTimeTextField().setBackground(tableBackground);
            } else {
                dateTimePicker.datePicker.zDrawTextFieldIndicators();
                dateTimePicker.timePicker.zDrawTextFieldIndicators();
            }
        }
        // Draw the appropriate borders to indicate a focused or unfocused state.
        if (hasFocus) {
            dateTimePicker.setBorder(borderFocusedCell);
        } else {
            dateTimePicker.setBorder(borderUnfocusedCell);
        }
        // If needed, adjust the minimum row height for the table.
        zAdjustTableRowHeightIfNeeded(table);
        // This fixes a bug where the picker text could "move around" during a table resize event.
        dateTimePicker.datePicker.getComponentDateTextField().setScrollOffset(0);
        dateTimePicker.timePicker.getComponentTimeTextField().setScrollOffset(0);
        // Return the DateTimePicker component.
        return dateTimePicker;
    }
}
public class HBCIDialog {
    /**
     * Processing the DialogInit stage and updating institute and user data from the server
     * (mid-level API).
     *
     * This method processes the dialog initialization stage of an HBCIDialog. It creates
     * a new rawMsg in the kernel and processes it. The return values will be passed to
     * appropriate methods in the @c institute and @c user objects to update their internal
     * state with the data received from the institute.
     *
     * @return the message status of the dialog-init exchange; any exception raised during
     *         processing is attached to the returned status rather than thrown
     */
    private HBCIMsgStatus doDialogInit() {
        HBCIMsgStatus msgStatus = new HBCIMsgStatus();
        try {
            log.debug(HBCIUtils.getLocMsg("STATUS_DIALOG_INIT"));
            passport.getCallback().status(HBCICallback.STATUS_DIALOG_INIT, null);
            // Build and send the (signed, encrypted) dialog-init message.
            Message message = MessageFactory.createDialogInit("DialogInit", null, passport);
            msgStatus = kernel.rawDoIt(message, HBCIKernel.SIGNIT, HBCIKernel.CRYPTIT);
            passport.postInitResponseHook(msgStatus);
            HashMap<String, String> result = msgStatus.getData();
            if (msgStatus.isOK()) {
                // Refresh bank parameter data (BPD) and institute keys from the response.
                HBCIInstitute inst = new HBCIInstitute(kernel, passport);
                inst.updateBPD(result);
                inst.extractKeys(result);
                // Refresh user parameter data (UPD).
                HBCIUser user = new HBCIUser(kernel, passport);
                user.updateUPD(result);
                msgnum = 2;
                dialogid = result.get("MsgHead.dialogid");
                log.debug("dialog-id set to " + dialogid);
                // Deliver any institute messages (KIMsg entries) to the callback,
                // stopping at the first missing counter index.
                HBCIInstMessage msg;
                for (int i = 0; true; i++) {
                    try {
                        String header = HBCIUtils.withCounter("KIMsg", i);
                        msg = new HBCIInstMessage(result, header);
                    } catch (Exception e) {
                        // No further institute message present.
                        break;
                    }
                    passport.getCallback().callback(HBCICallback.HAVE_INST_MSG, msg.toString(), HBCICallback.TYPE_NONE, new StringBuilder());
                }
            }
            passport.getCallback().status(HBCICallback.STATUS_DIALOG_INIT_DONE, new Object[]{msgStatus, dialogid});
        } catch (Exception e) {
            // Record the failure in the status object instead of propagating it.
            msgStatus.addException(e);
        }
        return msgStatus;
    }
}
public class AWSWAFRegionalClient { /** * Permanently deletes a < a > RegexPatternSet < / a > . You can ' t delete a < code > RegexPatternSet < / code > if it ' s still used * in any < code > RegexMatchSet < / code > or if the < code > RegexPatternSet < / code > is not empty . * @ param deleteRegexPatternSetRequest * @ return Result of the DeleteRegexPatternSet operation returned by the service . * @ throws WAFInternalErrorException * The operation failed because of a system problem , even though the request was valid . Retry your request . * @ throws WAFInvalidAccountException * The operation failed because you tried to create , update , or delete an object by using an invalid account * identifier . * @ throws WAFNonexistentItemException * The operation failed because the referenced object doesn ' t exist . * @ throws WAFReferencedItemException * The operation failed because you tried to delete an object that is still in use . For example : < / p > * < ul > * < li > * You tried to delete a < code > ByteMatchSet < / code > that is still referenced by a < code > Rule < / code > . * < / li > * < li > * You tried to delete a < code > Rule < / code > that is still referenced by a < code > WebACL < / code > . * < / li > * @ throws WAFStaleDataException * The operation failed because you tried to create , update , or delete an object by using a change token * that has already been used . * @ throws WAFNonEmptyEntityException * The operation failed because you tried to delete an object that isn ' t empty . For example : < / p > * < ul > * < li > * You tried to delete a < code > WebACL < / code > that still contains one or more < code > Rule < / code > objects . * < / li > * < li > * You tried to delete a < code > Rule < / code > that still contains one or more < code > ByteMatchSet < / code > objects * or other predicates . * < / li > * < li > * You tried to delete a < code > ByteMatchSet < / code > that contains one or more < code > ByteMatchTuple < / code > * objects . 
* < / li > * < li > * You tried to delete an < code > IPSet < / code > that references one or more IP addresses . * < / li > * @ sample AWSWAFRegional . DeleteRegexPatternSet * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / waf - regional - 2016-11-28 / DeleteRegexPatternSet " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeleteRegexPatternSetResult deleteRegexPatternSet ( DeleteRegexPatternSetRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteRegexPatternSet ( request ) ;
public class CommerceShippingFixedOptionServiceBaseImpl {
    /**
     * Sets the commerce shipping fixed option rel local service.
     *
     * @param commerceShippingFixedOptionRelLocalService the commerce shipping fixed option rel local service
     */
    public void setCommerceShippingFixedOptionRelLocalService(com.liferay.commerce.shipping.engine.fixed.service.CommerceShippingFixedOptionRelLocalService commerceShippingFixedOptionRelLocalService) {
        // Plain dependency-injection setter; no validation is performed here.
        this.commerceShippingFixedOptionRelLocalService = commerceShippingFixedOptionRelLocalService;
    }
}
public class S3Dispatcher { /** * Handles GET / bucket / id ? uploadId = uploadId * @ param ctx the context describing the current request * @ param bucket the bucket containing the object to download * @ param id name of the object to use as download */ private void getPartList ( WebContext ctx , Bucket bucket , String id , String uploadId ) { } }
if ( ! multipartUploads . contains ( uploadId ) ) { ctx . respondWith ( ) . error ( HttpResponseStatus . NOT_FOUND , ERROR_MULTIPART_UPLOAD_DOES_NOT_EXIST ) ; return ; } Response response = ctx . respondWith ( ) ; response . setHeader ( HTTP_HEADER_NAME_CONTENT_TYPE , CONTENT_TYPE_XML ) ; XMLStructuredOutput out = response . xml ( ) ; out . beginOutput ( "ListPartsResult" ) ; out . property ( RESPONSE_BUCKET , bucket . getName ( ) ) ; out . property ( "Key" , id ) ; out . property ( "UploadId" , uploadId ) ; outputOwnerInfo ( out , "Initiator" ) ; outputOwnerInfo ( out , "Owner" ) ; File uploadDir = getUploadDir ( uploadId ) ; int marker = ctx . get ( "part-number-marker" ) . asInt ( 0 ) ; int maxParts = ctx . get ( "max-parts" ) . asInt ( 0 ) ; out . property ( "StorageClass" , "STANDARD" ) ; out . property ( "PartNumberMarker" , marker ) ; if ( ( marker + maxParts ) < uploadDir . list ( ) . length ) { out . property ( "NextPartNumberMarker" , marker + maxParts + 1 ) ; } if ( Strings . isFilled ( maxParts ) ) { out . property ( "MaxParts" , maxParts ) ; } boolean truncated = 0 < maxParts && maxParts < uploadDir . list ( ) . length ; out . property ( "IsTruncated" , truncated ) ; for ( File part : uploadDir . listFiles ( ) ) { out . beginObject ( "Part" ) ; out . property ( "PartNumber" , part . getName ( ) ) ; out . property ( "LastModified" , RFC822_INSTANT . format ( Instant . ofEpochMilli ( part . lastModified ( ) ) ) ) ; try { out . property ( HTTP_HEADER_NAME_ETAG , Files . hash ( part , Hashing . md5 ( ) ) . toString ( ) ) ; } catch ( IOException e ) { Exceptions . ignore ( e ) ; } out . property ( "Size" , part . length ( ) ) ; out . endObject ( ) ; } out . endOutput ( ) ;
public class EvalPair { /** * Turns a Guava { @ link Function } into another { @ code Function } which operates over both sides of * an { @ link EvalPair } . */ public static < F , T , KeyT extends F , TestT extends F > Function < EvalPair < ? extends KeyT , ? extends TestT > , EvalPair < T , T > > functionOnBoth ( final Function < F , T > func ) { } }
return new Function < EvalPair < ? extends KeyT , ? extends TestT > , EvalPair < T , T > > ( ) { @ Override public EvalPair < T , T > apply ( final EvalPair < ? extends KeyT , ? extends TestT > input ) { return EvalPair . of ( func . apply ( input . key ( ) ) , func . apply ( input . test ( ) ) ) ; } @ Override public String toString ( ) { return "EvalPair.functionOnBoth(" + func + ")" ; } } ;
public class SeleniumBrowser { /** * Stop the browser when started . */ public void stop ( ) { } }
if ( isStarted ( ) ) { log . info ( "Stopping browser " + webDriver . getCurrentUrl ( ) ) ; try { log . info ( "Trying to close the browser " + webDriver + " ..." ) ; webDriver . quit ( ) ; } catch ( UnreachableBrowserException e ) { // It happens for Firefox . It ' s ok : browser is already closed . log . warn ( "Browser is unreachable" , e ) ; } catch ( WebDriverException e ) { log . error ( "Failed to close browser" , e ) ; } webDriver = null ; } else { log . warn ( "Browser already stopped" ) ; }
public class JDBCDatabaseMetaData { /** * Retrieves a description of table columns available in * the specified catalog . * < P > Only column descriptions matching the catalog , schema , table * and column name criteria are returned . They are ordered by * < code > TABLE _ SCHEM < / code > , < code > TABLE _ NAME < / code > , and * < code > ORDINAL _ POSITION < / code > . * < P > Each column description has the following columns : * < OL > * < LI > < B > TABLE _ CAT < / B > String = > table catalog ( may be < code > null < / code > ) * < LI > < B > TABLE _ SCHEM < / B > String = > table schema ( may be < code > null < / code > ) * < LI > < B > TABLE _ NAME < / B > String = > table name * < LI > < B > COLUMN _ NAME < / B > String = > column name * < LI > < B > DATA _ TYPE < / B > int = > SQL type from java . sql . Types * < LI > < B > TYPE _ NAME < / B > String = > Data source dependent type name , * for a UDT the type name is fully qualified * < LI > < B > COLUMN _ SIZE < / B > int = > column size . * < LI > < B > BUFFER _ LENGTH < / B > is not used . * < LI > < B > DECIMAL _ DIGITS < / B > int = > the number of fractional digits . Null is returned for data types where * DECIMAL _ DIGITS is not applicable . * < LI > < B > NUM _ PREC _ RADIX < / B > int = > Radix ( typically either 10 or 2) * < LI > < B > NULLABLE < / B > int = > is NULL allowed . 
* < UL > * < LI > columnNoNulls - might not allow < code > NULL < / code > values * < LI > columnNullable - definitely allows < code > NULL < / code > values * < LI > columnNullableUnknown - nullability unknown * < / UL > * < LI > < B > REMARKS < / B > String = > comment describing column ( may be < code > null < / code > ) * < LI > < B > COLUMN _ DEF < / B > String = > ( JDBC4 clarification : ) default value for the column , which should be interpreted as a string when the value is enclosed in quotes ( may be < code > null < / code > ) * < LI > < B > SQL _ DATA _ TYPE < / B > int = > unused * < p > HSQLDB - specific : CLI type from SQL 2003 Table 37, * tables 6-9 Annex A1 , and / or addendums in other * documents , such as : < br > * SQL 2003 Part 9 : Management of External Data ( SQL / MED ) : DATALINK < br > * SQL 2003 Part 14 : XML - Related Specifications ( SQL / XML ) : XML < p > * < LI > < B > SQL _ DATETIME _ SUB < / B > int = > unused ( HSQLDB - specific : SQL 2003 CLI datetime / interval subcode ) * < LI > < B > CHAR _ OCTET _ LENGTH < / B > int = > for char types the * maximum number of bytes in the column * < LI > < B > ORDINAL _ POSITION < / B > int = > index of column in table * ( starting at 1) * < LI > < B > IS _ NULLABLE < / B > String = > ISO rules are used to determine the nullability for a column . 
* < UL > * < LI > YES - - - if the parameter can include NULLs * < LI > NO - - - if the parameter cannot include NULLs * < LI > empty string - - - if the nullability for the * parameter is unknown * < / UL > * < LI > < B > SCOPE _ CATLOG < / B > String = > catalog of table that is the scope * of a reference attribute ( < code > null < / code > if DATA _ TYPE isn ' t REF ) * < LI > < B > SCOPE _ SCHEMA < / B > String = > schema of table that is the scope * of a reference attribute ( < code > null < / code > if the DATA _ TYPE isn ' t REF ) * < LI > < B > SCOPE _ TABLE < / B > String = > table name that this the scope * of a reference attribure ( < code > null < / code > if the DATA _ TYPE isn ' t REF ) * < LI > < B > SOURCE _ DATA _ TYPE < / B > short = > source type of a distinct type or user - generated * Ref type , SQL type from java . sql . Types ( < code > null < / code > if DATA _ TYPE * isn ' t DISTINCT or user - generated REF ) * < LI > < B > IS _ AUTOINCREMENT < / B > String = > Indicates whether this column is auto incremented * < UL > * < LI > YES - - - if the column is auto incremented * < LI > NO - - - if the column is not auto incremented * < LI > empty string - - - if it cannot be determined whether the column is auto incremented * parameter is unknown * < / UL > * < / OL > * < p > ( JDBC4 clarification : ) The COLUMN _ SIZE column represents the specified column size for the given column . * For numeric data , this is the maximum precision . For character data , this is the [ declared or implicit maximum ] length in characters . * For datetime datatypes , this is the [ maximum ] length in characters of the String representation ( assuming the * maximum allowed precision of the fractional seconds component ) . For binary data , this is the [ maximum ] length in bytes . For the ROWID datatype , * this is the length in bytes [ , as returned by the implementation - specific java . sql . RowId . getBytes ( ) method ] . 
0 is returned for data types where the * column size is not applicable . < p > * < ! - - start release - specific documentation - - > * < div class = " ReleaseSpecificDocumentation " > * < h3 > HSQLDB - Specific Information : < / h3 > < p > * HSQLDB supports the SQL Standard . It treats unquoted identifiers as * case insensitive in SQL and stores * them in upper case ; it treats quoted identifiers as case sensitive and * stores them verbatim . All JDBCDatabaseMetaData methods perform * case - sensitive comparison between name ( pattern ) arguments and the * corresponding identifier values as they are stored in the database . * Therefore , care must be taken to specify name arguments precisely * ( including case ) as they are stored in the database . < p > * Since 1.7.0 , HSQLDB includes the new JDBC 3 columns SCOPE _ CATLOG , * SCOPE _ SCHEMA , SCOPE _ TABLE and SOURCE _ DATA _ TYPE in anticipation * of JDBC 3 compliant tools . However , these columns are never filled in ; * the engine does not support the related features . < p > * Since 1.7.2 , this feature is supported by default . If the jar is * compiled without org . hsqldb _ voltpatches . DatabaseInformationFull or * org . hsqldb _ voltpatches . DatabaseInformationMain , the feature is * not supported . The default implementation is * { @ link org . hsqldb _ voltpatches . dbinfo . DatabaseInformationFull } . * < / div > * < ! 
- - end release - specific documentation - - > * @ param catalog a catalog name ; must match the catalog name as it * is stored in the database ; " " retrieves those without a catalog ; * < code > null < / code > means that the catalog name should not be used to narrow * the search * @ param schemaPattern a schema name pattern ; must match the schema name * as it is stored in the database ; " " retrieves those without a schema ; * < code > null < / code > means that the schema name should not be used to narrow * the search * @ param tableNamePattern a table name pattern ; must match the * table name as it is stored in the database * @ param columnNamePattern a column name pattern ; must match the column * name as it is stored in the database * @ return < code > ResultSet < / code > - each row is a column description * @ exception SQLException if a database access error occurs * @ see # getSearchStringEscape */ public ResultSet getColumns ( String catalog , String schemaPattern , String tableNamePattern , String columnNamePattern ) throws SQLException { } }
if ( wantsIsNull ( tableNamePattern ) || wantsIsNull ( columnNamePattern ) ) { return executeSelect ( "SYSTEM_COLUMNS" , "0=1" ) ; } schemaPattern = translateSchema ( schemaPattern ) ; StringBuffer select = toQueryPrefix ( "SYSTEM_COLUMNS" ) . append ( and ( "TABLE_CAT" , "=" , catalog ) ) . append ( and ( "TABLE_SCHEM" , "LIKE" , schemaPattern ) ) . append ( and ( "TABLE_NAME" , "LIKE" , tableNamePattern ) ) . append ( and ( "COLUMN_NAME" , "LIKE" , columnNamePattern ) ) ; // by default , query already returns the result ordered // by TABLE _ SCHEM , TABLE _ NAME and ORDINAL _ POSITION return execute ( select . toString ( ) ) ;
public class Suppliers {
    /**
     * Given the string representation of a type, supplies the corresponding type.
     *
     * @param typeString a string representation of a type, e.g., "java.util.List"
     * @return a supplier that resolves the type lazily against the VisitorState it is given
     */
    public static Supplier<Type> typeFromString(final String typeString) {
        requireNonNull(typeString);
        // Resolution is deferred: the type is looked up against whatever compilation
        // state is passed to get(), not at supplier-creation time.
        return new Supplier<Type>() {
            @Override
            public Type get(VisitorState state) {
                return state.getTypeFromString(typeString);
            }
        };
    }
}
public class RpcInternalContext { /** * set attachment . * @ param key the key * @ param value the value * @ return context attachment */ public RpcInternalContext setAttachment ( String key , Object value ) { } }
if ( key == null ) { return this ; } if ( ! ATTACHMENT_ENABLE ) { // 未开启附件传递功能 , 只能传递隐藏key ( " . " 开头的Key ) if ( ! isHiddenParamKey ( key ) ) { return this ; } } else { if ( ! isValidInternalParamKey ( key ) ) { // 打开附件传递功能 , 只能传 " _ " 和 " . " 开头的Key throw new IllegalArgumentException ( "key must start with" + RpcConstants . INTERNAL_KEY_PREFIX + " or " + RpcConstants . HIDE_KEY_PREFIX ) ; } } if ( value == null ) { attachments . remove ( key ) ; } else { attachments . put ( key , value ) ; } return this ;
public class UserTransactionWrapper {
    /**
     * Switches the current thread from a global transaction context to a local one
     * by delegating to the container's transaction-context-change processing.
     * (Defect tag d303100; threadData parameter added under d704496.)
     *
     * @param threadData per-thread EJB data for the context change
     * @throws java.rmi.RemoteException if the context change fails
     */
    private void changeToLocalContext(EJBThreadData threadData) // d704496
            throws java.rmi.RemoteException {
        // LIDB1181.23.5.1
        // For EJB 2.0 beans, ending global tx causes Tx Service to
        // automatically begin a local tx. Create new ContainerTx.
        // For EJB 1.1 beans, we transition to no tx context here.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            // Trace-only block: report the LTC the Tx Service started, if any.
            LocalTransactionCurrent ltcCurrent = EmbeddableTransactionManagerFactory.getLocalTransactionCurrent(); // LIDB1673.2.1.5 // 120870.3
            LocalTransactionCoordinator lCoord = ltcCurrent.getLocalTranCoord();
            if (lCoord != null) {
                Tr.event(tc, "Tx Service began LTC cntxt: tid=" + Integer.toHexString(lCoord.hashCode()) + "(LTC)");
            }
        }
        container.processTxContextChange(threadData, true); // d704496
    }
}
public class WebContainer { /** * Method getVirtualHost . Returns null if the input name does not * match any configured host . * @ param targetHost * @ return VirtualHost * This method is not to be used in any request processing as it * is not optimized for performance * @ throws WebAppHostNotFoundException */ public VirtualHost getVirtualHost ( String targetHost ) throws WebAppHostNotFoundException { } }
Iterator i = requestMapper . targetMappings ( ) ; while ( i . hasNext ( ) ) { RequestProcessor rp = ( RequestProcessor ) i . next ( ) ; if ( rp instanceof VirtualHost ) { VirtualHost vHost = ( VirtualHost ) rp ; if ( targetHost . equalsIgnoreCase ( vHost . getName ( ) ) ) return vHost ; } } return null ;
public class DnsOpCode { /** * Returns the { @ link DnsOpCode } instance of the specified byte value . */ public static DnsOpCode valueOf ( int b ) { } }
switch ( b ) { case 0x00 : return QUERY ; case 0x01 : return IQUERY ; case 0x02 : return STATUS ; case 0x04 : return NOTIFY ; case 0x05 : return UPDATE ; } return new DnsOpCode ( b ) ;
public class PacketHelper { /** * Creates a packet with a KNXnet / IP message header v1.0 , containing the specified * service < code > type < / code > , and generates the corresponding byte representation * of this structure . * @ param type service type to pack * @ return the packet as byte array */ public static byte [ ] toPacket ( final ServiceType type ) { } }
final KNXnetIPHeader h = new KNXnetIPHeader ( type . svcType , type . getStructLength ( ) ) ; final ByteArrayOutputStream os = new ByteArrayOutputStream ( h . getTotalLength ( ) ) ; os . write ( h . toByteArray ( ) , 0 , h . getStructLength ( ) ) ; return type . toByteArray ( os ) ;
public class AppAPI { /** * Updates an app . The update can contain an new configuration for the app , * addition of new fields as well as updates to the configuration of * existing fields . Fields not included will not be deleted . To delete a * field use the " delete field " operation . * When adding / updating / deleting apps and fields , it can be simpler to only * update the app config here and add / update / remove fields using the * field / { field _ id } sub resource . * @ param appId * The id of the app to be updated * @ param app * The updated app definition */ public void updateApp ( int appId , ApplicationUpdate app ) { } }
getResourceFactory ( ) . getApiResource ( "/app/" + appId ) . entity ( app , MediaType . APPLICATION_JSON ) . put ( ) ;
public class BaseProxy { /** * Constructor . */ public void init ( BaseProxy parentProxy , String strID ) { } }
m_parentProxy = parentProxy ; m_strID = strID ; if ( m_parentProxy != null ) m_parentProxy . addChildProxy ( this ) ;
public class JSVariant { /** * otherwise , it ' s the sum of the multiChoice counts for the cases . */ BigInteger setMultiChoiceCount ( ) { } }
if ( boxed == null ) { multiChoiceCount = BigInteger . ZERO ; for ( int i = 0 ; i < cases . length ; i ++ ) multiChoiceCount = multiChoiceCount . add ( cases [ i ] . setMultiChoiceCount ( ) ) ; } return multiChoiceCount ;
public class LocaleUtils {
    /**
     * TEST METHOD: prints whether the actual value matches the expected
     * value.
     *
     * @param one the actual value
     * @param two the expected value
     */
    private static void isCorrect(String one, String two) {
        // FIX: Objects.equals treats two nulls as equal; the previous check
        // "one != null && one.equals(two)" reported (null, null) as wrong.
        if (java.util.Objects.equals(one, two)) {
            System.out.println(one + " correct");
        } else {
            System.out.println(one + " wrong - should be " + two);
        }
    }
}
public class ProviderFactory { /** * Liberty code change start */ private void addAndSortReaders ( List < ProviderInfo < MessageBodyReader < ? > > > newReaders , boolean forceSort ) { } }
Comparator < ProviderInfo < MessageBodyReader < ? > > > comparator = null ; if ( ! customComparatorAvailable ( MessageBodyReader . class ) ) { comparator = new MessageBodyReaderComparator ( readerMediaTypesMap ) ; } messageReaders . addAndSortProviders ( newReaders , comparator , forceSort ) ;
public class ProcessApplicationAttachments { /** * return true if the deployment unit is either itself a process * application or part of a process application . */ public static boolean isPartOfProcessApplication ( DeploymentUnit unit ) { } }
if ( isProcessApplication ( unit ) ) { return true ; } if ( unit . getParent ( ) != null && unit . getParent ( ) != unit ) { return unit . getParent ( ) . hasAttachment ( PART_OF_MARKER ) ; } return false ;
public class UpdateCenter { /** * Returns String with version of backup . war file , * if the file does not exists returns null */ public String getBackupVersion ( ) { } }
try { try ( JarFile backupWar = new JarFile ( new File ( Lifecycle . get ( ) . getHudsonWar ( ) + ".bak" ) ) ) { Attributes attrs = backupWar . getManifest ( ) . getMainAttributes ( ) ; String v = attrs . getValue ( "Jenkins-Version" ) ; if ( v == null ) v = attrs . getValue ( "Hudson-Version" ) ; return v ; } } catch ( IOException e ) { LOGGER . log ( Level . WARNING , "Failed to read backup version " , e ) ; return null ; }
public class ApruveClient { /** * Issues a GET request against to the Apruve REST API , using the specified * path . * @ param path * The path to issue the GET against * @ param resultType * The type of the response * @ return A single object of type resultType */ public < T > ApruveResponse < T > get ( String path , Class < T > resultType ) { } }
Response response = restRequest ( path ) . get ( ) ; return processResponse ( response , resultType ) ;
public class AndroidExporter { /** * { @ inheritDoc } */ @ Override public void export ( ITextNode [ ] nodes , String masterlanguage , String language , Status [ ] states ) throws ExportException { } }
LOG . info ( "Exporting Android XML file..." ) ; try ( OutputStream outputStream = outputStreamFactory . createOutputStream ( outputFile ) ; BufferedWriter bw = new BufferedWriter ( new OutputStreamWriter ( outputStream , "UTF-8" ) ) ) { synchronized ( this ) { // write header bw . write ( "<?xml version=\"1.0\" encoding=\"utf-8\"?>" ) ; bw . write ( "\n" ) ; bw . write ( "<!-- Generated file - do not edit -->" ) ; bw . write ( "\n" ) ; bw . write ( "<resources>" ) ; bw . write ( "\n" ) ; for ( ITextNode node : nodes ) { IValueNode valueNode = node . getValueNode ( language ) ; // get value from value node if ( valueNode != null ) { if ( states == null || TremaCoreUtil . containsStatus ( valueNode . getStatus ( ) , states ) ) { IKeyValuePair keyValuePair = new KeyValuePair ( node . getKey ( ) , valueNode . getValue ( ) ) ; for ( IExportFilter filter : iExportFilters ) { filter . filter ( keyValuePair ) ; } // validate key after it has been filtered String key = keyValuePair . getKey ( ) ; if ( ! isValidKeyName ( key ) ) { throw new IllegalArgumentException ( "Invalid string key name " + ( key != null ? "'" + key + "'" : "null" ) ) ; } // map the placeholders and write the result String value = keyValuePair . getValue ( ) ; if ( value != null ) { String formattedText = resolveIOSPlaceholders ( value ) ; String rowText = String . format ( " <string name=\"%s\">%s</string>" , key , formattedText ) ; bw . write ( rowText ) ; bw . write ( "\n" ) ; } } } } // write footer bw . write ( "</resources>" ) ; } bw . flush ( ) ; } catch ( IOException e ) { throw new ExportException ( "Could not store properties:" + e . getMessage ( ) ) ; } LOG . info ( "Exporting of Android XML file finished." ) ;
public class InterceptorInitializer { /** * This method resolves the NameService and registers the interceptor . */ public void post_init ( final ORBInitInfo info ) { } }
try { info . add_server_request_interceptor ( ServerRequestInterceptor . getInstance ( ) ) ; } catch ( final Exception e ) { logger . error ( "error registering server interceptor" , e ) ; }
public class Db { /** * for DbKit . removeConfig ( configName ) */ static void removeDbProWithConfig ( String configName ) { } }
if ( MAIN != null && MAIN . config . getName ( ) . equals ( configName ) ) { MAIN = null ; } map . remove ( configName ) ;
public class Calculate { public static void create ( HashMap m , String var , String [ ] args ) { } }
var = var . toLowerCase ( ) ; String fun = args [ 0 ] . toUpperCase ( ) ; String [ ] newArgs = Arrays . copyOfRange ( args , 1 , args . length ) ; log . debug ( "Attempting to apply DOME function: {}" , fun ) ; if ( fun . equals ( "NEW_EVENT()" ) ) { if ( newArgs . length < 1 ) { log . error ( "Not enough arguments for {}" , fun ) ; } else if ( newArgs . length % 2 != 1 ) { log . warn ( "There is unpaired variable for {}" , fun ) ; } HashMap < String , String > info = new HashMap < String , String > ( ) ; for ( int i = 2 ; i < newArgs . length ; i += 2 ) { info . put ( newArgs [ i - 1 ] . toLowerCase ( ) , newArgs [ i ] ) ; } HashMap < String , String > newEvent = ExperimentHelper . createEvent ( m , var , newArgs [ 0 ] , info , true ) ; if ( ! newEvent . isEmpty ( ) ) { MapUtil . getBucket ( m , "management" ) . getDataList ( ) . add ( newEvent ) ; } else { log . warn ( "No event has been generated" ) ; } } else { log . error ( "DOME Function {} unsupported" , fun ) ; }
public class JinjavaInterpreter { /** * Resolve property of bean . * @ param object * Bean . * @ param propertyNames * Names of properties to resolve recursively . * @ return Value of property . */ public Object resolveProperty ( Object object , List < String > propertyNames ) { } }
return expressionResolver . resolveProperty ( object , propertyNames ) ;
public class ModifyDBInstanceRequest { /** * A list of DB security groups to authorize on this DB instance . Changing this setting doesn ' t result in an outage * and the change is asynchronously applied as soon as possible . * Constraints : * < ul > * < li > * If supplied , must match existing DBSecurityGroups . * < / li > * < / ul > * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDBSecurityGroups ( java . util . Collection ) } or { @ link # withDBSecurityGroups ( java . util . Collection ) } if you * want to override the existing values . * @ param dBSecurityGroups * A list of DB security groups to authorize on this DB instance . Changing this setting doesn ' t result in an * outage and the change is asynchronously applied as soon as possible . < / p > * Constraints : * < ul > * < li > * If supplied , must match existing DBSecurityGroups . * < / li > * @ return Returns a reference to this object so that method calls can be chained together . */ public ModifyDBInstanceRequest withDBSecurityGroups ( String ... dBSecurityGroups ) { } }
if ( this . dBSecurityGroups == null ) { setDBSecurityGroups ( new java . util . ArrayList < String > ( dBSecurityGroups . length ) ) ; } for ( String ele : dBSecurityGroups ) { this . dBSecurityGroups . add ( ele ) ; } return this ;
public class AbstractHttpRequest { /** * Write the last response to a file . * @ param f File * @ return long bytes written * @ throws HelloSignException Thrown if an exception occurs during the copy * of the response stream to the given file . */ public long getResponseAsFile ( File f ) throws HelloSignException { } }
long bytesWritten = 0 ; try { bytesWritten = Files . copy ( lastResponseStream , f . toPath ( ) , StandardCopyOption . REPLACE_EXISTING ) ; } catch ( Exception e ) { throw new HelloSignException ( e ) ; } return bytesWritten ;
public class PlainTextDocumentReaderAndWriter { /** * sentence splitting as now */ public Iterator < List < IN > > getIterator ( Reader r ) { } }
Tokenizer < IN > tokenizer = tokenizerFactory . getTokenizer ( r ) ; // PTBTokenizer . newPTBTokenizer ( r , false , true ) ; List < IN > words = new ArrayList < IN > ( ) ; IN previous = tokenFactory . makeToken ( ) ; StringBuilder prepend = new StringBuilder ( ) ; /* * This changes SGML tags into whitespace - - it should maybe be moved * elsewhere */ while ( tokenizer . hasNext ( ) ) { IN w = tokenizer . next ( ) ; String word = w . get ( CoreAnnotations . TextAnnotation . class ) ; Matcher m = sgml . matcher ( word ) ; if ( m . matches ( ) ) { String before = StringUtils . getNotNullString ( w . get ( CoreAnnotations . BeforeAnnotation . class ) ) ; String after = StringUtils . getNotNullString ( w . get ( CoreAnnotations . AfterAnnotation . class ) ) ; prepend . append ( before ) . append ( word ) ; String previousTokenAfter = StringUtils . getNotNullString ( previous . get ( CoreAnnotations . AfterAnnotation . class ) ) ; previous . set ( AfterAnnotation . class , previousTokenAfter + word + after ) ; // previous . appendAfter ( w . word ( ) + w . after ( ) ) ; } else { String before = StringUtils . getNotNullString ( w . get ( CoreAnnotations . BeforeAnnotation . class ) ) ; if ( prepend . length ( ) > 0 ) { w . set ( BeforeAnnotation . class , prepend . toString ( ) + before ) ; // w . prependBefore ( prepend . toString ( ) ) ; prepend = new StringBuilder ( ) ; } words . add ( w ) ; previous = w ; } } List < List < IN > > sentences = wts . process ( words ) ; String after = "" ; IN last = null ; for ( List < IN > sentence : sentences ) { int pos = 0 ; for ( IN w : sentence ) { w . set ( PositionAnnotation . class , Integer . toString ( pos ) ) ; after = StringUtils . getNotNullString ( w . get ( CoreAnnotations . AfterAnnotation . class ) ) ; w . remove ( AfterAnnotation . class ) ; last = w ; } } if ( last != null ) { last . set ( AfterAnnotation . class , after ) ; } return sentences . iterator ( ) ;
public class ItemRule { /** * Sets the name of the item . * @ param name the name to set */ public void setName ( String name ) { } }
if ( name . endsWith ( ARRAY_SUFFIX ) ) { this . name = name . substring ( 0 , name . length ( ) - 2 ) ; type = ItemType . ARRAY ; } else if ( name . endsWith ( MAP_SUFFIX ) ) { this . name = name . substring ( 0 , name . length ( ) - 2 ) ; type = ItemType . MAP ; } else { this . name = name ; if ( type == null ) { type = ItemType . SINGLE ; } }
public class JenkinsHttpClient { /** * { @ inheritDoc } */ @ Override public String post_text ( String path , String textData , boolean crumbFlag ) throws IOException { } }
return post_text ( path , textData , ContentType . DEFAULT_TEXT , crumbFlag ) ;
public class StrutsApp { /** * Returns a list of all the form beans that are defined for this StrutsApp . */ public List getFormBeansAsList ( ) { } }
ArrayList retList = new ArrayList ( ) ; for ( Iterator i = _formBeans . values ( ) . iterator ( ) ; i . hasNext ( ) ; ) { FormBeanModel fb = ( FormBeanModel ) i . next ( ) ; if ( fb != null ) retList . add ( fb ) ; } return retList ;
public class FNCImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setXfrUnits ( Integer newXfrUnits ) { } }
Integer oldXfrUnits = xfrUnits ; xfrUnits = newXfrUnits ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . FNC__XFR_UNITS , oldXfrUnits , xfrUnits ) ) ;
public class HeapSort { /** * Creates a binary heap with the list of elements with the largest valued element at the root followed by the next * largest valued elements as parents down to the leafs . * @ param < E > the Class type of the elements in the List . * @ param elements the List of elements to heapify . * @ param startIndex an integer value indicating the starting index in the heap in the List of elements . * @ param endIndex an integer value indicating the ending index in the heap in the List of elements . */ protected < E > void siftDown ( final List < E > elements , final int startIndex , final int endIndex ) { } }
int rootIndex = startIndex ; while ( ( rootIndex * 2 + 1 ) <= endIndex ) { int swapIndex = rootIndex ; int leftChildIndex = ( rootIndex * 2 + 1 ) ; int rightChildIndex = ( leftChildIndex + 1 ) ; if ( getOrderBy ( ) . compare ( elements . get ( swapIndex ) , elements . get ( leftChildIndex ) ) < 0 ) { swapIndex = leftChildIndex ; } if ( rightChildIndex <= endIndex && getOrderBy ( ) . compare ( elements . get ( swapIndex ) , elements . get ( rightChildIndex ) ) < 0 ) { swapIndex = rightChildIndex ; } if ( swapIndex != rootIndex ) { swap ( elements , rootIndex , swapIndex ) ; rootIndex = swapIndex ; } else { return ; } }
public class AFPChainXMLParser { /** * new utility method that checks that the order of the pair in the XML alignment is correct and flips the direction if needed * @ param xml * @ param name1 * @ param name1 * @ param ca1 * @ param ca2 * @ return */ public static AFPChain fromXML ( String xml , String name1 , String name2 , Atom [ ] ca1 , Atom [ ] ca2 ) throws IOException , StructureException { } }
AFPChain [ ] afps = parseMultiXML ( xml ) ; if ( afps . length > 0 ) { AFPChain afpChain = afps [ 0 ] ; String n1 = afpChain . getName1 ( ) ; String n2 = afpChain . getName2 ( ) ; if ( n1 == null ) n1 = "" ; if ( n2 == null ) n2 = "" ; // System . out . println ( " from AFPCHAIN : " + n1 + " " + n2 ) ; if ( n1 . equals ( name2 ) && n2 . equals ( name1 ) ) { // flipped order // System . out . println ( " AfpChain in wrong order , flipping . . . " ) ; afpChain = AFPChainFlipper . flipChain ( afpChain ) ; } rebuildAFPChain ( afpChain , ca1 , ca2 ) ; return afpChain ; } return null ;
public class ResourceDMBean { /** * Finds an accessor for an attribute . Tries to find getAttrName ( ) , isAttrName ( ) , attrName ( ) methods . If not * found , tries to use reflection to get the value of attr _ name . If still not found , creates a NullAccessor . */ protected static Accessor findGetter ( Object target , String attr_name ) { } }
final String name = Util . attributeNameToMethodName ( attr_name ) ; Class < ? > clazz = target . getClass ( ) ; Method method = Util . findMethod ( target , Arrays . asList ( "get" + name , "is" + name , toLowerCase ( name ) ) ) ; if ( method != null && ( isGetMethod ( method ) || isIsMethod ( method ) ) ) return new MethodAccessor ( method , target ) ; // 4 . Find a field last _ name Field field = Util . getField ( clazz , attr_name ) ; if ( field != null ) return new FieldAccessor ( field , target ) ; return new NoopAccessor ( ) ;
public class CommonTypeFactory { /** * Called when common class is found in order to properly resolve generics for the common class . * For example , suppose we detect that { @ link Comparable } is common for { @ link Integer } and { @ link Double } * and now we need to know it ' s common generic ( in context of both types ) which is { @ code Comparable < Number > } . * Infinite generics depth is supported ( e . g . { @ code List < Set < List < T > > > > } . Types commodity resolution is simply * restarted for lower levels ( recursion ) . * @ param type common type ( may be interface ) for original types * @ param firstContext first type generics context * @ param secondContext second type generics context * @ param alwaysIncludeInterfaces always search for common interfaces * @ param cache resolution types cache * @ return complete common type */ private static Type buildCommonType ( final Class < ? > type , final Map < Class < ? > , LinkedHashMap < String , Type > > firstContext , final Map < Class < ? > , LinkedHashMap < String , Type > > secondContext , final boolean alwaysIncludeInterfaces , final PathsCache cache ) { } }
final TypeVariable < ? extends Class < ? > > [ ] typeParameters = type . getTypeParameters ( ) ; if ( typeParameters . length > 0 ) { final Type [ ] params = new Type [ typeParameters . length ] ; final Map < String , Type > firstGenerics = firstContext . get ( type ) ; final Map < String , Type > secondGenerics = secondContext . get ( type ) ; int i = 0 ; boolean notAllObject = false ; for ( TypeVariable var : typeParameters ) { final Type sub1 = firstGenerics . get ( var . getName ( ) ) ; final Type sub2 = secondGenerics . get ( var . getName ( ) ) ; final Type paramType = buildParameterType ( type , sub1 , sub2 , alwaysIncludeInterfaces , cache ) ; if ( paramType != Object . class ) { notAllObject = true ; } params [ i ++ ] = paramType ; } final Type outer = TypeUtils . getOuter ( type ) ; // simplify to avoid redundant ( actually absent ) parametrization return outer != null || notAllObject ? new ParameterizedTypeImpl ( type , params , outer ) : type ; } else { return type ; }
public class WeakConstructorStorage { /** * stores the constructors for a Class * @ param clazz * @ return stored structure */ private Array store ( Class clazz ) { } }
Constructor [ ] conArr = clazz . getConstructors ( ) ; Array args = new ArrayImpl ( ) ; for ( int i = 0 ; i < conArr . length ; i ++ ) { storeArgs ( conArr [ i ] , args ) ; } map . put ( clazz , args ) ; return args ;
public class LTriPredicateBuilder { /** * Adds full new case for the argument that are of specific classes ( matched by instanceOf , null is a wildcard ) . */ @ Nonnull public < V1 extends T1 , V2 extends T2 , V3 extends T3 > LTriPredicateBuilder < T1 , T2 , T3 > aCase ( Class < V1 > argC1 , Class < V2 > argC2 , Class < V3 > argC3 , LTriPredicate < V1 , V2 , V3 > function ) { } }
PartialCaseWithBoolProduct . The pc = partialCaseFactoryMethod ( ( a1 , a2 , a3 ) -> ( argC1 == null || argC1 . isInstance ( a1 ) ) && ( argC2 == null || argC2 . isInstance ( a2 ) ) && ( argC3 == null || argC3 . isInstance ( a3 ) ) ) ; pc . evaluate ( function ) ; return self ( ) ;
public class CmsJspElFunctions { /** * Returns a list of resource wrappers created from the input list of resources . * @ param cms the current OpenCms user context * @ param list the list to create the resource wrapper list from * @ return the list of wrapped resources . */ public static List < CmsJspResourceWrapper > convertResourceList ( CmsObject cms , List < CmsResource > list ) { } }
List < CmsJspResourceWrapper > result = new ArrayList < CmsJspResourceWrapper > ( list . size ( ) ) ; for ( CmsResource res : list ) { result . add ( CmsJspResourceWrapper . wrap ( cms , res ) ) ; } return result ;
public class CapsuleUtils { /** * Compute softmax along a given dimension */ public static SDVariable softmax ( SameDiff SD , SDVariable x , int dimension , int rank ) { } }
int [ ] permutation = ArrayUtil . range ( 0 , rank ) ; permutation [ 0 ] = dimension ; permutation [ dimension ] = 0 ; return SD . nn . softmax ( x . permute ( permutation ) ) . permute ( ArrayUtil . invertPermutation ( permutation ) ) ;
public class ElasticPoolsInner { /** * Returns a list of elastic pools in a server . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; ElasticPoolInner & gt ; object */ public Observable < List < ElasticPoolInner > > listByServerAsync ( String resourceGroupName , String serverName ) { } }
return listByServerWithServiceResponseAsync ( resourceGroupName , serverName ) . map ( new Func1 < ServiceResponse < List < ElasticPoolInner > > , List < ElasticPoolInner > > ( ) { @ Override public List < ElasticPoolInner > call ( ServiceResponse < List < ElasticPoolInner > > response ) { return response . body ( ) ; } } ) ;
public class DataFlagEncoder { /** * This method creates a Config map out of the PMap . Later on this conversion should not be * necessary when we read JSON . */ public WeightingConfig createWeightingConfig ( PMap pMap ) { } }
HashMap < String , Double > map = new HashMap < > ( DEFAULT_SPEEDS . size ( ) ) ; for ( Entry < String , Double > e : DEFAULT_SPEEDS . entrySet ( ) ) { map . put ( e . getKey ( ) , pMap . getDouble ( e . getKey ( ) , e . getValue ( ) ) ) ; } return new WeightingConfig ( getHighwaySpeedMap ( map ) ) ;
public class StaticFlowTemplate { /** * Checks if the { @ link FlowTemplate } is resolvable using the provided { @ link Config } object . A { @ link FlowTemplate } * is resolvable only if each of the { @ link JobTemplate } s in the flow is resolvable * @ param userConfig User supplied Config * @ return true if the { @ link FlowTemplate } is resolvable */ @ Override public boolean isResolvable ( Config userConfig , DatasetDescriptor inputDescriptor , DatasetDescriptor outputDescriptor ) throws SpecNotFoundException , JobTemplate . TemplateException { } }
Config inputDescriptorConfig = inputDescriptor . getRawConfig ( ) . atPath ( DatasetDescriptorConfigKeys . FLOW_EDGE_INPUT_DATASET_DESCRIPTOR_PREFIX ) ; Config outputDescriptorConfig = outputDescriptor . getRawConfig ( ) . atPath ( DatasetDescriptorConfigKeys . FLOW_EDGE_OUTPUT_DATASET_DESCRIPTOR_PREFIX ) ; userConfig = userConfig . withFallback ( inputDescriptorConfig ) . withFallback ( outputDescriptorConfig ) ; ConfigResolveOptions resolveOptions = ConfigResolveOptions . defaults ( ) . setAllowUnresolved ( true ) ; for ( JobTemplate template : this . jobTemplates ) { Config templateConfig = template . getResolvedConfig ( userConfig ) . resolve ( resolveOptions ) ; if ( ! template . getResolvedConfig ( userConfig ) . resolve ( resolveOptions ) . isResolved ( ) ) { return false ; } } return true ;
public class TfIdf { /** * 计算文档的tf - idf * @ param tf 词频 * @ param idf 倒排频率 * @ param normalization 正规化 * @ param < TERM > 词语类型 * @ return 一个词语 - > tf - idf的Map */ public static < TERM > Map < TERM , Double > tfIdf ( Map < TERM , Double > tf , Map < TERM , Double > idf , Normalization normalization ) { } }
Map < TERM , Double > tfIdf = new HashMap < TERM , Double > ( ) ; for ( TERM term : tf . keySet ( ) ) { Double TF = tf . get ( term ) ; if ( TF == null ) TF = 1. ; Double IDF = idf . get ( term ) ; if ( IDF == null ) IDF = 1. ; tfIdf . put ( term , TF * IDF ) ; } if ( normalization == Normalization . COSINE ) { double n = 0.0 ; for ( double x : tfIdf . values ( ) ) { n += x * x ; } n = Math . sqrt ( n ) ; for ( TERM term : tfIdf . keySet ( ) ) { tfIdf . put ( term , tfIdf . get ( term ) / n ) ; } } return tfIdf ;
public class HtmlEscape { /** * Perform an HTML5 level 1 ( XML - style ) < strong > escape < / strong > operation on a < tt > Reader < / tt > input , * writing results to a < tt > Writer < / tt > . * < em > Level 1 < / em > means this method will only escape the five markup - significant characters : * < tt > & lt ; < / tt > , < tt > & gt ; < / tt > , < tt > & amp ; < / tt > , < tt > & quot ; < / tt > and < tt > & # 39 ; < / tt > . It is called * < em > XML - style < / em > in order to link it with JSP ' s < tt > escapeXml < / tt > attribute in JSTL ' s * < tt > & lt ; c : out . . . / & gt ; < / tt > tags . * Note this method may < strong > not < / strong > produce the same results as { @ link # escapeHtml4Xml ( Reader , Writer ) } because * it will escape the apostrophe as < tt > & amp ; apos ; < / tt > , whereas in HTML 4 such NCR does not exist * ( the decimal numeric reference < tt > & amp ; # 39 ; < / tt > is used instead ) . * This method calls { @ link # escapeHtml ( Reader , Writer , HtmlEscapeType , HtmlEscapeLevel ) } with the following * preconfigured values : * < ul > * < li > < tt > type < / tt > : * { @ link org . unbescape . html . HtmlEscapeType # HTML5 _ NAMED _ REFERENCES _ DEFAULT _ TO _ DECIMAL } < / li > * < li > < tt > level < / tt > : * { @ link org . unbescape . html . HtmlEscapeLevel # LEVEL _ 1 _ ONLY _ MARKUP _ SIGNIFICANT } < / li > * < / ul > * This method is < strong > thread - safe < / strong > . * @ param reader the < tt > Reader < / tt > reading the text to be escaped . * @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will * be written at all to this writer if input is < tt > null < / tt > . * @ throws IOException if an input / output exception occurs * @ since 1.1.2 */ public static void escapeHtml5Xml ( final Reader reader , final Writer writer ) throws IOException { } }
escapeHtml ( reader , writer , HtmlEscapeType . HTML5_NAMED_REFERENCES_DEFAULT_TO_DECIMAL , HtmlEscapeLevel . LEVEL_1_ONLY_MARKUP_SIGNIFICANT ) ;
public class ProcessEngineConfigurationImpl { /** * JPA / / / / / */ protected void initJpa ( ) { } }
if ( jpaPersistenceUnitName != null ) { jpaEntityManagerFactory = JpaHelper . createEntityManagerFactory ( jpaPersistenceUnitName ) ; } if ( jpaEntityManagerFactory != null ) { sessionFactories . put ( EntityManagerSession . class , new EntityManagerSessionFactory ( jpaEntityManagerFactory , jpaHandleTransaction , jpaCloseEntityManager ) ) ; JPAVariableSerializer jpaType = ( JPAVariableSerializer ) variableSerializers . getSerializerByName ( JPAVariableSerializer . NAME ) ; // Add JPA - type if ( jpaType == null ) { // We try adding the variable right after byte serializer , if available int serializableIndex = variableSerializers . getSerializerIndexByName ( ValueType . BYTES . getName ( ) ) ; if ( serializableIndex > - 1 ) { variableSerializers . addSerializer ( new JPAVariableSerializer ( ) , serializableIndex ) ; } else { variableSerializers . addSerializer ( new JPAVariableSerializer ( ) ) ; } } }
public class KeyManagementServiceClient { /** * Restore a [ CryptoKeyVersion ] [ google . cloud . kms . v1 . CryptoKeyVersion ] in the * [ DESTROY _ SCHEDULED ] [ google . cloud . kms . v1 . CryptoKeyVersion . CryptoKeyVersionState . DESTROY _ SCHEDULED ] * state . * < p > Upon restoration of the CryptoKeyVersion , * [ state ] [ google . cloud . kms . v1 . CryptoKeyVersion . state ] will be set to * [ DISABLED ] [ google . cloud . kms . v1 . CryptoKeyVersion . CryptoKeyVersionState . DISABLED ] , and * [ destroy _ time ] [ google . cloud . kms . v1 . CryptoKeyVersion . destroy _ time ] will be cleared . * < p > Sample code : * < pre > < code > * try ( KeyManagementServiceClient keyManagementServiceClient = KeyManagementServiceClient . create ( ) ) { * CryptoKeyVersionName name = CryptoKeyVersionName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ KEY _ RING ] " , " [ CRYPTO _ KEY ] " , " [ CRYPTO _ KEY _ VERSION ] " ) ; * CryptoKeyVersion response = keyManagementServiceClient . restoreCryptoKeyVersion ( name . toString ( ) ) ; * < / code > < / pre > * @ param name The resource name of the [ CryptoKeyVersion ] [ google . cloud . kms . v1 . CryptoKeyVersion ] * to restore . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final CryptoKeyVersion restoreCryptoKeyVersion ( String name ) { } }
RestoreCryptoKeyVersionRequest request = RestoreCryptoKeyVersionRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return restoreCryptoKeyVersion ( request ) ;
public class LoopTagSupport { /** * Cycles through and discards up to ' n ' items from the iteration . * We only know " up to ' n ' " , not " exactly n , " since we stop cycling * if hasNext ( ) returns false or if we hit the ' end ' of the iteration . * Note : this does not update the iteration index , since this method * is intended as a behind - the - scenes operation . The index must be * updated separately . ( I don ' t really like this , but it ' s the simplest * way to support isLast ( ) without storing two separate inconsistent * indices . We need to ( a ) make sure hasNext ( ) refers to the next * item we actually * want * and ( b ) make sure the index refers to the * item associated with the * current * round , not the next one . * C ' est la vie . ) */ private void discard ( int n ) throws JspTagException { } }
/* * copy index so we can restore it , but we need to update it * as we work so that atEnd ( ) works */ int oldIndex = index ; while ( n -- > 0 && ! atEnd ( ) && hasNext ( ) ) { index ++ ; next ( ) ; } index = oldIndex ;
public class NotificationRemoteCallback { /** * Called when notification action view is clicked . * @ param remote * @ param entry * @ param act */ public void onClickRemoteAction ( NotificationRemote remote , NotificationEntry entry , NotificationEntry . Action act ) { } }
if ( DBG ) Log . v ( TAG , "onClickRemoteAction - " + entry . ID + ", " + act ) ;
public class DataWriterBuilder { /** * Tell the writer the output format of type { @ link WriterOutputFormat } . * @ param format output format of the writer * @ return this { @ link DataWriterBuilder } instance */ public DataWriterBuilder < S , D > writeInFormat ( WriterOutputFormat format ) { } }
this . format = format ; log . debug ( "writeInFormat : {}" , this . format ) ; return this ;
public class ParticleIO { /** * Create an XML element based on a configured range * @ param document * The document the element will be part of * @ param name * The name to give the new element * @ param range * The configured range * @ return A configured XML element on the range */ private static Element createRangeElement ( Document document , String name , ConfigurableEmitter . Range range ) { } }
Element element = document . createElement ( name ) ; element . setAttribute ( "min" , "" + range . getMin ( ) ) ; element . setAttribute ( "max" , "" + range . getMax ( ) ) ; element . setAttribute ( "enabled" , "" + range . isEnabled ( ) ) ; return element ;