signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class XmlNodeHelper { /** * 读取xml文件为dom4j的Docment文档 . * @ param xmlPath 定位xml文件的路径 , 如 : com / blinkfox / test . xml * @ return 返回dom4j文档 */ public static Document getDocument ( String xmlPath ) { } }
try { InputStream is = Thread . currentThread ( ) . getContextClassLoader ( ) . getResourceAsStream ( xmlPath ) ; return new SAXReader ( ) . read ( is ) ; } catch ( Exception e ) { throw new XmlParseException ( "读取或解析xml文件失败,xmlPath是:" + xmlPath , e ) ; }
public class BinderExtension {

    /**
     * Registers the given converter under the supplied key in the shared
     * {@code BINDING} registry (simple delegation).
     *
     * @param key       the source/target type pair identifying the binding
     * @param converter the binding to register for that key
     */
    public <S, T> void registerBinding(ConverterKey<S, T> key, Binding<S, T> converter) {
        BINDING.registerBinding(key, converter);
    }
}
public class V1InstanceCreator {

    /**
     * Creates a new project entity with a name, parent project, begin date, and
     * optional schedule, then persists it to the VersionOne system.
     *
     * @param name          name of the project
     * @param parentProject parent project for the created project
     * @param beginDate     start date of the created project
     * @param schedule      schedule that defines how this project's iterations are spaced
     * @param attributes    additional attributes for initialization of the project
     * @return a newly minted Project that exists in the VersionOne system
     */
    public Project project(String name, Project parentProject, DateTime beginDate, Schedule schedule, Map<String, Object> attributes) {
        Project project = new Project(instance);
        project.setName(name);
        project.setParentProject(parentProject);
        project.setBeginDate(beginDate);
        project.setSchedule(schedule);
        // extra attributes are applied before the save so they persist with the entity
        addAttributes(project, attributes);
        project.save();
        return project;
    }
}
public class RTMPConnection {

    /**
     * Returns whether or not a given stream id is valid: positive, previously
     * reserved on this connection, and not yet bound to an existing stream.
     *
     * @param streamId stream id
     * @return true if it's valid, false if it's invalid
     */
    public boolean isValidStreamId(Number streamId) {
        // NOTE(review): lookups below rely on autoboxed Double keys — assumes
        // reservedStreams / streams are keyed by Double; verify against their declarations.
        double d = streamId.doubleValue();
        if (log.isTraceEnabled()) {
            log.trace("Checking validation for streamId {}; reservedStreams: {}; streams: {}, connection: {}", new Object[] { d, reservedStreams, streams, sessionId });
        }
        if (d <= 0 || !reservedStreams.contains(d)) {
            log.warn("Stream id: {} was not reserved in connection {}", d, sessionId);
            // stream id has not been reserved before
            return false;
        }
        if (streams.get(d) != null) {
            // another stream already exists with this id
            log.warn("Another stream already exists with this id in streams {} in connection: {}", streams, sessionId);
            return false;
        }
        if (log.isTraceEnabled()) {
            log.trace("Stream id: {} is valid for connection: {}", d, sessionId);
        }
        return true;
    }
}
public class ClientCacheHelper {

    /**
     * Gets the cache configuration from the server by invoking the
     * {@code CacheGetConfig} operation on the partition that owns the cache name.
     *
     * @param client          the client instance which will send the operation to the server
     * @param cacheName       full cache name with prefixes
     * @param simpleCacheName pure cache name without any prefix
     * @param <K>             type of the key of the cache
     * @param <V>             type of the value of the cache
     * @return the cache configuration if it can be found
     */
    static <K, V> CacheConfig<K, V> getCacheConfig(HazelcastClientInstanceImpl client, String cacheName, String simpleCacheName) {
        ClientMessage request = CacheGetConfigCodec.encodeRequest(cacheName, simpleCacheName);
        try {
            // route the invocation to the partition owning the cache name
            int partitionId = client.getClientPartitionService().getPartitionId(cacheName);
            ClientInvocation clientInvocation = new ClientInvocation(client, request, cacheName, partitionId);
            Future<ClientMessage> future = clientInvocation.invoke();
            // blocks until the server responds
            ClientMessage responseMessage = future.get();
            SerializationService serializationService = client.getSerializationService();
            return deserializeCacheConfig(client, responseMessage, serializationService, clientInvocation);
        } catch (Exception e) {
            // rethrow preserves the original cause as an unchecked exception
            throw rethrow(e);
        }
    }
}
public class AddTagsToVaultRequestMarshaller {

    /**
     * Marshalls the given request object's account id, vault name and tags
     * into the protocol marshaller.
     *
     * @param addTagsToVaultRequest the request to marshall; must not be null
     * @param protocolMarshaller    the marshaller receiving each bound field
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(AddTagsToVaultRequest addTagsToVaultRequest, ProtocolMarshaller protocolMarshaller) {
        if (addTagsToVaultRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(addTagsToVaultRequest.getAccountId(), ACCOUNTID_BINDING);
            protocolMarshaller.marshall(addTagsToVaultRequest.getVaultName(), VAULTNAME_BINDING);
            protocolMarshaller.marshall(addTagsToVaultRequest.getTags(), TAGS_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CursorUtils {

    /**
     * Adds each item of the {@link android.database.Cursor} to the {@code collection}
     * parameter. Closes the cursor once completed, even if iteration fails.
     *
     * @param cursor     the cursor to drain; always closed on return
     * @param collection the target collection receiving every row
     * @return the same collection as the parameter
     * @see #consumeToArrayList(com.venmo.cursor.IterableCursor)
     * @see #consumeToLinkedList(com.venmo.cursor.IterableCursor)
     * @see #consumeToLinkedHashSet(com.venmo.cursor.IterableCursor)
     */
    public static <T, C extends Collection<T>> C consumeToCollection(IterableCursor<T> cursor, C collection) {
        try {
            for (T t : cursor) {
                collection.add(t);
            }
        } finally {
            // guarantee the cursor is released regardless of iteration outcome
            cursor.close();
        }
        return collection;
    }
}
public class CreateRouteResponseRequest {

    /**
     * Sets the route response parameters and returns this request for chaining.
     *
     * @param responseParameters the route response parameters
     * @return returns a reference to this object so that method calls can be chained together
     */
    public CreateRouteResponseRequest withResponseParameters(java.util.Map<String, ParameterConstraints> responseParameters) {
        setResponseParameters(responseParameters);
        return this;
    }
}
public class AbstractSaga {

    /**
     * Requests a timeout event to be sent back to this saga after the given delay.
     * Delegates to the four-argument overload with no name and no event payload.
     *
     * @param delay the amount of time to wait before the timeout fires
     * @param unit  the time unit of the delay
     * @return the id of the scheduled timeout
     */
    protected TimeoutId requestTimeout(final long delay, final TimeUnit unit) {
        return requestTimeout(delay, unit, null, null);
    }
}
public class AccountsInner {

    /**
     * Lists the Data Lake Store firewall rules within the specified Data Lake Store
     * account, following next-page links until the listing is exhausted.
     *
     * @param resourceGroupName the name of the Azure resource group that contains the Data Lake Store account
     * @param accountName       the name of the Data Lake Store account from which to get the firewall rules
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;FirewallRuleInner&gt; object
     */
    public Observable<ServiceResponse<Page<FirewallRuleInner>>> listFirewallRulesWithServiceResponseAsync(final String resourceGroupName, final String accountName) {
        return listFirewallRulesSinglePageAsync(resourceGroupName, accountName)
            .concatMap(new Func1<ServiceResponse<Page<FirewallRuleInner>>, Observable<ServiceResponse<Page<FirewallRuleInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<FirewallRuleInner>>> call(ServiceResponse<Page<FirewallRuleInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // last page: stop the recursion
                        return Observable.just(page);
                    }
                    // emit this page, then recursively fetch and emit the rest
                    return Observable.just(page).concatWith(listFirewallRulesNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
}
public class CmsPublish {

    /**
     * Publishes the given list of resources.<p>
     *
     * Builds a workplace report from the current request context, assembles a
     * publish list covering all given resources, and publishes that list.
     *
     * @param resources list of resources to publish
     * @throws CmsException if something goes wrong
     */
    public void publishResources(List<CmsResource> resources) throws CmsException {
        CmsObject cms = m_cms;
        I_CmsReport report = new CmsWorkplaceReport(cms.getRequestContext().getLocale(), cms.getRequestContext().getSiteRoot(), null);
        CmsPublishManager publishManager = OpenCms.getPublishManager();
        // getPublishListAll(cms, resources, directPublishSiblings=false, publishSubResources=true)
        // NOTE(review): presumably those boolean args carry those meanings — confirm against the API
        CmsPublishList publishList = publishManager.getPublishListAll(m_cms, resources, false, true);
        // NOTE(review): second OpenCms.getPublishManager() lookup is redundant with
        // the publishManager local above — likely the same singleton; confirm.
        OpenCms.getPublishManager().publishProject(m_cms, report, publishList);
    }
}
public class Configuration { /** * Get an configuration value as int value * @ param pKey the configuration key * @ return the value set or , if not , the default value */ public int getAsInt ( ConfigKey pKey ) { } }
int ret ; try { ret = Integer . parseInt ( get ( pKey ) ) ; } catch ( NumberFormatException exp ) { ret = Integer . parseInt ( pKey . getDefaultValue ( ) ) ; } return ret ;
public class SchemaStoreItemStream {

    /**
     * Called when the message store restores this stream: rebuilds the in-memory
     * index from any {@code SchemaStoreItem}s currently stored.
     *
     * @throws SevereMessageStoreException propagated from the superclass restore hook
     */
    public void eventRestored() throws SevereMessageStoreException {
        super.eventRestored();
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "eventRestored");
        try {
            // walk every stored item without locking and index the schema items
            NonLockingCursor cursor = newNonLockingItemCursor(null);
            AbstractItem item = cursor.next();
            while (item != null) {
                if (item instanceof SchemaStoreItem) {
                    addToIndex((SchemaStoreItem) item);
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(this, tc, "JSchema found in store: " + ((SchemaStoreItem) item).getSchema().getID());
                }
                item = cursor.next();
            }
        } catch (MessageStoreException e) {
            // record the failure for diagnostics; restore continues best-effort
            FFDCFilter.processException(e, "eventRestored", "108", this);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "eventRestored");
    }
}
public class AbstractMetric { /** * Ranks the set of items by associated score . * @ param userItems map with scores for each item * @ return the ranked list */ protected List < I > rankItems ( final Map < I , Double > userItems ) { } }
List < I > sortedItems = new ArrayList < > ( ) ; if ( userItems == null ) { return sortedItems ; } Map < Double , Set < I > > itemsByRank = new HashMap < > ( ) ; for ( Map . Entry < I , Double > e : userItems . entrySet ( ) ) { I item = e . getKey ( ) ; double pref = e . getValue ( ) ; if ( Double . isNaN ( pref ) ) { // we ignore any preference assigned as NaN continue ; } Set < I > items = itemsByRank . get ( pref ) ; if ( items == null ) { items = new HashSet < > ( ) ; itemsByRank . put ( pref , items ) ; } items . add ( item ) ; } List < Double > sortedScores = new ArrayList < > ( itemsByRank . keySet ( ) ) ; Collections . sort ( sortedScores , Collections . reverseOrder ( ) ) ; for ( double pref : sortedScores ) { List < I > sortedPrefItems = new ArrayList < > ( itemsByRank . get ( pref ) ) ; // deterministic output when ties in preferences : sort by item id Collections . sort ( sortedPrefItems , Collections . reverseOrder ( ) ) ; for ( I itemID : sortedPrefItems ) { sortedItems . add ( itemID ) ; } } return sortedItems ;
public class FileUtil {

    /**
     * Checks whether a regular file exists at the given path.
     *
     * @param filePath path to check; null is treated as "does not exist"
     * @return {@code true} if the path names an existing regular file,
     *         otherwise {@code false} (including for directories and null input)
     */
    public static boolean exist(String filePath) {
        // null guard: the original threw NullPointerException for null paths;
        // callers asking "does this file exist?" are better served by false.
        return filePath != null && new File(filePath).isFile();
    }
}
public class BasicStreamReader {

    /**
     * Checks that the input following is of the form {@code [S]* '=' [S]*}
     * (as per XML specs, production #25), i.e. an equals sign optionally
     * surrounded by white space.
     *
     * @param name   name of the pseudo-attribute the '=' must follow (used in error text)
     * @param eofMsg message used when end of input is reached while skipping
     * @return the first non-white-space character after the '='
     * @throws XMLStreamException if a character other than '=' follows the name
     */
    protected final char skipEquals(String name, String eofMsg) throws XMLStreamException {
        char c = getNextInCurrAfterWS(eofMsg);
        if (c != '=') {
            throwUnexpectedChar(c, " in xml declaration; expected '=' to follow pseudo-attribute '" + name + "'");
        }
        // trailing space?
        return getNextInCurrAfterWS(eofMsg);
    }
}
public class AVIMConversationEventHandler {

    /**
     * Notification handler invoked when chat-room members are removed from the
     * blocklist (unblocked). Default implementation only logs the event.
     *
     * @param client       the chat client
     * @param conversation the conversation
     * @param members      list of member ids that were unblocked
     * @param operator     id of the operator who performed the unblock
     */
    public void onMemberUnblocked(AVIMClient client, AVIMConversation conversation, List<String> members, String operator) {
        LOGGER.d("Notification --- " + operator + " unblocked members: " + StringUtil.join(", ", members));
    }
}
public class MoleculePropertyCalculator { /** * intern method to calculate the molecular formular for a list of molecules * @ param molecules * @ return * @ throws BuilderMoleculeException * @ throws CTKException * @ throws ChemistryException if the Chemistry Engine can not be initialized */ private static String calculateMolecularFormula ( List < AbstractMolecule > molecules ) throws BuilderMoleculeException , CTKException , ChemistryException { } }
Map < String , Integer > atomNumberMap = new TreeMap < String , Integer > ( ) ; for ( AbstractMolecule molecule : molecules ) { LOG . info ( molecule . getMolecule ( ) . toString ( ) ) ; atomNumberMap = generateAtomNumberMap ( molecule , atomNumberMap ) ; } LOG . info ( "GET map" ) ; StringBuilder sb = new StringBuilder ( ) ; Set < String > atoms = atomNumberMap . keySet ( ) ; for ( Iterator < String > i = atoms . iterator ( ) ; i . hasNext ( ) ; ) { String atom = i . next ( ) ; String num = atomNumberMap . get ( atom ) . toString ( ) ; if ( num . equals ( "1" ) ) { num = "" ; } sb . append ( atom ) ; sb . append ( num . toString ( ) ) ; } return sb . toString ( ) ;
public class CareWebShellEx {

    /**
     * Registers the plugin with the specified id and path. If a tree path is absent,
     * the plugin is associated with the tab itself. Delegates to the three-argument
     * overload with a null property bag.
     *
     * @param path format is &lt;tab name&gt;\&lt;tree node path&gt;
     * @param id   unique id of the plugin
     * @return container created for the plugin
     * @throws Exception unspecified exception
     */
    public ElementBase registerFromId(String path, String id) throws Exception {
        return registerFromId(path, id, null);
    }
}
public class HttpMessageConvertingResponseWriter {

    /**
     * Returns the more specific of the acceptable and the producible media types,
     * with the q-value of the former copied onto the producible type first.
     *
     * @param acceptType  the media type the client accepts
     * @param produceType the media type the handler can produce
     * @return whichever type is more specific per {@code MediaType.SPECIFICITY_COMPARATOR}
     */
    protected MediaType getMostSpecificMediaType(MediaType acceptType, MediaType produceType) {
        // carry the client's quality value over so the comparison is fair
        produceType = produceType.copyQualityValue(acceptType);
        return MediaType.SPECIFICITY_COMPARATOR.compare(acceptType, produceType) <= 0 ? acceptType : produceType;
    }
}
public class DBInstance {

    /**
     * Not supported.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setDomainMemberships(java.util.Collection)} or {@link #withDomainMemberships(java.util.Collection)} if
     * you want to override the existing values.
     *
     * @param domainMemberships not supported
     * @return returns a reference to this object so that method calls can be chained together
     */
    public DBInstance withDomainMemberships(DomainMembership... domainMemberships) {
        if (this.domainMemberships == null) {
            // lazily create the backing list sized to the incoming varargs
            setDomainMemberships(new java.util.ArrayList<DomainMembership>(domainMemberships.length));
        }
        for (DomainMembership ele : domainMemberships) {
            this.domainMemberships.add(ele);
        }
        return this;
    }
}
public class Descriptor {

    /**
     * Parses an SSTable filename of the form
     * {@code <ksname>-<cfname>-[tmp-][<version>-]<gen>-<component>}.
     *
     * @param directory the directory of the SSTable files (defaults to "." when null)
     * @param name      the name of the SSTable file
     * @return a Descriptor for the SSTable, and the Component remainder
     */
    public static Pair<Descriptor, String> fromFilename(Path directory, String name) {
        // tokenize the filename on the separator character
        StringTokenizer st = new StringTokenizer(name, String.valueOf(separator));
        String nexttok;
        // all filenames must start with keyspace and column family
        String ksname = st.nextToken();
        String cfname = st.nextToken();
        // optional temporary marker
        nexttok = st.nextToken();
        boolean temporary = false;
        if (nexttok.equals(SSTable.TEMPFILE_MARKER)) {
            temporary = true;
            nexttok = st.nextToken();
        }
        // optional version string; absent on legacy files
        Version version = Version.LEGACY;
        if (Version.validate(nexttok)) {
            version = new Version(nexttok);
            nexttok = st.nextToken();
        }
        int generation = Integer.parseInt(nexttok);
        // component suffix
        String component = st.nextToken();
        directory = directory != null ? directory : new Path(".");
        return Pair.create(new Descriptor(version, directory, ksname, cfname, generation, temporary), component);
    }
}
public class RegionInstanceGroupManagerClient {

    /**
     * Flags the specified instances in the managed instance group to be immediately
     * deleted. The instances are also removed from any target pools of which they were
     * a member. This method reduces the targetSize of the managed instance group by the
     * number of instances that you delete. The deleteInstances operation is marked DONE
     * if the deleteInstances request is successful; the underlying actions take
     * additional time. You must separately verify the status of the deleting action
     * with the listmanagedinstances method.
     *
     * <p>If the group is part of a backend service that has enabled connection draining,
     * it can take up to 60 seconds after the connection draining duration has elapsed
     * before the VM instance is removed or deleted.
     *
     * <p>You can specify a maximum of 1000 instances with this method per request.
     *
     * <p>Sample code:
     * <pre><code>
     * try (RegionInstanceGroupManagerClient regionInstanceGroupManagerClient = RegionInstanceGroupManagerClient.create()) {
     *   ProjectRegionInstanceGroupManagerName instanceGroupManager = ProjectRegionInstanceGroupManagerName.of("[PROJECT]", "[REGION]", "[INSTANCE_GROUP_MANAGER]");
     *   RegionInstanceGroupManagersDeleteInstancesRequest regionInstanceGroupManagersDeleteInstancesRequestResource = RegionInstanceGroupManagersDeleteInstancesRequest.newBuilder().build();
     *   Operation response = regionInstanceGroupManagerClient.deleteInstancesRegionInstanceGroupManager(instanceGroupManager, regionInstanceGroupManagersDeleteInstancesRequestResource);
     * }
     * </code></pre>
     *
     * @param instanceGroupManager name of the managed instance group
     * @param regionInstanceGroupManagersDeleteInstancesRequestResource the delete-instances request body
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final Operation deleteInstancesRegionInstanceGroupManager(ProjectRegionInstanceGroupManagerName instanceGroupManager, RegionInstanceGroupManagersDeleteInstancesRequest regionInstanceGroupManagersDeleteInstancesRequestResource) {
        // build the HTTP request wrapper, stringifying the resource name, and delegate
        DeleteInstancesRegionInstanceGroupManagerHttpRequest request =
            DeleteInstancesRegionInstanceGroupManagerHttpRequest.newBuilder()
                .setInstanceGroupManager(instanceGroupManager == null ? null : instanceGroupManager.toString())
                .setRegionInstanceGroupManagersDeleteInstancesRequestResource(regionInstanceGroupManagersDeleteInstancesRequestResource)
                .build();
        return deleteInstancesRegionInstanceGroupManager(request);
    }
}
public class ObjectFactory {

    /**
     * Creates an instance of {@link JAXBElement}{@code <}{@link CmisExtensionType}{@code >}
     * scoped to {@code DeleteObjectResponse}.
     *
     * @param value the extension payload to wrap
     * @return a JAXBElement wrapping the given value
     */
    @XmlElementDecl(namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/", name = "extension", scope = DeleteObjectResponse.class)
    public JAXBElement<CmisExtensionType> createDeleteObjectResponseExtension(CmisExtensionType value) {
        // NOTE(review): reuses _GetPropertiesExtension_QNAME — presumably all
        // "extension" elements share one QName constant in this generated class; confirm.
        return new JAXBElement<CmisExtensionType>(_GetPropertiesExtension_QNAME, CmisExtensionType.class, DeleteObjectResponse.class, value);
    }
}
public class HttpTracingClient {

    /**
     * Creates a new tracing {@link Client} decorator using the specified
     * {@link Tracing} instance and remote service name.
     *
     * @param tracing           the Brave tracing instance; its scope must use the request context
     * @param remoteServiceName name recorded for the remote service, may be null
     * @return a function wrapping a delegate client in an {@code HttpTracingClient}
     */
    public static Function<Client<HttpRequest, HttpResponse>, HttpTracingClient> newDecorator(Tracing tracing, @Nullable String remoteServiceName) {
        // fail fast if the tracing scope is not request-context based
        ensureScopeUsesRequestContext(tracing);
        return delegate -> new HttpTracingClient(delegate, tracing, remoteServiceName);
    }
}
public class TypeValidator {

    /**
     * Expects that the first type can be addressed with GETELEM syntax and that the
     * second type is the right type for an index into the first type.
     *
     * @param n         the GETELEM or COMPUTED_PROP node to issue warnings on
     * @param objType   the type we're indexing into (the left side of the GETELEM)
     * @param indexType the type inside the brackets of the GETELEM/COMPUTED_PROP
     */
    void expectIndexMatch(Node n, JSType objType, JSType indexType) {
        checkState(n.isGetElem() || n.isComputedProp(), n);
        // the index expression lives in a different child slot per node kind
        Node indexNode = n.isGetElem() ? n.getLastChild() : n.getFirstChild();
        if (indexType.isSymbolValueType()) {
            // For now, allow symbols definitions/access on any type. In the future only allow them
            // on the subtypes for which they are defined.
            return;
        }
        if (objType.isStruct()) {
            // structs disallow bracket access entirely
            report(JSError.make(indexNode, ILLEGAL_PROPERTY_ACCESS, "'[]'", "struct"));
        }
        if (objType.isUnknownType()) {
            expectStringOrNumberOrSymbol(indexNode, indexType, "property access");
        } else {
            ObjectType dereferenced = objType.dereference();
            if (dereferenced != null && dereferenced.getTemplateTypeMap().hasTemplateKey(typeRegistry.getObjectIndexKey())) {
                // object with a declared index signature: index must match its template type
                expectCanAssignTo(indexNode, indexType, dereferenced.getTemplateTypeMap().getResolvedTemplateType(typeRegistry.getObjectIndexKey()), "restricted index type");
            } else if (dereferenced != null && dereferenced.isArrayType()) {
                expectNumberOrSymbol(indexNode, indexType, "array access");
            } else if (objType.matchesObjectContext()) {
                expectStringOrSymbol(indexNode, indexType, "property access");
            } else {
                mismatch(n, "only arrays or objects can be accessed", objType, typeRegistry.createUnionType(ARRAY_TYPE, OBJECT_TYPE));
            }
        }
    }
}
public class ComapiChatClient {

    /**
     * Removes a listener for changes in participant lists in conversations.
     * A no-op when the listener was never registered.
     *
     * @param participantsListener listener for changes in participant lists in conversations
     */
    public void removeListener(final ParticipantsListener participantsListener) {
        // look up the wrapped messaging listener that was registered on the underlying client
        MessagingListener messagingListener = participantsListeners.get(participantsListener);
        if (messagingListener != null) {
            client.removeListener(messagingListener);
            participantsListeners.remove(participantsListener);
        }
    }
}
public class RecordingTransactionBase { /** * Logs GETs for returned Row / Columns . Requests that return no data will not be logged . */ @ Override public Map < Bytes , Map < Column , Bytes > > get ( Collection < Bytes > rows , Set < Column > columns ) { } }
Map < Bytes , Map < Column , Bytes > > rowColVal = txb . get ( rows , columns ) ; for ( Map . Entry < Bytes , Map < Column , Bytes > > rowEntry : rowColVal . entrySet ( ) ) { for ( Map . Entry < Column , Bytes > colEntry : rowEntry . getValue ( ) . entrySet ( ) ) { txLog . filteredAdd ( LogEntry . newGet ( rowEntry . getKey ( ) , colEntry . getKey ( ) , colEntry . getValue ( ) ) , filter ) ; } } return rowColVal ;
public class AsciiDocExporter { /** * Method that is called to write video . * @ param videoEntry video entry to write */ protected void writeVideo ( VideoEntry videoEntry ) throws IOException { } }
writer . append ( "video::" ) . append ( videoEntry . getLink ( ) ) . append ( "[]" ) . append ( NEW_LINE ) . append ( NEW_LINE ) ;
public class ValidatorUtils { /** * 是否手机号 * @ since 2.0.1 */ public static boolean matchMobile ( String str ) { } }
if ( StringUtils . isEmpty ( str ) ) return false ; return Pattern . matches ( REG_MOBILE , str . trim ( ) ) ;
public class StAXEncoder {

    /**
     * Writes an end tag to the output relying on the internal state of the
     * writer to determine the prefix and local name of the event.
     *
     * @throws XMLStreamException wrapping any failure from the underlying encoder
     * @see javax.xml.stream.XMLStreamWriter#writeEndElement()
     */
    public void writeEndElement() throws XMLStreamException {
        try {
            // flush any buffered attribute/text events before closing the element
            this.checkPendingATEvents();
            encoder.encodeEndElement();
        } catch (Exception e) {
            throw new XMLStreamException(e.getLocalizedMessage(), e);
        }
    }
}
public class IntStreamEx {

    /**
     * Returns a {@link String} which is the concatenation of the results of calling
     * {@link String#valueOf(int)} on each element of this stream, separated by the
     * specified delimiter, with the specified prefix and suffix in encounter order.
     *
     * <p>This is a terminal operation.
     *
     * @param delimiter the delimiter to be used between each element
     * @param prefix    the sequence of characters to be used at the beginning of the joined result
     * @param suffix    the sequence of characters to be used at the end of the joined result
     * @return the result of concatenation; for an empty input stream {@code prefix + suffix} is returned
     * @since 0.3.1
     */
    public String joining(CharSequence delimiter, CharSequence prefix, CharSequence suffix) {
        return collect(IntCollector.joining(delimiter, prefix, suffix));
    }
}
public class SearchParameter {

    /**
     * Encodes the given object into a list of qualified parameter lists using
     * this parameter's binder.
     *
     * @param theContext the FHIR context used during encoding
     * @param theObject  the value to encode
     * @return one {@code QualifiedParamList} per OR-group produced by the binder
     * @throws InternalErrorException propagated from the binder
     * @see ca.uhn.fhir.rest.param.IParameter#encode(java.lang.Object)
     */
    @Override
    public List<QualifiedParamList> encode(FhirContext theContext, Object theObject) throws InternalErrorException {
        ArrayList<QualifiedParamList> retVal = new ArrayList<QualifiedParamList>();
        // TODO: declaring method should probably have a generic type..
        @SuppressWarnings("rawtypes")
        IParamBinder paramBinder = myParamBinder;
        @SuppressWarnings("unchecked")
        List<IQueryParameterOr<?>> val = paramBinder.encode(theContext, theObject);
        for (IQueryParameterOr<?> nextOr : val) {
            retVal.add(new QualifiedParamList(nextOr, theContext));
        }
        return retVal;
    }
}
public class AResource {

    /**
     * Returns a stream output, depending on the query parameters. Parameters are
     * checked in priority order: COMMAND, then RUN, then QUERY; with none present
     * a plain GET is served.
     *
     * @param impl implementation
     * @param path path info
     * @return the streaming output for the first matching parameter
     */
    private StreamingOutput createOutput(final JaxRx impl, final ResourcePath path) {
        // check for command parameter
        String qu = path.getValue(QueryParameter.COMMAND);
        if (qu != null) {
            return impl.command(qu, path);
        }
        // check for run parameter
        qu = path.getValue(QueryParameter.RUN);
        if (qu != null) {
            return impl.run(qu, path);
        }
        // check for query parameter
        qu = path.getValue(QueryParameter.QUERY);
        if (qu != null) {
            return impl.query(qu, path);
        }
        // no parameter found
        return impl.get(path);
    }
}
public class ScalarOperation {

    /**
     * Builds the scaling operation for Doubles, i.e. multiplication by the factor,
     * with a neutral element of 1.0.
     *
     * @return {@link ScalarOperation} for Double
     */
    public static ScalarOperation<Double> doubleMultiplicationOp() {
        return new ScalarOperation<Double>(new ScalarFunction<Double>() {
            @Override
            public Double scale(Double a, double b) {
                return a * b;
            }
        }, 1d); // 1d is the identity for multiplication
    }
}
public class MapPoint { /** * Compute the bounds of this element . * This function does not update the internal * attribute replied by { @ link # getBoundingBox ( ) } */ @ Override @ Pure protected Rectangle2d calcBounds ( ) { } }
double x = this . getX ( ) ; double y = this . getY ( ) ; final double w = this . doubleFramed ? this . pointSize * 2 : this . pointSize ; final double h = w ; x -= w / 2. ; y -= h / 2. ; return new Rectangle2d ( x , y , w , h ) ;
public class Es6TemplateLiterals {

    /**
     * Converts {@code tag`a\tb${bar}`} to:
     * <pre>
     *   // A global (module) scoped variable
     *   var $jscomp$templatelit$0 = ["a\tb"];   // cooked string array
     *   $jscomp$templatelit$0.raw = ["a\\tb"];  // raw string array
     *   // A call to the tagging function
     *   tag($jscomp$templatelit$0, bar);
     * </pre>
     * See template_literal_test.js for more examples.
     *
     * @param t        the current traversal
     * @param n        a TAGGED_TEMPLATELIT node
     * @param addTypes whether JSTypes should be attached to the synthesized nodes
     */
    static void visitTaggedTemplateLiteral(NodeTraversal t, Node n, boolean addTypes) {
        AstFactory astFactory = t.getCompiler().createAstFactory();
        JSTypeRegistry registry = t.getCompiler().getTypeRegistry();
        // resolve the JSTypes used to annotate the synthesized AST (no-ops when addTypes is false)
        JSType stringType = createType(addTypes, registry, JSTypeNative.STRING_TYPE);
        JSType arrayType = createGenericType(addTypes, registry, JSTypeNative.ARRAY_TYPE, stringType);
        JSType templateArrayType = createType(addTypes, registry, JSTypeNative.I_TEMPLATE_ARRAY_TYPE);
        JSType voidType = createType(addTypes, registry, JSTypeNative.VOID_TYPE);
        JSType numberType = createType(addTypes, registry, JSTypeNative.NUMBER_TYPE);
        Node templateLit = n.getLastChild();
        Node cooked = createCookedStringArray(templateLit, templateArrayType, stringType, voidType, numberType);
        // Specify the type of the first argument to be ITemplateArray.
        JSTypeExpression nonNullSiteObject = new JSTypeExpression(JsDocInfoParser.parseTypeString("!ITemplateArray"), "<Es6TemplateLiterals.java>");
        JSDocInfoBuilder info = new JSDocInfoBuilder(false);
        info.recordType(nonNullSiteObject);
        Node siteObject = withType(IR.cast(cooked, info.build()), templateArrayType);
        // Create a variable representing the template literal.
        Node callsiteId = withType(IR.name(TEMPLATELIT_VAR + t.getCompiler().getUniqueNameIdSupplier().get()), templateArrayType);
        Node var = IR.var(callsiteId, siteObject).useSourceInfoIfMissingFromForTree(n);
        Node script = NodeUtil.getEnclosingScript(n);
        script.addChildToFront(var);
        t.reportCodeChange(var);
        // Define the "raw" property on the introduced variable.
        Node defineRaw;
        if (cookedAndRawStringsSame(templateLit)) {
            // The cooked and raw versions of the array are the same, so just call slice() on the
            // cooked array at runtime to make the raw array a copy of the cooked array.
            defineRaw = IR.exprResult(astFactory.createAssign(astFactory.createGetProp(callsiteId.cloneNode(), "raw"), astFactory.createCall(astFactory.createGetProp(callsiteId.cloneNode(), "slice")))).useSourceInfoIfMissingFromForTree(n);
        } else {
            // The raw string array is different, so we need to construct it.
            Node raw = createRawStringArray(templateLit, arrayType, stringType);
            defineRaw = IR.exprResult(astFactory.createAssign(astFactory.createGetProp(callsiteId.cloneNode(), "raw"), raw)).useSourceInfoIfMissingFromForTree(n);
        }
        script.addChildAfter(defineRaw, var);
        // Generate the call expression.
        Node call = withType(IR.call(n.removeFirstChild(), callsiteId.cloneNode()), n.getJSType());
        // substitution expressions become the remaining call arguments, in order
        for (Node child = templateLit.getFirstChild(); child != null; child = child.getNext()) {
            if (!child.isTemplateLitString()) {
                call.addChildToBack(child.removeFirstChild());
            }
        }
        call.useSourceInfoIfMissingFromForTree(templateLit);
        call.putBooleanProp(Node.FREE_CALL, !call.getFirstChild().isGetProp());
        n.replaceWith(call);
        t.reportCodeChange();
    }
}
public class DescribeAvailabilityZonesRequest {

    /**
     * Sets the names of the Availability Zones, replacing any previously set values.
     *
     * @param zoneNames the names of the Availability Zones; null clears the field
     */
    public void setZoneNames(java.util.Collection<String> zoneNames) {
        if (zoneNames == null) {
            this.zoneNames = null;
            return;
        }
        // defensive copy into the SDK's internal list type
        this.zoneNames = new com.amazonaws.internal.SdkInternalList<String>(zoneNames);
    }
}
public class BoxEntity { /** * Gets the id . * @ return the id of the entity . */ public String getId ( ) { } }
String id = getPropertyAsString ( FIELD_ID ) ; if ( id == null ) { return getPropertyAsString ( FIELD_ITEM_ID ) ; } return id ;
public class OrdersInner {
    /**
     * Creates or updates an order.
     *
     * @param deviceName The device name.
     * @param resourceGroupName The resource group name.
     * @param order The order to be created or updated.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<OrderInner> beginCreateOrUpdateAsync(String deviceName, String resourceGroupName, OrderInner order, final ServiceCallback<OrderInner> serviceCallback) {
        // Delegate to the ServiceResponse-returning async variant and adapt it
        // to a ServiceFuture that notifies the supplied callback.
        return ServiceFuture.fromResponse(beginCreateOrUpdateWithServiceResponseAsync(deviceName, resourceGroupName, order), serviceCallback);
    }
}
public class JvmTypesBuilder { /** * / * @ Nullable */ public JvmField toField ( /* @ Nullable */ EObject sourceElement , /* @ Nullable */ String name , /* @ Nullable */ JvmTypeReference typeRef , /* @ Nullable */ Procedure1 < ? super JvmField > initializer ) { } }
if ( sourceElement == null || name == null ) return null ; JvmField result = typesFactory . createJvmField ( ) ; result . setSimpleName ( name ) ; result . setVisibility ( JvmVisibility . PRIVATE ) ; result . setType ( cloneWithProxies ( typeRef ) ) ; associate ( sourceElement , result ) ; return initializeSafely ( result , initializer ) ;
public class CasConfigurationJasyptCipherExecutor { /** * Sets provider name . * @ param pName the p name */ public void setProviderName ( final String pName ) { } }
if ( StringUtils . isNotBlank ( pName ) ) { LOGGER . debug ( "Configured Jasypt provider" ) ; this . jasyptInstance . setProviderName ( pName ) ; }
public class CmsJobEditView { /** * Try to save the form values to the edited bean . < p > * @ return true if setting the information was successful */ public boolean trySaveToBean ( ) { } }
try { m_group . commit ( ) ; } catch ( Exception e ) { LOG . info ( e . getLocalizedMessage ( ) , e ) ; return false ; } m_job . setParameters ( readParams ( ) ) ; return true ;
public class SourceStreamManager {
    /**
     * Add a message in to the appropriate source stream. This will create a stream
     * if one does not exist and set any appropriate fields in the message.
     *
     * @param msgItem The message to add
     * @return Whether the message was added to a real stream or not
     *         (best-effort non-persistent messages are not streamed and return false)
     * @throws SIResourceException
     */
    public boolean addMessage(SIMPMessage msgItem) throws SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "addMessage", new Object[] { msgItem });
        // Get the JsMessage as we need to update the Guaranteed fields
        JsMessage jsMsg = msgItem.getMessage();
        StreamSet streamSet = getStreamSet();
        // Stamp the message with the stream ID
        msgItem.setGuaranteedStreamUuid(streamSet.getStreamID());
        // NOTE: no synchronization with flush necessary here since startFlush
        // is not supposed to be called until all competing producer threads have
        // exited.
        Reliability reliability = msgItem.getReliability();
        if (reliability == Reliability.BEST_EFFORT_NONPERSISTENT) {
            // Best-effort messages bypass the stream machinery entirely.
            addBestEffortMessage(msgItem);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "addMessage", false);
            return false;
        } else {
            SourceStream sourceStream = null;
            int priority = msgItem.getPriority();
            // Streams are keyed by (priority, reliability); create one lazily
            // under the streamSet lock.
            synchronized (streamSet) {
                sourceStream = (SourceStream) streamSet.getStream(priority, reliability);
                if (sourceStream == null) {
                    sourceStream = createStream(streamSet, priority, reliability,
                            streamSet.getPersistentData(priority, reliability), false);
                }
            }
            // Tick allocation and the guaranteed-value fields must be set
            // atomically with respect to other writers on this stream.
            synchronized (sourceStream) {
                long tick = this.messageProcessor.nextTick();
                // Set a unique id in the message if explicitly told to or
                // if one has not already been set
                if (msgItem.getRequiresNewId() || jsMsg.getSystemMessageId() == null) {
                    jsMsg.setSystemMessageSourceUuid(this.messageProcessor.getMessagingEngineUuid());
                    jsMsg.setSystemMessageValue(tick);
                    msgItem.setRequiresNewId(false);
                }
                // Value occupies a single tick; start tick follows the last
                // message added to this stream.
                jsMsg.setGuaranteedValueEndTick(tick);
                jsMsg.setGuaranteedValueValueTick(tick);
                jsMsg.setGuaranteedValueRequestedOnly(false);
                jsMsg.setGuaranteedValueStartTick(sourceStream.getLastMsgAdded() + 1);
                jsMsg.setGuaranteedValueCompletedPrefix(sourceStream.getCompletedPrefix());
                sourceStream.writeUncommitted(msgItem);
            } // end sync
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "addMessage", true);
            return true;
        }
    }
}
public class Gram { /** * Frames and wraps a token according to the GRAM " renew " protocol * for use in a GSI delegation handshake . The input token is framed with * a 4 byte big - endian token length header , and the resulting framed token * wrapped in SSL mode ( GSSContext ' s GSS _ MODE option set to MODE _ SSL ) * @ param c The context used to wrap the token * @ param token The unaltered output of the context ' s initDelegation * @ throws GSSException if an error occurs during token wrapping or if * context is insufficient * @ return a wrapped , framed token to send to globus job manager */ private static byte [ ] produceRenewToken ( ExtendedGSSContext c , byte [ ] token ) throws GSSException { } }
if ( ! GSIConstants . MODE_SSL . equals ( c . getOption ( GSSConstants . GSS_MODE ) ) ) { throw new GSSException ( GSSException . NO_CONTEXT ) ; } byte [ ] framedToken = new byte [ token . length + 4 ] ; SSLUtil . writeInt ( token . length , framedToken , 0 ) ; System . arraycopy ( token , 0 , framedToken , 4 , token . length ) ; return c . wrap ( framedToken , 0 , framedToken . length , null ) ;
public class BaseMessage { /** * Set the message data as a XML String . * @ return */ public boolean setXML ( String strXML ) { } }
Document doc = Util . convertXMLToDOM ( strXML ) ; return this . setDOM ( doc ) ;
public class AbstractEpollStreamChannel {
    /**
     * Splice from this {@link AbstractEpollStreamChannel} to another {@link FileDescriptor}.
     * The {@code offset} is the offset for the {@link FileDescriptor} and {@code len} is the
     * number of bytes to splice. If using {@link Integer#MAX_VALUE} it will splice until the
     * {@link ChannelFuture} was canceled or it was failed.
     *
     * Please note:
     * <ul>
     * <li>{@link EpollChannelConfig#getEpollMode()} must be {@link EpollMode#LEVEL_TRIGGERED} for this
     * {@link AbstractEpollStreamChannel}</li>
     * <li>the {@link FileDescriptor} will not be closed after the {@link ChannelFuture} is notified</li>
     * <li>this channel must be registered to an event loop or {@link IllegalStateException} will be thrown.</li>
     * </ul>
     */
    public final ChannelFuture spliceTo(final FileDescriptor ch, final int offset, final int len) {
        // Delegate to the promise-accepting overload with a freshly created promise.
        return spliceTo(ch, offset, len, newPromise());
    }
}
public class RotationAxisAligner { /** * Returns a reference vector for the alignment of Cn structures . * @ return reference vector */ private Vector3d getReferenceAxisCylicWithSubunitAlignment ( ) { } }
if ( rotationGroup . getPointGroup ( ) . equals ( "C2" ) ) { return referenceVector ; } // find subunit that extends the most in the xy - plane List < List < Integer > > orbits = getOrbitsByXYWidth ( ) ; // get the last orbit which is the widest List < Integer > widestOrbit = orbits . get ( orbits . size ( ) - 1 ) ; List < Point3d > centers = subunits . getCenters ( ) ; int subunit = widestOrbit . get ( 0 ) ; // calculate reference vector Vector3d refAxis = new Vector3d ( ) ; refAxis . sub ( centers . get ( subunit ) , subunits . getCentroid ( ) ) ; refAxis . normalize ( ) ; return refAxis ;
public class CountryReader { /** * Add the requested query string arguments to the Request . * @ param request Request to add query string arguments to */ private void addQueryParams ( final Request request ) { } }
if ( isoCode != null ) { request . addQueryParam ( "IsoCode" , isoCode ) ; } if ( continent != null ) { request . addQueryParam ( "Continent" , continent ) ; } if ( countryCode != null ) { request . addQueryParam ( "CountryCode" , countryCode ) ; } if ( lowRiskNumbersEnabled != null ) { request . addQueryParam ( "LowRiskNumbersEnabled" , lowRiskNumbersEnabled . toString ( ) ) ; } if ( highRiskSpecialNumbersEnabled != null ) { request . addQueryParam ( "HighRiskSpecialNumbersEnabled" , highRiskSpecialNumbersEnabled . toString ( ) ) ; } if ( highRiskTollfraudNumbersEnabled != null ) { request . addQueryParam ( "HighRiskTollfraudNumbersEnabled" , highRiskTollfraudNumbersEnabled . toString ( ) ) ; } if ( getPageSize ( ) != null ) { request . addQueryParam ( "PageSize" , Integer . toString ( getPageSize ( ) ) ) ; }
public class ResultIterator { /** * Id value in byte arr . * @ return the byte [ ] */ private byte [ ] idValueInByteArr ( ) { } }
Object entity = results . get ( results . size ( ) - 1 ) ; Object id = PropertyAccessorHelper . getId ( entity , entityMetadata ) ; String idName = ( ( AbstractAttribute ) entityMetadata . getIdAttribute ( ) ) . getJPAColumnName ( ) ; Class idClazz = ( ( AbstractAttribute ) entityMetadata . getIdAttribute ( ) ) . getBindableJavaType ( ) ; MetamodelImpl metaModel = ( MetamodelImpl ) kunderaMetadata . getApplicationMetadata ( ) . getMetamodel ( entityMetadata . getPersistenceUnit ( ) ) ; EmbeddableType keyObj = null ; ByteBuffer bytes = null ; // if the key attribute is composite if ( metaModel . isEmbeddable ( entityMetadata . getIdAttribute ( ) . getBindableJavaType ( ) ) ) { keyObj = metaModel . embeddable ( entityMetadata . getIdAttribute ( ) . getBindableJavaType ( ) ) ; Field embeddedField = getPartitionKeyField ( ) ; Attribute partitionKey = keyObj . getAttribute ( embeddedField . getName ( ) ) ; Object partitionKeyValue = PropertyAccessorHelper . getObject ( id , ( Field ) partitionKey . getJavaMember ( ) ) ; bytes = CassandraUtilities . toBytes ( partitionKeyValue , ( Field ) partitionKey . getJavaMember ( ) ) ; } else { bytes = query . getBytesValue ( idName , entityMetadata , id ) ; } return bytes . array ( ) ;
public class DockerRuleBuilder { /** * Keep stopped container after test . * @ deprecated Use { @ link # stopOptions ( StopOption . . . ) } instead . */ public DockerRuleBuilder keepContainer ( boolean keepContainer ) { } }
if ( keepContainer ) { this . stopOptions . setOptions ( StopOption . KEEP ) ; } else { this . stopOptions . setOptions ( StopOption . REMOVE ) ; } return this ;
public class CmsFileUtil { /** * Reads all bytes from the given input stream , conditionally closes the given input stream * and returns the result in an array . < p > * @ param in the input stream to read the bytes from * @ return the byte content of the input stream * @ param closeInputStream if true the given stream will be closed afterwards * @ throws IOException in case of errors in the underlying java . io methods used */ public static byte [ ] readFully ( InputStream in , boolean closeInputStream ) throws IOException { } }
if ( in instanceof ByteArrayInputStream ) { // content can be read in one pass return readFully ( in , in . available ( ) , closeInputStream ) ; } // copy buffer byte [ ] xfer = new byte [ 2048 ] ; // output buffer ByteArrayOutputStream out = new ByteArrayOutputStream ( xfer . length ) ; // transfer data from input to output in xfer - sized chunks . for ( int bytesRead = in . read ( xfer , 0 , xfer . length ) ; bytesRead >= 0 ; bytesRead = in . read ( xfer , 0 , xfer . length ) ) { if ( bytesRead > 0 ) { out . write ( xfer , 0 , bytesRead ) ; } } if ( closeInputStream ) { in . close ( ) ; } out . close ( ) ; return out . toByteArray ( ) ;
public class CopycatServer {
    /**
     * Starts listening the server.
     *
     * The internal (server-to-server) listener is started first; only on its
     * success is the optional client listener started. The returned future
     * completes once listening has started, on the server's own thread context.
     */
    private CompletableFuture<Void> listen() {
        CompletableFuture<Void> future = new CompletableFuture<>();
        // Bind on the server's thread context rather than the caller's thread.
        context.getThreadContext().executor().execute(() -> {
            internalServer.listen(cluster().member().serverAddress(), context::connectServer).whenComplete((internalResult, internalError) -> {
                if (internalError == null) {
                    // If the client address is different than the server address, start a separate client server.
                    if (clientServer != null) {
                        clientServer.listen(cluster().member().clientAddress(), context::connectClient).whenComplete((clientResult, clientError) -> {
                            // NOTE(review): clientError is ignored here — the future
                            // completes successfully even if the client listener
                            // failed to bind; confirm this is intentional.
                            started = true;
                            future.complete(null);
                        });
                    } else {
                        started = true;
                        future.complete(null);
                    }
                } else {
                    // Internal listener failure fails the whole startup.
                    future.completeExceptionally(internalError);
                }
            });
        });
        return future;
    }
}
public class SessionEventHandler {
    /**
     * Handles an event-engine event by starting a session purge.
     *
     * @see com.ibm.websphere.eventengine.EventHandler#handleEvent(com.ibm.websphere.eventengine.Event)
     */
    @Override
    public void handleEvent(Event event) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            Tr.event(this, tc, "Received event: " + event.getTopic());
        }
        // NOTE(review): the topic guard below is commented out, so a purge is
        // started for EVERY event delivered to this handler. Presumably this
        // handler is only registered for the purge topic — confirm against the
        // registration code before re-enabling the check.
        // if (event.getTopic().equals(PURGE_EVENT)) {
        this.sessionMgr.startPurge();
    }
}
public class SpaceResource { /** * Adds a space . * @ param spaceID * @ param storeID */ public void addSpace ( String spaceID , Map < String , AclType > userACLs , String storeID ) throws ResourceException , InvalidIdException { } }
IdUtil . validateSpaceId ( spaceID ) ; try { StorageProvider storage = storageProviderFactory . getStorageProvider ( storeID ) ; storage . createSpace ( spaceID ) ; waitForSpaceCreation ( storage , spaceID ) ; updateSpaceACLs ( spaceID , userACLs , storeID ) ; } catch ( NotFoundException e ) { throw new InvalidIdException ( e . getMessage ( ) ) ; } catch ( Exception e ) { storageProviderFactory . expireStorageProvider ( storeID ) ; throw new ResourceException ( "add space" , spaceID , e ) ; }
public class Util {
    /**
     * Compares two comparable objects where either may be null. Null is regarded as the
     * smallest value, and two nulls are considered equal.
     *
     * @param c1 the first comparable
     * @param c2 the second comparable
     * @return a negative integer, zero, or a positive integer if c1 is less than, equal
     *         to, or greater than c2
     */
    public static int compareAllowNull(final Comparable c1, final Comparable c2) {
        // Null sorts before every non-null value; two nulls compare equal.
        if (c1 == null) {
            return (c2 == null) ? 0 : -1;
        }
        if (c2 == null) {
            return 1;
        }
        return c1.compareTo(c2);
    }
}
public class JCudaDriver {
    /**
     * Returns a function handle.
     *
     * <pre>
     * CUresult cuModuleGetFunction(
     *     CUfunction *hfunc,
     *     CUmodule    hmod,
     *     const char *name)
     * </pre>
     *
     * Returns in {@code hfunc} the handle of the function of name {@code name}
     * located in module {@code hmod}. If no function of that name exists,
     * cuModuleGetFunction() returns CUDA_ERROR_NOT_FOUND.
     * <p>
     * Note that this function may also return error codes from previous,
     * asynchronous launches.
     *
     * @param hfunc Returned function handle
     * @param hmod Module to retrieve function from
     * @param name Name of function to retrieve
     * @return CUDA_SUCCESS, CUDA_ERROR_DEINITIALIZED, CUDA_ERROR_NOT_INITIALIZED,
     *         CUDA_ERROR_INVALID_CONTEXT, CUDA_ERROR_INVALID_VALUE,
     *         CUDA_ERROR_NOT_FOUND
     *
     * @see JCudaDriver#cuModuleGetGlobal
     * @see JCudaDriver#cuModuleGetTexRef
     * @see JCudaDriver#cuModuleLoad
     * @see JCudaDriver#cuModuleLoadData
     * @see JCudaDriver#cuModuleLoadDataEx
     * @see JCudaDriver#cuModuleLoadFatBinary
     * @see JCudaDriver#cuModuleUnload
     */
    public static int cuModuleGetFunction(CUfunction hfunc, CUmodule hmod, String name) {
        // Delegate to the native binding; checkResult translates error codes
        // according to the library's exception-checking configuration.
        return checkResult(cuModuleGetFunctionNative(hfunc, hmod, name));
    }
}
public class GuiMapReaderFactory { /** * Method to get the complete file path . * @ param file * @ return String file path */ private static String getFilePath ( String file ) { } }
logger . entering ( file ) ; String filePath = null ; URL fileURL = GuiMapReaderFactory . class . getClassLoader ( ) . getResource ( file ) ; if ( fileURL != null ) { filePath = fileURL . getPath ( ) ; } logger . exiting ( filePath ) ; return filePath ;
public class ChartComputator {
    /**
     * Checks if new viewport doesn't exceed max available viewport.
     *
     * Note the coordinate convention used throughout: {@code top > bottom}
     * (chart y-axis grows upwards), so height is {@code top - bottom}.
     * The candidate viewport is first widened to the minimum width/height,
     * then clamped inside {@code maxViewport}, and the listener is notified.
     */
    public void constrainViewport(float left, float top, float right, float bottom) {
        if (right - left < minViewportWidth) {
            // Minimum width - constrain horizontal zoom!
            right = left + minViewportWidth;
            // Re-anchor against the max viewport edges so the widened span stays inside.
            if (left < maxViewport.left) {
                left = maxViewport.left;
                right = left + minViewportWidth;
            } else if (right > maxViewport.right) {
                right = maxViewport.right;
                left = right - minViewportWidth;
            }
        }
        if (top - bottom < minViewportHeight) {
            // Minimum height - constrain vertical zoom!
            bottom = top - minViewportHeight;
            // Re-anchor vertically; remember top > bottom in this coordinate system.
            if (top > maxViewport.top) {
                top = maxViewport.top;
                bottom = top - minViewportHeight;
            } else if (bottom < maxViewport.bottom) {
                bottom = maxViewport.bottom;
                top = bottom + minViewportHeight;
            }
        }
        // Final clamp of every edge into the max viewport, then publish the change.
        currentViewport.left = Math.max(maxViewport.left, left);
        currentViewport.top = Math.min(maxViewport.top, top);
        currentViewport.right = Math.min(maxViewport.right, right);
        currentViewport.bottom = Math.max(maxViewport.bottom, bottom);
        viewportChangeListener.onViewportChanged(currentViewport);
    }
}
public class ThreadPoolController {
    /**
     * Detect and handle aberrant data points by resetting the statistics
     * in the throughput distribution.
     *
     * @param distribution the throughput distribution associated with throughput
     * @param throughput the observed throughput
     * @return true if the thread pool has been reset due to an aberrant workload
     */
    boolean handleOutliers(ThroughputDistribution distribution, double throughput) {
        // Negative throughput is nonsensical data: reset and report the pool as reset.
        if (throughput < 0.0) {
            resetStatistics(false);
            return true;
        } else if (throughput == 0.0) {
            // Zero throughput carries no signal; leave the statistics untouched.
            return false;
        }
        // A point more than 3 standard deviations from the mean is treated as an outlier.
        double zScore = distribution.getZScore(throughput);
        boolean currentIsOutlier = zScore <= -3.0 || zScore >= 3.0;
        // 8/10/2012: Reset the data for this thread count when we hit an outlier
        // 1/20/2018: refine the distribution reset criteria
        if (currentIsOutlier) {
            /*
             * Decide whether to reset the distribution, which throws away the historical
             * ewma for the poolSize and replaces it with the new throughput.
             * We will use 3 criteria, any of which is sufficient to reset the distribution:
             * 1) How much do we trust the historical data?
             *    If the historical ewma is the result of many observations with similar throughput,
             *    the standard deviation will be a small fraction of the ewma. If stddev/ewma is
             *    greater than 10%, then the historical data is not really strong, let's reset.
             * 2) How much different is the new tput from the ewma?
             *    If the new throughput is very very different from the ewma, that suggests the workload
             *    may have changed significantly, in which case the historical data would no longer be
             *    valid. If the throughput change is greater than 50% of ewma, let's reset.
             * 3) Is the throughput simply unstable?
             *    If every new datapoint at this poolSize is more than 3 standard deviations off the
             *    historical ewma, then we may as well follow the bouncing ball, rather than averaging
             *    points which do not seem to want to cluster around a mean. If we get N outliers in a
             *    row at this poolSize, let's reset.
             */
            double ewma = distribution.getMovingAverage();
            double stddev = distribution.getStddev();
            if ((stddev / ewma) > resetDistroStdDevEwmaRatio
                    || (Math.abs(throughput - ewma) / ewma) > resetDistroNewTputEwmaRatio
                    || distribution.incrementAndGetConsecutiveOutliers() >= resetDistroConsecutiveOutliers) {
                if (tc.isEventEnabled()) {
                    Tr.event(tc, "reset distribution", (" distribution: " + distribution + ", new throughput: " + throughput));
                }
                distribution.reset(throughput, controllerCycle);
                distributionReset = true;
            } else if (tc.isEventEnabled()) {
                Tr.event(tc, "outlier detected", (" distribution: " + distribution + ", new throughput: " + throughput));
            }
        } else {
            // In-range observation breaks any run of consecutive outliers.
            distribution.resetConsecutiveOutliers();
        }
        // Check for repeated outliers
        // 1/20/2018: increment only after resetting a distribution, not a single outlier event
        if (lastAction != LastAction.NONE) {
            if (distributionReset) {
                consecutiveOutlierAfterAdjustment++;
            } else {
                consecutiveOutlierAfterAdjustment = 0;
            }
        }
        // If we repeatedly hit an outlier after changing the pool size
        // we should reset the statistics
        if (consecutiveOutlierAfterAdjustment >= MAX_OUTLIER_AFTER_CHANGE_BEFORE_RESET) {
            resetThreadPool();
            return true;
        }
        return false;
    }
}
public class ManagedObject {
    /**
     * Convert serialized bytes back into a managed object.
     *
     * @param byteArrayInputStream from which the serializable ManagedObject is to be read.
     * @param objectManagerState of the objectManager reconstructing the ManagedObject.
     * @return ManagedObject that is deserialized.
     * @throws ObjectManagerException wrapping any I/O or class-resolution failure
     */
    protected static final ManagedObject restoreSerializedDefault(java.io.ByteArrayInputStream byteArrayInputStream,
                                                                  ObjectManagerState objectManagerState)
            throws ObjectManagerException {
        final String methodName = "restoreSerializedDefault";
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.entry(cclass, methodName, new Object[] { byteArrayInputStream, objectManagerState });
        ManagedObject managedObjectToReturn = null;
        try {
            // This type of ObjectInputStream ensures that any Tokens referenced by the ManagedObject will
            // be replaced with any equivalent Tokens already in memory.
            ManagedObjectInputStream objectInputStream = new ManagedObjectInputStream(byteArrayInputStream, objectManagerState);
            managedObjectToReturn = (ManagedObject) objectInputStream.readObject();
        } catch (java.io.IOException exception) {
            // No FFDC Code Needed.
            // The string below is an FFDC probe id (file:line:version) — do not change it.
            ObjectManager.ffdc.processException(cclass, methodName, exception, "1:524:1.34");
            if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                trace.exit(cclass, methodName, exception);
            // Wrap the raw IOException in the ObjectManager's exception hierarchy.
            throw new PermanentIOException(cclass, exception);
        } catch (java.lang.ClassNotFoundException exception) {
            // No FFDC Code Needed.
            ObjectManager.ffdc.processException(cclass, methodName, exception, "1:535:1.34");
            if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
                trace.exit(cclass, methodName, exception);
            throw new com.ibm.ws.objectManager.ClassNotFoundException(cclass, exception);
        } // catch java.lang.ClassNotFoundException.
        if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
            trace.exit(cclass, methodName, managedObjectToReturn);
        return managedObjectToReturn;
    }
}
public class NonVoltDBBackend {
    /**
     * Modifies a <i>query</i> containing the specified <i>queryPattern</i>, in
     * such a way that the backend database (e.g. PostgreSQL) results will match
     * VoltDB results, typically by adding a <i>prefix</i> and/or <i>suffix</i>,
     * either to individual <i>groups</i> within the <i>queryPattern</i>, or to
     * the <i>queryPattern</i> as a whole.
     *
     * @param query the query text (DDL, DML or DQL) to be transformed.
     * @param qt a QueryTransformer object, specifying the various options to
     *        be used to transform the query, e.g., a <i>queryPattern</i>,
     *        <i>prefix</i>, <i>suffix</i>, or one or more <i>groups</i>.
     *        For details of all options, see the <i>QueryTransformer</i> JavaDoc.
     * @return the <i>query</i>, transformed in the specified ways (possibly unchanged).
     * @throws NullPointerException if <i>query</i> or <i>qt</i> is <b>null</b>,
     *         or if the <i>qt</i>'s <i>queryPattern</i>, <i>initText</i>,
     *         <i>prefix</i>, or <i>suffix</i> is <b>null</b>.
     */
    protected String transformQuery(String query, QueryTransformer qt) {
        // The transformed query is built match-by-match via appendReplacement/appendTail.
        StringBuffer modified_query = new StringBuffer();
        Matcher matcher = qt.m_queryPattern.matcher(query);
        int count = 0;
        while (matcher.find()) {
            StringBuffer replaceText = new StringBuffer(qt.m_initialText);
            String wholeMatch = matcher.group();
            String lastGroup = wholeMatch;
            List<String> groups = new ArrayList<String>();
            if (qt.m_debugPrint) {
                if (count < 1) {
                    System.out.println("In NonVoltDBBackend.transformQuery,\n with query : " + query);
                    System.out.println(" QueryTransformer:\n " + qt);
                }
                System.out.println(" " + ++count + ".wholeMatch: " + wholeMatch);
            }
            // Collect the named groups; when not operating on the whole match,
            // each non-null group is transformed individually.
            for (String groupName : qt.m_groups) {
                String group = matcher.group(groupName);
                groups.add(group);
                if (qt.m_debugPrint) {
                    System.out.println(" group : " + group);
                }
                if (group == null) {
                    continue;
                } else if (!qt.m_useWholeMatch) {
                    String groupValue = group, suffixValue = qt.m_suffix;
                    // Check for the case where a multiplier & minimum are used
                    if (qt.m_multiplier != null && qt.m_minimum != null) {
                        groupValue = Long.toString(Math.round(Math.max(Integer.parseInt(group) * qt.m_multiplier, qt.m_minimum)));
                    }
                    // Check for the ending that indicates to use the alternate suffix
                    if (qt.m_altSuffix != null && group.toUpperCase().endsWith(qt.m_useAltSuffixAfter)) {
                        suffixValue = qt.m_altSuffix;
                    }
                    // If a specific column type was specified for this
                    // QueryTransformer, check that it matches
                    if (columnTypeMatches(qt.m_columnType, qt.m_allColumnsShouldMatchType, query, qt.m_debugPrint, groupValue)) {
                        // Make sure not to swallow up extra ')', in this group
                        replaceText.append(handleParens(groupValue, qt.m_prefix, suffixValue, qt.m_debugPrint));
                    } else {
                        // Since column type does not match, don't change anything
                        replaceText.append(wholeMatch);
                    }
                }
                lastGroup = group;
            }
            if (qt.m_debugPrint) {
                System.out.println(" lastGroup : " + lastGroup);
            }
            if (qt.m_useWholeMatch) {
                boolean noChangesNeeded = false;
                // If the matched string contains one of the strings in the
                // (possibly empty) list of excluded strings, then no changes
                // are needed
                if (qt.m_exclude != null) {
                    for (String excl : qt.m_exclude) {
                        if (wholeMatch.contains(excl)) {
                            noChangesNeeded = true;
                            if (qt.m_debugPrint) {
                                System.out.println(" noChangesNeeded, because wholeMatch contains excl:");
                                System.out.println(" wholeMatch: " + wholeMatch);
                                System.out.println(" m_exclude : " + qt.m_exclude);
                                System.out.println(" excl : " + excl);
                            }
                        }
                    }
                }
                // When columnType is specified, it means only modify queries
                // that use that type; so if the relevant column(s) are not of
                // the specified type, no changes are needed
                if (!noChangesNeeded) {
                    String[] groupsArray = new String[groups.size()];
                    groupsArray = groups.toArray(groupsArray);
                    if (!columnTypeMatches(qt.m_columnType, qt.m_allColumnsShouldMatchType, query, qt.m_debugPrint, groupsArray)) {
                        noChangesNeeded = true;
                        if (qt.m_debugPrint) {
                            System.out.println(" noChangesNeeded, because columnType(s) do not Match");
                        }
                    }
                }
                if (noChangesNeeded) {
                    // Make no changes to the query, if one of the excluded
                    // strings was found, or when the columnType is specified,
                    // but does not match the column type(s) found in this query
                    replaceText.append(wholeMatch);
                } else {
                    // Check for the case where the group (or the whole text) is to be replaced with replacementText
                    if (qt.m_replacementText != null) {
                        wholeMatch = wholeMatch.replace(lastGroup, qt.m_replacementText);
                    }
                    // Check for the case where each group is to be replaced using groupReplacementTexts
                    if (qt.m_groupReplacementTexts != null && !qt.m_groupReplacementTexts.isEmpty()) {
                        for (int i = 0; i < Math.min(groups.size(), qt.m_groupReplacementTexts.size()); i++) {
                            if (groups.get(i) != null && qt.m_groupReplacementTexts.get(i) != null) {
                                if (qt.m_debugPrint) {
                                    String instances = (qt.m_groupReplaceAll ? "all instances" : "first instance");
                                    System.out.println(" replacing " + instances + " of groups(i) " + "with groupReplacementTexts(i).");
                                    System.out.println(" wholeMatch: " + wholeMatch);
                                    System.out.println(" i : " + i);
                                    System.out.println(" groups [i]: " + groups.get(i));
                                    System.out.println(" gRepTxt[i]: " + qt.m_groupReplacementTexts.get(i));
                                }
                                if (qt.m_groupReplaceAll) {
                                    // Extra escaping to make sure that "\" remains as "\" and "$"
                                    // remains "$", despite replace's efforts to change them
                                    wholeMatch = wholeMatch.replace(groups.get(i), protectSpecialChars(qt.m_groupReplacementTexts.get(i), qt.m_debugPrint));
                                } else {
                                    wholeMatch = wholeMatch.replaceFirst(groups.get(i), protectSpecialChars(qt.m_groupReplacementTexts.get(i), qt.m_debugPrint));
                                }
                            }
                        }
                        if (qt.m_debugPrint) {
                            System.out.println(" wholeMatch : " + wholeMatch);
                        }
                    }
                    // Make sure not to swallow up extra ')', in whole match; and
                    // replace symbols like {foo} with the appropriate group values
                    replaceText.append(replaceGroupNameVariables(handleParens(wholeMatch, qt.m_prefix, qt.m_suffix, qt.m_debugPrint), qt.m_groups, groups, qt.m_debugPrint));
                }
            }
            if (qt.m_debugPrint) {
                System.out.println(" replaceText : " + replaceText);
            }
            // Extra escaping to make sure that "\" remains as "\" and "$"
            // remains "$", despite appendReplacement's efforts to change them
            matcher.appendReplacement(modified_query, protectSpecialChars(replaceText.toString(), qt.m_debugPrint));
        }
        matcher.appendTail(modified_query);
        if ((DEBUG || qt.m_debugPrint) && !query.equalsIgnoreCase(modified_query.toString())) {
            System.out.println("In NonVoltDBBackend.transformQuery,\n with query : " + query);
            System.out.println(" modified_query: " + modified_query);
        }
        return modified_query.toString();
    }
}
public class URIParsedResult { /** * Transforms a string that represents a URI into something more proper , by adding or canonicalizing * the protocol . */ private static String massageURI ( String uri ) { } }
uri = uri . trim ( ) ; int protocolEnd = uri . indexOf ( ':' ) ; if ( protocolEnd < 0 || isColonFollowedByPortNumber ( uri , protocolEnd ) ) { // No protocol , or found a colon , but it looks like it is after the host , so the protocol is still missing , // so assume http uri = "http://" + uri ; } return uri ;
public class HttpChannelConfig { /** * Check the configuration map for if we should skip adding the quote * to the cookie path attribute * @ param props */ private void parseSkipCookiePathQuotes ( Map < ? , ? > props ) { } }
// 738893 - Skip adding the quotes to the cookie path attribute String value = ( String ) props . get ( HttpConfigConstants . PROPNAME_SKIP_PATH_QUOTE ) ; if ( null != value ) { this . skipCookiePathQuotes = convertBoolean ( value ) ; if ( ( TraceComponent . isAnyTracingEnabled ( ) ) && ( tc . isEventEnabled ( ) ) ) { Tr . event ( tc , "Config: SkipCookiePathQuotes is " + shouldSkipCookiePathQuotes ( ) ) ; } }
public class StoppableThreadCache { /** * Registers a new thread for stopping */ public void registerThread ( StoppableThread thread ) { } }
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "registerThread" , thread ) ; synchronized ( this ) { _threadCache . add ( thread ) ; } if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "registerThread" ) ;
public class MethodGenFactory {
    /**
     * Builds a BCEL MethodGen for the method identified by the descriptor.
     * Returns null (rather than throwing) for abstract/native methods, for methods
     * deliberately skipped by the size heuristics below, or when construction fails.
     *
     * @see edu.umd.cs.findbugs.classfile.IAnalysisEngine#analyze(edu.umd.cs.findbugs.classfile.IAnalysisCache, java.lang.Object)
     */
    @Override
    public MethodGen analyze(IAnalysisCache analysisCache, MethodDescriptor descriptor) throws CheckedAnalysisException {
        Method method = getMethod(analysisCache, descriptor);
        if (method.getCode() == null) {
            // Abstract or native method: there is no bytecode to analyze.
            return null;
        }
        XMethod xmethod = XFactory.createXMethod(descriptor);
        // NOTE(review): "&& false" makes this branch dead code -- skipping of
        // invoke-dynamic methods is currently disabled. Confirm this is intentional.
        if (xmethod.usesInvokeDynamic() && false) {
            AnalysisContext.currentAnalysisContext().analysisSkippedDueToInvokeDynamic(xmethod);
            return null;
        }
        try {
            AnalysisContext analysisContext = AnalysisContext.currentAnalysisContext();
            JavaClass jclass = getJavaClass(analysisCache, descriptor.getClassDescriptor());
            ConstantPoolGen cpg = getConstantPoolGen(analysisCache, descriptor.getClassDescriptor());
            String methodName = method.getName();
            int codeLength = method.getCode().getCode().length;
            String superclassName = jclass.getSuperclassName();
            // Huge enum static initializers are skipped unconditionally: they are
            // generated code and blow up downstream analyses.
            if (codeLength > 6000 && Const.STATIC_INITIALIZER_NAME.equals(methodName)
                    && "java.lang.Enum".equals(superclassName)) {
                analysisContext.getLookupFailureCallback()
                        .reportSkippedAnalysis(new JavaClassAndMethod(jclass, method).toMethodDescriptor());
                return null;
            }
            // When SKIP_HUGE_METHODS is set, also skip any >6000-byte method, and
            // <clinit>/getContents bodies over 2000 bytes (typically generated tables).
            if (analysisContext.getBoolProperty(AnalysisFeatures.SKIP_HUGE_METHODS)) {
                if (codeLength > 6000
                        || (Const.STATIC_INITIALIZER_NAME.equals(methodName) || "getContents".equals(methodName))
                                && codeLength > 2000) {
                    analysisContext.getLookupFailureCallback()
                            .reportSkippedAnalysis(new JavaClassAndMethod(jclass, method).toMethodDescriptor());
                    return null;
                }
            }
            return new MethodGen(method, jclass.getClassName(), cpg);
        } catch (Exception e) {
            // Construction failures are logged and swallowed; callers treat null as "skip".
            AnalysisContext.logError("Error constructing methodGen", e);
            return null;
        }
    }
}
public class ReflectionUtils { /** * Returns the setter method associated with the object ' s field . * This method handles any autoboxing / unboxing of the argument passed to the setter ( e . g . if the setter type is a * primitive { @ code int } but the argument passed to the setter is an { @ code Integer } ) by looking for a setter with * the same type , and failing that checking for a setter with the corresponding primitive / wrapper type . * It also allows for an argument type that is a subclass or implementation of the setter type ( when the setter type * is an { @ code Object } or { @ code interface } respectively ) . * @ param object * the object * @ param fieldName * the name of the field * @ param argumentType * the type to be passed to the setter * @ return the setter method * @ throws NullPointerException * if object , fieldName or fieldType is null * @ throws SuperCsvReflectionException * if the setter doesn ' t exist or is not visible */ public static Method findSetter ( final Object object , final String fieldName , final Class < ? > argumentType ) { } }
if ( object == null ) { throw new NullPointerException ( "object should not be null" ) ; } else if ( fieldName == null ) { throw new NullPointerException ( "fieldName should not be null" ) ; } else if ( argumentType == null ) { throw new NullPointerException ( "argumentType should not be null" ) ; } final String setterName = getMethodNameForField ( SET_PREFIX , fieldName ) ; final Class < ? > clazz = object . getClass ( ) ; // find a setter compatible with the supplied argument type Method setter = findSetterWithCompatibleParamType ( clazz , setterName , argumentType ) ; // if that failed , try the corresponding primitive / wrapper if it ' s a type that can be autoboxed / unboxed if ( setter == null && AUTOBOXING_CONVERTER . containsKey ( argumentType ) ) { setter = findSetterWithCompatibleParamType ( clazz , setterName , AUTOBOXING_CONVERTER . get ( argumentType ) ) ; } if ( setter == null ) { throw new SuperCsvReflectionException ( String . format ( "unable to find method %s(%s) in class %s - check that the corresponding nameMapping element matches the field name in the bean, " + "and the cell processor returns a type compatible with the field" , setterName , argumentType . getName ( ) , clazz . getName ( ) ) ) ; } return setter ;
public class ClassPathUtils { /** * Return the classes from the given package and subpackages using the supplied classloader * @ param classLoader classloader to be used * @ param pkg package to scan * @ return set of found classes * @ throws IOException */ public static Set < Class < ? > > scanPackage ( ClassLoader classLoader , String pkg ) throws IOException { } }
Reflections reflections = new Reflections ( new ConfigurationBuilder ( ) . addUrls ( ClasspathHelper . forPackage ( pkg , classLoader ) ) . addClassLoader ( classLoader ) . setScanners ( new SubTypesScanner ( false ) ) ) ; Set < Class < ? > > classes = new HashSet < Class < ? > > ( ) ; for ( String typeNames : reflections . getStore ( ) . get ( SubTypesScanner . class . getSimpleName ( ) ) . values ( ) ) { Class < ? > clazz = safeClassForName ( classLoader , typeNames ) ; if ( clazz != null ) { classes . add ( clazz ) ; } } return classes ;
public class BoxApiFile { /** * Gets a request that downloads a thumbnail to a target file * @ param target target file to download to , target can only be a file * @ param fileId id of file to download the thumbnail of * @ return request to download a thumbnail to a target file * @ throws IOException throws FileNotFoundException if target file does not exist . */ public BoxRequestsFile . DownloadThumbnail getDownloadThumbnailRequest ( File target , String fileId ) throws IOException { } }
if ( ! target . exists ( ) ) { throw new FileNotFoundException ( ) ; } if ( target . isDirectory ( ) ) { throw new RuntimeException ( "This endpoint only supports files and does not support directories" ) ; } BoxRequestsFile . DownloadThumbnail request = new BoxRequestsFile . DownloadThumbnail ( fileId , target , getThumbnailFileDownloadUrl ( fileId ) , mSession ) ; return request ;
public class HandleHelper { /** * 处理单条数据 * @ param rs 数据集 * @ return 每一行的Entity * @ throws SQLException SQL执行异常 */ public static Entity handleRow ( ResultSet rs ) throws SQLException { } }
final ResultSetMetaData meta = rs . getMetaData ( ) ; final int columnCount = meta . getColumnCount ( ) ; return handleRow ( columnCount , meta , rs ) ;
public class Element { /** * ( non - Javadoc ) * @ see qc . automation . framework . widget . IElement # getText ( ) */ @ Override public String getText ( ) throws WidgetException { } }
try { WebElement webElement = findElement ( ) ; highlight ( HIGHLIGHT_MODES . GET ) ; return webElement . getText ( ) ; } catch ( Exception e ) { throw new WidgetException ( "Error while fetching text" , locator , e ) ; }
public class MultiChoiceListPreference { /** * Return the indices of the entries , which correspond to specific values . * @ param values * A set , which contains the values of the entries , whose indices should be returned , as * an instance of the type { @ link Set } * @ return A list , which contains the indices of the entries , the given values correspond to , as * an instance of the type { @ link List } */ private List < Integer > indicesOf ( @ Nullable final Set < String > values ) { } }
List < Integer > indices = new ArrayList < > ( ) ; if ( values != null && getEntryValues ( ) != null ) { for ( String value : values ) { int index = indexOf ( value ) ; if ( index >= 0 ) { indices . add ( index ) ; } } } return indices ;
public class PDFPageHelper {
    /**
     * Creates a String (e.g. <b>3-9</b> or <b>3</b>) based on the most left and
     * most right page annotation, detected via
     * {@link #getPageFromAnnotation(de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.SSpan)}.
     *
     * @return start and end page separated by {@link #PAGE_NUMBER_SEPERATOR}, or a
     *         single page when no distinct end page exists; null when no spans exist.
     */
    public String getMostLeftAndMostRightPageAnno() {
        // No spans collected at all: nothing to report.
        if (sspans == null || sspans.isEmpty()) {
            return null;
        }
        // Left-most span: first entry of the inner map stored under the smallest outer key.
        // NOTE(review): assumes that inner map is non-empty, otherwise firstKey() throws -- confirm.
        TreeMap<Integer, SSpan> rightTokIdxToSSpan = sspans.get(sspans.firstKey());
        SSpan leftSpan = rightTokIdxToSSpan.get(rightTokIdxToSSpan.firstKey());
        SSpan rightSpan = null;
        Integer rightIdx = null;
        // Scan every inner map for the largest right token index; the "<=" comparison
        // means that on ties the last-seen entry wins.
        for (TreeMap<Integer, SSpan> leftIdxValue : sspans.values()) {
            for (Map.Entry<Integer, SSpan> rightIdxEntry : leftIdxValue.entrySet()) {
                if (rightIdx == null || rightIdx <= rightIdxEntry.getKey()) {
                    rightIdx = rightIdxEntry.getKey();
                    rightSpan = rightIdxEntry.getValue();
                }
            }
        }
        if (rightIdx != null) {
            return getPageFromAnnotation(leftSpan) + PAGE_NUMBER_SEPERATOR + getPageFromAnnotation(rightSpan);
        }
        // Fallback: only reachable when every inner map was empty, so no right index was found.
        return getPageFromAnnotation(leftSpan);
    }
}
public class CourierTemplateSpecGenerator { /** * Generate { @ link com . linkedin . pegasus . generator . spec . ClassTemplateSpec } from the specified { @ link com . linkedin . data . schema . DataSchema } and its location . */ public ClassTemplateSpec generate ( DataSchema schema , DataSchemaLocation location ) { } }
pushCurrentLocation ( location ) ; final ClassTemplateSpec result = generate ( schema ) ; popCurrentLocation ( ) ; return result ;
public class Configuration {
    /**
     * Loads configuration from |file| as UTF-8 properties. Errors are reported to
     * stderr and swallowed (best-effort load), matching the original contract.
     *
     * Fix: the original opened a FileInputStream before wrapping it in a reader and
     * only closed the reader in finally -- if the reader construction failed (or an
     * error occurred before the reader was assigned), the underlying stream leaked.
     * The stream is now closed explicitly in that case.
     *
     * @param config Configuration container
     * @param f File to be loaded
     */
    protected static void loadConfig(final Properties config, final File f) {
        FileInputStream in = null;
        InputStreamReader r = null;
        try {
            in = new FileInputStream(f);
            r = new InputStreamReader(in, "UTF-8");
            config.load(r);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (r != null) {
                // Closing the reader also closes the wrapped stream.
                try {
                    r.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else if (in != null) {
                // Reader was never constructed: close the raw stream directly.
                try {
                    in.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
public class CollectUtils {
    /**
     * Creates a new mutable {@link java.util.LinkedHashMap} containing all entries of
     * the given map, preserving its iteration order.
     *
     * @param m source map to copy
     * @param <K> key type
     * @param <V> value type
     * @return an independent insertion-ordered copy of {@code m}
     */
    public static <K, V> Map<K, V> newLinkedHashMap(Map<? extends K, ? extends V> m) {
        final Map<K, V> copy = new LinkedHashMap<K, V>(m);
        return copy;
    }
}
public class WebSocketContext {
    /**
     * Sends the message to all connections labeled with the specified tag.
     *
     * @param message the message to be sent
     * @param tag the tag identifying the target connections
     * @param excludeSelf whether the connection of this context should be excluded
     * @return this context
     */
    public WebSocketContext sendToTagged(String message, String tag, boolean excludeSelf) {
        // Delegates to the generic broadcast helper, resolving recipients via the tag registry.
        return sendToConnections(message, tag, manager.tagRegistry(), excludeSelf);
    }
}
public class CPDefinitionSpecificationOptionValuePersistenceImpl { /** * Removes the cp definition specification option value where CPDefinitionId = & # 63 ; and CPDefinitionSpecificationOptionValueId = & # 63 ; from the database . * @ param CPDefinitionId the cp definition ID * @ param CPDefinitionSpecificationOptionValueId the cp definition specification option value ID * @ return the cp definition specification option value that was removed */ @ Override public CPDefinitionSpecificationOptionValue removeByC_CSOVI ( long CPDefinitionId , long CPDefinitionSpecificationOptionValueId ) throws NoSuchCPDefinitionSpecificationOptionValueException { } }
CPDefinitionSpecificationOptionValue cpDefinitionSpecificationOptionValue = findByC_CSOVI ( CPDefinitionId , CPDefinitionSpecificationOptionValueId ) ; return remove ( cpDefinitionSpecificationOptionValue ) ;
public class TieredBlockStore {
    /**
     * Checks if a block id is available for a new temp block. This method must be
     * enclosed by {@link #mMetadataLock}.
     *
     * @param blockId the id of block
     * @throws BlockAlreadyExistsException if block id already exists, either as a
     *         temp block (TEMP_BLOCK_ID_EXISTS) or as a committed block
     *         (TEMP_BLOCK_ID_COMMITTED) -- the check order determines which message wins
     */
    private void checkTempBlockIdAvailable(long blockId) throws BlockAlreadyExistsException {
        // Temp-block collision is checked first so its more specific message is reported.
        if (mMetaManager.hasTempBlockMeta(blockId)) {
            throw new BlockAlreadyExistsException(ExceptionMessage.TEMP_BLOCK_ID_EXISTS, blockId);
        }
        if (mMetaManager.hasBlockMeta(blockId)) {
            throw new BlockAlreadyExistsException(ExceptionMessage.TEMP_BLOCK_ID_COMMITTED, blockId);
        }
    }
}
public class InternalArrayIterate { /** * Implemented to avoid megamorphic call on castProcedure . */ private static < T > void batchFastListCollectIf ( T [ ] array , int start , int end , FastListCollectIfProcedure < T , ? > castProcedure ) { } }
for ( int i = start ; i < end ; i ++ ) { castProcedure . value ( array [ i ] ) ; }
public class Matrix4d {
    /**
     * Set <code>this</code> matrix to <code>T * R</code>, where <code>T</code> is a
     * translation by <code>(tx, ty, tz)</code> and <code>R</code> is the rotation
     * (and possibly scaling) given by the quaternion <code>(qx, qy, qz, qw)</code>.
     * Transforming a vector applies the rotation first, then the translation.
     * Right-handed systems rotate counter-clockwise around the axis when viewing
     * along the negative axis direction; left-handed systems rotate clockwise.
     * Equivalent to <code>translation(tx, ty, tz).rotate(quat)</code>.
     *
     * @see #translation(double, double, double)
     * @see #rotate(Quaterniondc)
     * @param tx the number of units by which to translate the x-component
     * @param ty the number of units by which to translate the y-component
     * @param tz the number of units by which to translate the z-component
     * @param qx the x-coordinate of the vector part of the quaternion
     * @param qy the y-coordinate of the vector part of the quaternion
     * @param qz the z-coordinate of the vector part of the quaternion
     * @param qw the scalar part of the quaternion
     * @return this
     */
    public Matrix4d translationRotate(double tx, double ty, double tz, double qx, double qy, double qz, double qw) {
        // Products of quaternion components, reused across the rotation entries below.
        double w2 = qw * qw;
        double x2 = qx * qx;
        double y2 = qy * qy;
        double z2 = qz * qz;
        double zw = qz * qw;
        double xy = qx * qy;
        double xz = qx * qz;
        double yw = qy * qw;
        double yz = qy * qz;
        double xw = qx * qw;
        // Standard quaternion-to-rotation-matrix expansion; terms like "xy + zw + zw + xy"
        // are the 2*(xy+zw) forms written out additively (evaluation order deliberate).
        this.m00 = w2 + x2 - z2 - y2;
        this.m01 = xy + zw + zw + xy;
        this.m02 = xz - yw + xz - yw;
        this.m10 = -zw + xy - zw + xy;
        this.m11 = y2 - z2 + w2 - x2;
        this.m12 = yz + yz + xw + xw;
        this.m20 = yw + xz + xz + yw;
        this.m21 = yz + yz - xw - xw;
        this.m22 = z2 - y2 - x2 + w2;
        // Translation part and homogeneous row.
        this.m30 = tx;
        this.m31 = ty;
        this.m32 = tz;
        this.m33 = 1.0;
        this.properties = PROPERTY_AFFINE | PROPERTY_ORTHONORMAL;
        return this;
    }
}
public class RuntimeExceptionsFactory {
    /**
     * Constructs and initializes a new {@link NullPointerException} with the given
     * {@link String message} formatted with the given {@link Object[] arguments}.
     *
     * @param message {@link String} describing the {@link NullPointerException exception}
     * @param args {@link Object[] arguments} used to replace format placeholders in the message
     * @return a new {@link NullPointerException} with the given formatted message
     * @see #newNullPointerException(Throwable, String, Object...)
     * @see java.lang.NullPointerException
     */
    public static NullPointerException newNullPointerException(String message, Object... args) {
        // Delegate to the cause-accepting overload with no cause.
        return newNullPointerException(null, message, args);
    }
}
public class TextBox { /** * Sets the current text mask , meaning the substitute to draw instead of the text inside the { @ code TextBox } . * This is normally used for password input fields so the password isn ' t shown * @ param mask New text mask or { @ code null } if there is no mask * @ return Itself */ public TextBox setMask ( Character mask ) { } }
if ( mask != null && TerminalTextUtils . isCharCJK ( mask ) ) { throw new IllegalArgumentException ( "Cannot use a CJK character as a mask" ) ; } this . mask = mask ; invalidate ( ) ; return this ;
public class ConnectionPoolManagerImpl { /** * Closes all connection pools . This method overrides * < code > shutdownModule ( ) < / code > . * @ throws ModuleShutdownException * If the close operation for the connection pool ( s ) fails . */ @ Override public void shutdownModule ( ) throws ModuleShutdownException { } }
super . shutdownModule ( ) ; for ( Map . Entry < String , ConnectionPool > e : h_ConnectionPools . entrySet ( ) ) { e . getValue ( ) . close ( ) ; }
public class PathManagerService { /** * Install an { @ code Service < String > } for the given path . * @ param serviceTarget the service target associated with the management operation making this request . Cannot be { @ code null } * @ param pathName the name of the relevant path . Cannot be { @ code null } * @ param path the value of the path within the model . This is either an absolute path or * the relative portion of the path . Cannot be { @ code null } * @ param possiblyAbsolute { @ code true } if the path may be absolute and a check should be performed before installing * a service variant that depends on the service associated with { @ code relativeTo } * @ param relativeTo the name of the path this path is relative to . If { @ code null } this is an absolute path * @ return the service controller for the { @ code Service < String > } */ final ServiceController < ? > addRelativePathService ( final ServiceTarget serviceTarget , final String pathName , final String path , final boolean possiblyAbsolute , final String relativeTo ) { } }
if ( possiblyAbsolute && AbstractPathService . isAbsoluteUnixOrWindowsPath ( path ) ) { return addAbsolutePathService ( serviceTarget , pathName , path ) ; } else { return RelativePathService . addService ( AbstractPathService . pathNameOf ( pathName ) , path , possiblyAbsolute , relativeTo , serviceTarget ) ; }
public class SVNCommands { /** * Performs a SVN Checkout of the given URL to the given directory * @ param url The SVN URL that should be checked out * @ param directory The location where the working copy is created . * @ param user The SVN user or null if the default user from the machine should be used * @ param pwd The SVN password or null if the default user from the machine should be used @ return The contents of the file . * @ return A stream with output from the command , should be closed by the caller * @ throws IOException Execution of the SVN sub - process failed or the * sub - process returned a exit value indicating a failure */ public static InputStream checkout ( String url , File directory , String user , String pwd ) throws IOException { } }
if ( ! directory . exists ( ) && ! directory . mkdirs ( ) ) { throw new IOException ( "Could not create new working copy directory at " + directory ) ; } CommandLine cmdLine = new CommandLine ( SVN_CMD ) ; cmdLine . addArgument ( "co" ) ; addDefaultArguments ( cmdLine , user , pwd ) ; cmdLine . addArgument ( url ) ; cmdLine . addArgument ( directory . toString ( ) ) ; // allow up to two hour for new checkouts return ExecutionHelper . getCommandResult ( cmdLine , directory , - 1 , 2 * 60 * 60 * 1000 ) ;
public class SizeLimitableBlockingQueue {
    /**
     * Timed offer that additionally honors the optional external size limit.
     * With no limit configured ({@code sizeLimit <= 0}) this is a plain delegate.
     * Otherwise the caller waits (up to the timeout) for the queue to drop below
     * the limit before delegating.
     *
     * @see java.util.concurrent.BlockingQueue#offer(java.lang.Object, long, java.util.concurrent.TimeUnit)
     */
    @Override
    public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedException {
        if (sizeLimit > 0) {
            long nanos = unit.toNanos(timeout);
            final ReentrantLock lock = this.sizeLimitLock;
            lock.lockInterruptibly();
            try {
                for (;;) {
                    if (size() < sizeLimit) {
                        // NOTE(review): the delegate receives the FULL original timeout even
                        // after time was spent awaiting capacity, so the total wait can exceed
                        // `timeout` -- confirm whether this is intended.
                        return queue.offer(e, timeout, unit);
                    }
                    // Time budget exhausted while over the limit: reject the element.
                    if (nanos <= 0)
                        return false;
                    try {
                        // Wait for a consumer to signal that capacity is available again.
                        nanos = withinSizeLimit.awaitNanos(nanos);
                    } catch (InterruptedException ie) {
                        withinSizeLimit.signal(); // propagate to non-interrupted thread
                        throw ie;
                    }
                }
            } finally {
                lock.unlock();
            }
        } else {
            // No size limit configured: plain timed offer on the backing queue.
            return queue.offer(e, timeout, unit);
        }
    }
}
public class PostgreSCSConnectionFactory { /** * { @ inheritDoc } */ public WorkspaceStorageConnection openConnection ( boolean readOnly ) throws RepositoryException { } }
try { if ( this . containerConfig . dbStructureType . isMultiDatabase ( ) ) { return new PostgreSCSMultiDbJDBCConnection ( getJdbcConnection ( readOnly ) , readOnly , containerConfig ) ; } return new PostgreSCSSingleDbJDBCConnection ( getJdbcConnection ( readOnly ) , readOnly , containerConfig ) ; } catch ( SQLException e ) { throw new RepositoryException ( e ) ; }
public class WInternalLinkRenderer { /** * Paints the given { @ link WInternalLink } . * @ param component the WInternalLink to paint . * @ param renderContext the RenderContext to paint to . */ @ Override public void doRender ( final WComponent component , final WebXmlRenderContext renderContext ) { } }
WInternalLink link = ( WInternalLink ) component ; XmlStringBuilder xml = renderContext . getWriter ( ) ; if ( Util . empty ( link . getText ( ) ) ) { return ; } xml . appendTagOpen ( "ui:link" ) ; xml . appendAttribute ( "id" , component . getId ( ) ) ; xml . appendOptionalAttribute ( "class" , component . getHtmlClass ( ) ) ; xml . appendOptionalAttribute ( "track" , component . isTracking ( ) , "true" ) ; xml . appendOptionalAttribute ( "toolTip" , link . getToolTip ( ) ) ; xml . appendOptionalAttribute ( "accessibleText" , link . getAccessibleText ( ) ) ; xml . appendUrlAttribute ( "url" , "#" + link . getReference ( ) . getId ( ) ) ; xml . appendClose ( ) ; xml . appendEscaped ( link . getText ( ) ) ; xml . appendEndTag ( "ui:link" ) ;
public class DKV { /** * Used to order successive writes . */ static public void write_barrier ( ) { } }
for ( H2ONode h2o : H2O . CLOUD . _memary ) for ( RPC rpc : h2o . tasks ( ) ) if ( rpc . _dt instanceof TaskPutKey || rpc . _dt instanceof Atomic ) rpc . get ( ) ;
public class ResourceUtils { /** * 如果替换包含占位符则替换占位符 * @ param key * @ return */ private static String addToProperties ( String key , String value ) { } }
if ( ! value . contains ( PLACEHOLDER_PREFIX ) ) { allProperties . put ( key , value ) ; return value ; } String [ ] segments = value . split ( "\\$\\{" ) ; String seg ; StringBuilder finalValue = new StringBuilder ( ) ; for ( int i = 0 ; i < segments . length ; i ++ ) { seg = StringUtils . trimToNull ( segments [ i ] ) ; if ( StringUtils . isBlank ( seg ) ) continue ; if ( seg . contains ( PLACEHOLDER_SUFFIX ) ) { String refKey = seg . substring ( 0 , seg . indexOf ( PLACEHOLDER_SUFFIX ) ) . trim ( ) ; // 其他非 $ { } 的占位符如 : { { host } } String withBraceString = null ; if ( seg . contains ( "{" ) ) { withBraceString = seg . substring ( seg . indexOf ( PLACEHOLDER_SUFFIX ) + 1 ) ; } // 如果包含默认值 , 如 : $ { host : 127.0.0.1} String defaultValue = null ; if ( refKey . contains ( ":" ) ) { String [ ] tmpArray = refKey . split ( ":" ) ; refKey = tmpArray [ 0 ] ; defaultValue = tmpArray [ 1 ] ; } String refValue = getProperty ( refKey ) ; if ( StringUtils . isBlank ( refValue ) ) { refValue = defaultValue ; } finalValue . append ( refValue ) ; if ( withBraceString != null ) { finalValue . append ( withBraceString ) ; } else { String [ ] segments2 = seg . split ( "\\}" ) ; if ( segments2 . length == 2 ) { finalValue . append ( segments2 [ 1 ] ) ; } } } else { finalValue . append ( seg ) ; } } allProperties . put ( key , finalValue . toString ( ) ) ; return finalValue . toString ( ) ;
public class LogRepositoryConfiguration { /** * Modify the trace to use a memory buffer * @ param dataDirectory directory where buffer will be dumped if requested * @ param memoryBufferSize amount of memory ( in Mb ) to be used for this circular buffer */ public void setTraceMemory ( String dataDirectory , long memoryBufferSize ) { } }
TraceState state = ( TraceState ) ivTrace . clone ( ) ; state . ivStorageType = MEMORYBUFFER_TYPE ; state . ivDataDirectory = dataDirectory ; state . ivMemoryBufferSize = memoryBufferSize ; updateTraceConfiguration ( state ) ; state . copyTo ( ivTrace ) ;
public class FifoTaskExecutor {
    /**
     * Executes the submitted task. If the maximum number of pooled threads is in
     * use, this method blocks until one of them is available. Results are recorded
     * under a monotonically increasing id so completions can be delivered in FIFO
     * submission order by {@code processResults()}.
     *
     * @param task the task to run
     * @throws InterruptedException if interrupted while waiting for a free slot
     */
    public void execute(final FifoTask<E> task) throws InterruptedException {
        final int id;
        synchronized (this) {
            // Assign the FIFO sequence id and register the task BEFORE blocking,
            // so ordering reflects submission order, not start order.
            id = idCounter++;
            taskMap.put(id, task);
            // Throttle: wait until a pooled slot frees up.
            while (activeCounter >= maxThreads) {
                wait();
            }
            activeCounter++;
        }
        this.threadPoolExecutor.execute(new Runnable() {
            public void run() {
                try {
                    try {
                        final E outcome = task.runParallel();
                        synchronized (resultMap) {
                            resultMap.put(id, new Result(outcome));
                        }
                    } catch (Throwable th) {
                        // Failures are captured as results too, preserving FIFO delivery.
                        synchronized (resultMap) {
                            resultMap.put(id, new Result(null, th));
                        }
                    } finally {
                        // Drain any deliverable results, then release the slot and wake waiters.
                        processResults();
                        synchronized (FifoTaskExecutor.this) {
                            activeCounter--;
                            FifoTaskExecutor.this.notifyAll();
                        }
                    }
                } catch (Exception ex) {
                    // Last-resort logging; the worker thread must never die silently.
                    Logger.getLogger(FifoTaskExecutor.class.getName()).log(Level.SEVERE, ex.getMessage(), ex);
                }
            }
        });
    }
}
public class ResourceBundleMessageSource { /** * Resolves the given message code as key in the registered resource bundles , * returning the value found in the bundle as - is ( without MessageFormat parsing ) . */ @ Override protected String resolveCodeWithoutArguments ( String code , Locale locale ) { } }
String result = null ; for ( int i = 0 ; result == null && i < this . basenames . length ; i ++ ) { ResourceBundle bundle = getResourceBundle ( this . basenames [ i ] , locale ) ; if ( bundle != null ) { result = getStringOrNull ( bundle , code ) ; } } return result ;
public class InfinitePagerAdapter { /** * fills the page on index { @ code position } . * @ param position the page index to fill the page . */ void fillPage ( final int position ) { } }
if ( Constants . DEBUG ) { Log . d ( "InfiniteViewPager" , "setup Page " + position ) ; printPageModels ( "before newPage" ) ; } final PageModel < T > oldModel = mPageModels [ position ] ; final PageModel < T > newModel = createPageModel ( position ) ; if ( oldModel == null || newModel == null ) { Log . w ( Constants . LOG_TAG , "fillPage no model found " + oldModel + " " + newModel ) ; return ; } // moving the new created views to the page of the viewpager oldModel . removeAllChildren ( ) ; for ( final View newChild : newModel . getChildren ( ) ) { newModel . removeViewFromParent ( newChild ) ; oldModel . addChild ( newChild ) ; } mPageModels [ position ] . setIndicator ( newModel . getIndicator ( ) ) ;
public class CmdArgs { /** * Returns usage string . * @ return */ public String getUsage ( ) { } }
Set < Option > set = new HashSet < > ( ) ; StringBuilder sb = new StringBuilder ( ) ; sb . append ( "usage: " ) ; boolean n1 = false ; for ( Entry < String , List < Option > > e : groups . entrySet ( ) ) { if ( n1 ) { sb . append ( "|" ) ; } n1 = true ; sb . append ( "[" ) ; boolean n2 = false ; for ( Option opt : e . getValue ( ) ) { if ( n2 ) { sb . append ( " " ) ; } n2 = true ; append ( sb , opt ) ; set . add ( opt ) ; } sb . append ( "]" ) ; } for ( Option opt : map . values ( ) ) { if ( ! set . contains ( opt ) ) { sb . append ( " " ) ; append ( sb , opt ) ; } } for ( int ii = 0 ; ii < names . size ( ) ; ii ++ ) { sb . append ( " <" ) . append ( names . get ( ii ) ) . append ( ">" ) ; if ( types . get ( ii ) . isArray ( ) ) { sb . append ( "..." ) ; } } return sb . toString ( ) ;
public class WorkbookReader { /** * Converts the spreadsheet to String Lists by a List Iterable . * @ return List of String Iterable */ public Iterable < List < String > > toLists ( ) { } }
checkState ( ! isClosed , WORKBOOK_CLOSED ) ; Iterable < List < String > > listsIterable = Iterables . transform ( sheet , item -> { return rowToList ( item ) ; } ) ; return hasHeader ? Iterables . skip ( listsIterable , 1 ) : listsIterable ;
public class RouteProcessorThreadListener { /** * RouteListener from the { @ link RouteProcessorBackgroundThread } - if fired with checkFasterRoute set * to true , a new { @ link DirectionsRoute } should be fetched with { @ link RouteFetcher } . * @ param location to create a new origin * @ param routeProgress for various { @ link com . mapbox . api . directions . v5 . models . LegStep } data * @ param checkFasterRoute true if should check for faster route , false otherwise */ @ Override public void onCheckFasterRoute ( Location location , RouteProgress routeProgress , boolean checkFasterRoute ) { } }
if ( checkFasterRoute ) { routeFetcher . findRouteFromRouteProgress ( location , routeProgress ) ; }
public class BinaryReader { /** * Read an int from the input stream . * @ return The number read . * @ throws IOException if unable to read from stream . */ public int expectInt ( ) throws IOException { } }
int b1 = in . read ( ) ; if ( b1 < 0 ) { throw new IOException ( "Missing byte 1 to expected int" ) ; } int b2 = in . read ( ) ; if ( b2 < 0 ) { throw new IOException ( "Missing byte 2 to expected int" ) ; } int b3 = in . read ( ) ; if ( b3 < 0 ) { throw new IOException ( "Missing byte 3 to expected int" ) ; } int b4 = in . read ( ) ; if ( b4 < 0 ) { throw new IOException ( "Missing byte 4 to expected int" ) ; } return unshift4bytes ( b1 , b2 , b3 , b4 ) ;
public class BinaryJedis { /** * Increment the number stored at field in the hash at key by a double precision floating point * value . If key does not exist , a new key holding a hash is created . If field does not exist or * holds a string , the value is set to 0 before applying the operation . Since the value argument * is signed you can use this command to perform both increments and decrements . * The range of values supported by HINCRBYFLOAT is limited to double precision floating point * values . * < b > Time complexity : < / b > O ( 1) * @ param key * @ param field * @ param value * @ return Double precision floating point reply The new value at field after the increment * operation . */ @ Override public Double hincrByFloat ( final byte [ ] key , final byte [ ] field , final double value ) { } }
checkIsInMultiOrPipeline ( ) ; client . hincrByFloat ( key , field , value ) ; final String dval = client . getBulkReply ( ) ; return ( dval != null ? new Double ( dval ) : null ) ;