signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ConfigUtil { /** * returns the hierarchical part of a configuration
* @ param configuration
* the given configuration
* @ return the hierarchical configuration or null if not found in the given
* configuration object . */
public static HierarchicalConfiguration getHierarchicalConfiguration ( final Configuration configuration ) { } } | if ( configuration instanceof CompositeConfiguration ) { final CompositeConfiguration compositeConfig = ( CompositeConfiguration ) configuration ; for ( int i = 0 ; i < compositeConfig . getNumberOfConfigurations ( ) ; i ++ ) { if ( compositeConfig . getConfiguration ( i ) instanceof HierarchicalConfiguration ) { return ( HierarchicalConfiguration ) compositeConfig . getConfiguration ( i ) ; } } } // maybe I need to send a runtime exception ? ?
// throw new
// ConfigurationRuntimeException ( " no hierarchical configuration was defined " ) ;
return null ; |
public class CmsCloneModuleThread { /** * Replaces the paths within all the given resources and removes all UUIDs by an regex . < p >
* @ param sourceModulePath the search path
* @ param targetModulePath the replace path
* @ param resources the resources
* @ throws CmsException if something goes wrong
* @ throws UnsupportedEncodingException if the file content could not be read with the determined encoding */
private void replacePath ( String sourceModulePath , String targetModulePath , List < CmsResource > resources ) throws CmsException , UnsupportedEncodingException { } } | for ( CmsResource resource : resources ) { if ( resource . isFile ( ) ) { CmsFile file = getCms ( ) . readFile ( resource ) ; if ( CmsResourceTypeXmlContent . isXmlContent ( file ) ) { CmsXmlContent xmlContent = CmsXmlContentFactory . unmarshal ( getCms ( ) , file ) ; xmlContent . setAutoCorrectionEnabled ( true ) ; file = xmlContent . correctXmlStructure ( getCms ( ) ) ; } String encoding = CmsLocaleManager . getResourceEncoding ( getCms ( ) , file ) ; String oldContent = new String ( file . getContents ( ) , encoding ) ; String newContent = oldContent . replaceAll ( sourceModulePath , targetModulePath ) ; Matcher matcher = Pattern . compile ( CmsUUID . UUID_REGEX ) . matcher ( newContent ) ; newContent = matcher . replaceAll ( "" ) ; newContent = newContent . replaceAll ( "<uuid></uuid>" , "" ) ; if ( ! oldContent . equals ( newContent ) ) { file . setContents ( newContent . getBytes ( encoding ) ) ; if ( ! resource . getRootPath ( ) . startsWith ( CmsWorkplace . VFS_PATH_SYSTEM ) ) { if ( lockResource ( getCms ( ) , resource ) ) { getCms ( ) . writeFile ( file ) ; } } else { getCms ( ) . writeFile ( file ) ; } } } } |
public class ClusterMemoryManager {

    /**
     * Builds the next {@link MemoryPoolAssignmentsRequest} for the given queries,
     * possibly promoting the single largest eligible query into the reserved pool.
     * Takes RemoteNodeMemory as we don't need to POST anything.
     *
     * @param queries the currently running query executions
     * @return the versioned assignment request to send to the nodes
     */
    private synchronized MemoryPoolAssignmentsRequest updateAssignments(Iterable<QueryExecution> queries) {
        ClusterMemoryPool reservedPool = pools.get(RESERVED_POOL);
        ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
        verify(generalPool != null, "generalPool is null");
        verify(reservedPool != null, "reservedPool is null");
        long version = memoryPoolAssignmentsVersion.incrementAndGet();
        // Check that all previous assignments have propagated to the visible nodes.
        // This doesn't account for temporary network issues, and is more of a safety
        // check than a guarantee.
        if (allAssignmentsHavePropagated(queries)) {
            if (reservedPool.getAssignedQueries() == 0 && generalPool.getBlockedNodes() > 0) {
                // Reserved pool is free and the general pool has blocked nodes:
                // find the query with the largest memory reservation to promote.
                QueryExecution biggestQuery = null;
                long maxMemory = -1;
                for (QueryExecution queryExecution : queries) {
                    if (resourceOvercommit(queryExecution.getSession())) {
                        // Don't promote queries that requested resource overcommit to the
                        // reserved pool, since their memory usage is unbounded.
                        continue;
                    }
                    long bytesUsed = getQueryMemoryReservation(queryExecution);
                    if (bytesUsed > maxMemory) {
                        biggestQuery = queryExecution;
                        maxMemory = bytesUsed;
                    }
                }
                if (biggestQuery != null) {
                    log.info("Moving query %s to the reserved pool", biggestQuery.getQueryId());
                    biggestQuery.setMemoryPool(new VersionedMemoryPoolId(RESERVED_POOL, version));
                }
            }
        }
        // Snapshot the (possibly just-updated) pool of every query into the request.
        ImmutableList.Builder<MemoryPoolAssignment> assignments = ImmutableList.builder();
        for (QueryExecution queryExecution : queries) {
            assignments.add(new MemoryPoolAssignment(queryExecution.getQueryId(), queryExecution.getMemoryPool().getId()));
        }
        return new MemoryPoolAssignmentsRequest(coordinatorId, version, assignments.build());
    }
}
public class ModifiedButNotUsed { /** * Whether this is a chain of method invocations terminating in a new proto or collection builder . */
private static boolean newFluentChain ( ExpressionTree tree , VisitorState state ) { } } | while ( tree instanceof MethodInvocationTree && FLUENT_CHAIN . matches ( tree , state ) ) { tree = getReceiver ( tree ) ; } return tree != null && FLUENT_CONSTRUCTOR . matches ( tree , state ) ; |
public class MultipartContent {

    /**
     * Configures a field part with the given field name and value (of the specified
     * content type). Delegates to the four-argument overload with a {@code null}
     * file name.
     *
     * @param fieldName the field name
     * @param contentType the value content type
     * @param value the value
     * @return a reference to this {@link MultipartContent} instance
     */
    public MultipartContent part(String fieldName, String contentType, String value) {
        return part(fieldName, null, contentType, value);
    }
}
public class transformaction {

    /**
     * Use this API to update transformaction.
     * Copies only the updatable attributes of the given resource onto a fresh
     * object (identified by {@code name}) so that no other attributes are sent.
     *
     * @param client the nitro service on which to execute the update
     * @param resource the transformaction carrying the new attribute values
     * @return the response of the update operation
     * @throws Exception if the update fails
     */
    public static base_response update(nitro_service client, transformaction resource) throws Exception {
        transformaction updateresource = new transformaction();
        updateresource.name = resource.name;
        updateresource.priority = resource.priority;
        updateresource.requrlfrom = resource.requrlfrom;
        updateresource.requrlinto = resource.requrlinto;
        updateresource.resurlfrom = resource.resurlfrom;
        updateresource.resurlinto = resource.resurlinto;
        updateresource.cookiedomainfrom = resource.cookiedomainfrom;
        updateresource.cookiedomaininto = resource.cookiedomaininto;
        updateresource.state = resource.state;
        updateresource.comment = resource.comment;
        return updateresource.update_resource(client);
    }
}
public class Preconditions {

    /**
     * Checks the truth of the given expression and throws a customized
     * {@link IllegalStateException} if it is false. Intended for doing validation in
     * methods involving the state of the calling instance, but not involving
     * parameters of the calling method, e.g.:
     * <blockquote><pre>
     * public void unlock() {
     *     Preconditions.checkState(locked,
     *         "Must be locked to be unlocked. Most recent lock: %s",
     *         mostRecentLock);
     * }
     * </pre></blockquote>
     *
     * @param expression the precondition to check involving the state of the calling instance
     * @param messageFormat a {@link Formatter format} string for the detail message to be used
     *        in the event that an exception is thrown
     * @param messageArgs the arguments referenced by the format specifiers in the
     *        {@code messageFormat}
     * @throws IllegalStateException if {@code expression} is false
     */
    public static void checkState(boolean expression, String messageFormat, Object... messageArgs) {
        if (expression) {
            return;
        }
        // Format lazily: the detail message is only built on the failure path.
        throw new IllegalStateException(format(messageFormat, messageArgs));
    }
}
public class HttpResourceConnection { /** * Determines if the HTTP method GET or HEAD should be used to check the
* timestamp on external resources .
* @ return true if configured to use HEAD requests */
private boolean isQuickQuery ( ) { } } | boolean quickQuery ; try { quickQuery = settings . getBoolean ( Settings . KEYS . DOWNLOADER_QUICK_QUERY_TIMESTAMP , true ) ; } catch ( InvalidSettingException e ) { if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( "Invalid settings : {}" , e . getMessage ( ) , e ) ; } quickQuery = true ; } return quickQuery ; |
public class AzureDNS {

    /**
     * Returns an ArrayList for a single item, needed because the Azure API works in
     * ArrayList, not List (!!) so we can't just use
     * {@link java.util.Arrays#asList(Object[])}.
     *
     * @param item the single element to wrap
     * @param <T> the element type
     * @return a mutable {@link ArrayList} containing exactly {@code item}
     */
    private <T> ArrayList<T> list(T item) {
        final ArrayList<T> singleton = new ArrayList<T>(1);
        singleton.add(item);
        return singleton;
    }
}
public class Entity {

    /**
     * Indexed setter for entity_mentions - sets an indexed value.
     * UIMA-generated accessor: validates the feature, bounds-checks the index,
     * then stores the FS reference of {@code v} at position {@code i}.
     * @generated
     * @param i index in the array to set
     * @param v value to set into the array
     */
    public void setEntity_mentions(int i, EntityMention v) {
        // verify the feature exists on the current type system
        if (Entity_Type.featOkTst && ((Entity_Type) jcasType).casFeat_entity_mentions == null)
            jcasType.jcas.throwFeatMissing("entity_mentions", "de.julielab.jules.types.ace.Entity");
        // bounds-check i against the current FSArray backing the feature
        jcasType.jcas.checkArrayBounds(jcasType.ll_cas.ll_getRefValue(addr, ((Entity_Type) jcasType).casFeatCode_entity_mentions), i);
        // write the FS reference of v into slot i of the array
        jcasType.ll_cas.ll_setRefArrayValue(jcasType.ll_cas.ll_getRefValue(addr, ((Entity_Type) jcasType).casFeatCode_entity_mentions), i, jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class GroupDiscussInterface {

    /**
     * Get a list of topics from a group.
     *
     * @param groupId
     *            Unique identifier of a group; returns a list of topics for a given group {@link Group}.
     * @param perPage
     *            Number of records per page (only sent when positive).
     * @param page
     *            Result-section (only sent when positive).
     * @return A group topic list
     * @throws FlickrException if the API response reports an error
     * @see <a href="http://www.flickr.com/services/api/flickr.groups.discuss.topics.getList.html">API Documentation</a>
     */
    public TopicList<Topic> getTopicsList(String groupId, int perPage, int page) throws FlickrException {
        TopicList<Topic> topicList = new TopicList<Topic>();
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("method", METHOD_TOPICS_GET_LIST);
        parameters.put("group_id", groupId);
        // paging parameters are optional and only sent when positive
        if (perPage > 0) {
            parameters.put("per_page", "" + perPage);
        }
        if (page > 0) {
            parameters.put("page", "" + page);
        }
        Response response = transportAPI.get(transportAPI.getPath(), parameters, apiKey, sharedSecret);
        if (response.isError()) {
            throw new FlickrException(response.getErrorCode(), response.getErrorMessage());
        }
        // copy the attributes of the payload (<topics>) element onto the result
        Element topicElements = response.getPayload();
        topicList.setPage(topicElements.getAttribute("page"));
        topicList.setPages(topicElements.getAttribute("pages"));
        topicList.setPerPage(topicElements.getAttribute("perpage"));
        topicList.setTotal(topicElements.getAttribute("total"));
        topicList.setGroupId(topicElements.getAttribute("group_id"));
        topicList.setIconServer(Integer.parseInt(topicElements.getAttribute("iconserver")));
        topicList.setIconFarm(Integer.parseInt(topicElements.getAttribute("iconfarm")));
        topicList.setName(topicElements.getAttribute("name"));
        topicList.setMembers(Integer.parseInt(topicElements.getAttribute("members")));
        topicList.setPrivacy(Integer.parseInt(topicElements.getAttribute("privacy")));
        topicList.setLanguage(topicElements.getAttribute("lang"));
        topicList.setIsPoolModerated("1".equals(topicElements.getAttribute("ispoolmoderated")));
        // parse every child <topic> element into the list
        NodeList topicNodes = topicElements.getElementsByTagName("topic");
        for (int i = 0; i < topicNodes.getLength(); i++) {
            Element element = (Element) topicNodes.item(i);
            topicList.add(parseTopic(element));
        }
        return topicList;
    }
}
public class ResourceBundleMessageResolver { /** * メッセージソースを削除する 。
* @ param resourceBundle 削除対象のメッセージソース
* @ return 登録されているメッセージソースがある場合はtrueを返します 。
* @ throws NullPointerException resourceBundle is null . */
public boolean removeResourceBundle ( final ResourceBundle resourceBundle ) { } } | Objects . requireNonNull ( resourceBundle , "resourceBundle should not be null." ) ; if ( ! messageBundles . contains ( resourceBundle ) ) { return false ; } messageBundles . remove ( resourceBundle ) ; return true ; |
public class OmsDrainDir {

    /**
     * Calculates new drainage directions.
     *
     * Reads the pit-filled elevation ({@code inPit}) and flow ({@code inFlow})
     * rasters, sorts all cells by elevation, and runs the Orlandini D8-LAD or
     * D8-LTD routine to produce the drainage-direction ({@code outFlow}) and
     * TCA ({@code outTca}) coverages.
     *
     * @throws Exception if reading the inputs or processing fails
     */
    @Execute
    public void process() throws Exception {
        if (!concatOr(outFlow == null, doReset)) {
            return;
        }
        checkNull(inFlow, inPit);
        RegionMap regionMap = CoverageUtilities.getRegionParamsFromGridCoverage(inPit);
        cols = regionMap.getCols();
        rows = regionMap.getRows();
        xRes = regionMap.getXres();
        yRes = regionMap.getYres();
        // length of a cell diagonal
        dxySqrt = Math.sqrt(xRes * xRes + yRes * yRes);
        RenderedImage pitfillerRI = inPit.getRenderedImage();
        WritableRaster pitfillerWR = CoverageUtilities.renderedImage2DoubleWritableRaster(pitfillerRI, true);
        RenderedImage flowRI = inFlow.getRenderedImage();
        WritableRaster flowWR = CoverageUtilities.renderedImage2ShortWritableRaster(flowRI, true);
        RandomIter pitRandomIter = RandomIterFactory.create(pitfillerWR, null);
        // create new matrix: flatten elevations row-major, keep a parallel array of
        // 1-based flat indexes, and count the cells holding a valid elevation
        double[] orderedelev = new double[cols * rows];
        int[] indexes = new int[cols * rows];
        int nelev = 0;
        for (int r = 0; r < rows; r++) {
            if (pm.isCanceled()) {
                return;
            }
            for (int c = 0; c < cols; c++) {
                double pitValue = pitRandomIter.getSampleDouble(c, r, 0);
                int pos = (r * cols) + c;
                orderedelev[pos] = pitValue;
                indexes[pos] = pos + 1;
                if (!isNovalue(pitValue)) {
                    nelev = nelev + 1;
                }
            }
        }
        // sort cells by elevation while keeping the index array aligned
        QuickSortAlgorithm t = new QuickSortAlgorithm(pm);
        t.sort(orderedelev, indexes);
        pm.message(msg.message("draindir.initializematrix"));
        // Initialize new RasterData and set value
        WritableRaster tcaWR = CoverageUtilities.createWritableRaster(cols, rows, Integer.class, null, HMConstants.intNovalue);
        WritableRaster dirWR = CoverageUtilities.createWritableRaster(cols, rows, Short.class, null, HMConstants.shortNovalue);
        // it contains the analyzed cells
        WritableRaster deviationsWR = CoverageUtilities.createWritableRaster(cols, rows, Double.class, null, null);
        BitMatrix analizedMatrix = new BitMatrix(cols, rows);
        if (doLad) {
            orlandiniD8LAD(indexes, deviationsWR, analizedMatrix, pitfillerWR, flowWR, tcaWR, dirWR, nelev);
        } else {
            orlandiniD8LTD(indexes, deviationsWR, analizedMatrix, pitfillerWR, flowWR, tcaWR, dirWR, nelev);
            if (pm.isCanceled()) {
                return;
            }
            // only if required executes this method
            if (inFlownet != null) {
                newDirections(pitfillerWR, dirWR);
            }
        }
        if (pm.isCanceled()) {
            return;
        }
        outFlow = CoverageUtilities.buildCoverage("draindir", dirWR, regionMap, inPit.getCoordinateReferenceSystem());
        outTca = CoverageUtilities.buildCoverage("tca", tcaWR, regionMap, inPit.getCoordinateReferenceSystem());
    }
}
public class HttpFields {

    /**
     * Remove a field.
     * Scans backwards so compacting the array after a removal does not disturb
     * indices still to be visited; if the header occurs multiple times, every
     * occurrence is removed and the earliest one (in field order) is returned.
     *
     * @param name the field to remove
     * @return the header that was removed
     */
    public HttpField remove(HttpHeader name) {
        HttpField removed = null;
        for (int i = _size; i-- > 0; ) {
            HttpField f = _fields[i];
            // identity compare -- NOTE(review): assumes HttpHeader values are
            // canonical (enum-like); confirm against the HttpHeader type.
            if (f.getHeader() == name) {
                removed = f;
                // shift the tail left by one and shrink the logical size
                System.arraycopy(_fields, i + 1, _fields, i, --_size - i);
            }
        }
        return removed;
    }
}
public class NFRule {

    /**
     * This function parses the rule's rule descriptor (i.e., the base value
     * and/or other tokens that precede the rule's rule text in the description)
     * and sets the rule's base value, radix, and exponent according to the
     * descriptor. (If the description doesn't include a rule descriptor, then
     * this function sets everything to default values and the rule set sets the
     * rule's real base value).
     *
     * @param description The rule's description
     * @return If "description" included a rule descriptor, this is "description"
     *         with the descriptor and any trailing whitespace stripped off.
     *         Otherwise; it's "descriptor" unchanged.
     */
    private String parseRuleDescriptor(String description) {
        String descriptor;
        // the description consists of a rule descriptor and a rule body,
        // separated by a colon. The rule descriptor is optional. If
        // it's omitted, just set the base value to 0.
        int p = description.indexOf(":");
        if (p != -1) {
            // copy the descriptor out into its own string and strip it,
            // along with any trailing whitespace, out of the original description
            descriptor = description.substring(0, p);
            ++p;
            while (p < description.length() && PatternProps.isWhiteSpace(description.charAt(p))) {
                ++p;
            }
            description = description.substring(p);

            // check first to see if the rule descriptor matches the token
            // for one of the special rules. If it does, set the base
            // value to the correct identifier value
            int descriptorLength = descriptor.length();
            char firstChar = descriptor.charAt(0);
            char lastChar = descriptor.charAt(descriptorLength - 1);
            if (firstChar >= '0' && firstChar <= '9' && lastChar != 'x') {
                // if the rule descriptor begins with a digit, it's a descriptor
                // for a normal rule
                long tempValue = 0;
                char c = 0;
                p = 0;
                // begin parsing the descriptor: copy digits into "tempValue",
                // skip periods, commas, and spaces, stop on a slash or > sign
                // (or at the end of the string), and throw an exception on
                // any other character
                while (p < descriptorLength) {
                    c = descriptor.charAt(p);
                    if (c >= '0' && c <= '9') {
                        tempValue = tempValue * 10 + (c - '0');
                    }
                    else if (c == '/' || c == '>') {
                        break;
                    }
                    else if (!PatternProps.isWhiteSpace(c) && c != ',' && c != '.') {
                        throw new IllegalArgumentException("Illegal character " + c + " in rule descriptor");
                    }
                    ++p;
                }
                // Set the rule's base value according to what we parsed
                setBaseValue(tempValue);

                // if we stopped the previous loop on a slash, we're now parsing
                // the rule's radix. Again, accumulate digits in tempValue, skip
                // punctuation, stop on a > mark, and throw an exception on
                // anything else
                if (c == '/') {
                    tempValue = 0;
                    ++p;
                    while (p < descriptorLength) {
                        c = descriptor.charAt(p);
                        if (c >= '0' && c <= '9') {
                            tempValue = tempValue * 10 + (c - '0');
                        }
                        else if (c == '>') {
                            break;
                        }
                        else if (!PatternProps.isWhiteSpace(c) && c != ',' && c != '.') {
                            throw new IllegalArgumentException("Illegal character " + c + " in rule descriptor");
                        }
                        ++p;
                    }
                    // tempValue now contains the rule's radix. Set it accordingly,
                    // and recalculate the rule's exponent
                    radix = (int) tempValue;
                    if (radix == 0) {
                        throw new IllegalArgumentException("Rule can't have radix of 0");
                    }
                    exponent = expectedExponent();
                }

                // if we stopped the previous loop on a > sign, then continue
                // for as long as we still see > signs. For each one, decrement
                // the exponent (unless the exponent is already 0). If we see
                // another character before reaching the end of the descriptor,
                // that's also a syntax error.
                if (c == '>') {
                    while (p < descriptorLength) {
                        c = descriptor.charAt(p);
                        if (c == '>' && exponent > 0) {
                            --exponent;
                        } else {
                            throw new IllegalArgumentException("Illegal character in rule descriptor");
                        }
                        ++p;
                    }
                }
            }
            else if (descriptor.equals("-x")) {
                setBaseValue(NEGATIVE_NUMBER_RULE);
            }
            else if (descriptorLength == 3) {
                // three-character special descriptors: 0?x, x?x, x?0 (with the
                // middle character taken as the decimal point), "NaN", or "Inf"
                if (firstChar == '0' && lastChar == 'x') {
                    setBaseValue(PROPER_FRACTION_RULE);
                    decimalPoint = descriptor.charAt(1);
                }
                else if (firstChar == 'x' && lastChar == 'x') {
                    setBaseValue(IMPROPER_FRACTION_RULE);
                    decimalPoint = descriptor.charAt(1);
                }
                else if (firstChar == 'x' && lastChar == '0') {
                    setBaseValue(MASTER_RULE);
                    decimalPoint = descriptor.charAt(1);
                }
                else if (descriptor.equals("NaN")) {
                    setBaseValue(NAN_RULE);
                }
                else if (descriptor.equals("Inf")) {
                    setBaseValue(INFINITY_RULE);
                }
            }
        }
        // else use the default base value for now.

        // finally, if the rule body begins with an apostrophe, strip it off
        // (this is generally used to put whitespace at the beginning of a
        // rule's rule text)
        if (description.length() > 0 && description.charAt(0) == '\'') {
            description = description.substring(1);
        }

        // return the description with all the stuff we've just waded through
        // stripped off the front. It now contains just the rule body.
        return description;
    }
}
public class DiscordWebSocketAdapter {

    /**
     * Registers all handlers, grouped by the kind of gateway event they process.
     */
    private void registerHandlers() {
        // general
        addHandler(new ReadyHandler(api));
        addHandler(new ResumedHandler(api));
        // server
        addHandler(new GuildBanAddHandler(api));
        addHandler(new GuildBanRemoveHandler(api));
        addHandler(new GuildCreateHandler(api));
        addHandler(new GuildDeleteHandler(api));
        addHandler(new GuildMembersChunkHandler(api));
        addHandler(new GuildMemberAddHandler(api));
        addHandler(new GuildMemberRemoveHandler(api));
        addHandler(new GuildMemberUpdateHandler(api));
        addHandler(new GuildUpdateHandler(api));
        addHandler(new VoiceStateUpdateHandler(api));
        // role
        addHandler(new GuildRoleCreateHandler(api));
        addHandler(new GuildRoleDeleteHandler(api));
        addHandler(new GuildRoleUpdateHandler(api));
        // emoji
        addHandler(new GuildEmojisUpdateHandler(api));
        // channel
        addHandler(new ChannelCreateHandler(api));
        addHandler(new ChannelDeleteHandler(api));
        addHandler(new ChannelPinsUpdateHandler(api));
        addHandler(new ChannelUpdateHandler(api));
        addHandler(new WebhooksUpdateHandler(api));
        // user
        addHandler(new PresencesReplaceHandler(api));
        addHandler(new PresenceUpdateHandler(api));
        addHandler(new TypingStartHandler(api));
        addHandler(new UserUpdateHandler(api));
        // message
        addHandler(new MessageCreateHandler(api));
        addHandler(new MessageDeleteBulkHandler(api));
        addHandler(new MessageDeleteHandler(api));
        addHandler(new MessageUpdateHandler(api));
        // reaction
        addHandler(new MessageReactionAddHandler(api));
        addHandler(new MessageReactionRemoveAllHandler(api));
        addHandler(new MessageReactionRemoveHandler(api));
    }
}
public class BoundablePair { /** * Computes the distance between the { @ link org . locationtech . jts . index . strtree . Boundable } s in this pair .
* The boundables are either composites or leaves .
* If either is composite , the distance is computed as the minimum distance
* between the bounds .
* If both are leaves , the distance is computed by { @ link # itemDistance ( org . locationtech . jts . index . strtree . ItemBoundable , org . locationtech . jts . index . strtree . ItemBoundable ) } .
* @ return */
private double distance ( ) { } } | // if items , compute exact distance
if ( isLeaves ( ) ) { return itemDistance . distance ( ( ItemBoundable ) boundable1 , ( ItemBoundable ) boundable2 ) ; } // otherwise compute distance between bounds of boundables
return ( ( Envelope ) boundable1 . getBounds ( ) ) . distance ( ( ( Envelope ) boundable2 . getBounds ( ) ) ) ; |
public class MediaClient {

    /**
     * List all your pipelines.
     * Issues a GET request against the pipeline resource; the request object
     * carries no additional parameters.
     *
     * @param request The request object containing all options for listing all pipelines.
     * @return The list of all your pipelines
     */
    public ListPipelinesResponse listPipelines(ListPipelinesRequest request) {
        checkNotNull(request, "The parameter request should NOT be null.");
        InternalRequest internalRequest = createRequest(HttpMethodName.GET, request, PIPELINE);
        return invokeHttpClient(internalRequest, ListPipelinesResponse.class);
    }
}
public class DockerUtils {

    /**
     * Parse imageTag and get the relative path of the pushed image.
     * Example: {@code url:8081/image:version} becomes {@code image/version}.
     * When no version tag is present, {@code latest} is used.
     *
     * @param imageTag the full image tag, optionally prefixed with a registry host[:port]
     * @return the relative path {@code name/version}
     */
    public static String getImagePath(String imageTag) {
        final int slash = imageTag.indexOf('/');
        final int colon = imageTag.lastIndexOf(':');
        // A colon positioned before the slash belongs to the registry port,
        // not to a version tag.
        final boolean hasVersion = colon >= 0 && colon > slash;
        final String imageName = hasVersion
                ? imageTag.substring(slash + 1, colon)
                : imageTag.substring(slash + 1);
        final String imageVersion = hasVersion ? imageTag.substring(colon + 1) : "latest";
        return imageName + "/" + imageVersion;
    }
}
public class NetworkServiceLocator {

    /**
     * For the given interface, get the stub implementation.
     * If this service has no port for the given interface,
     * then ServiceException is thrown.
     */
    public java.rmi.Remote getPort(Class serviceEndpointInterface) throws javax.xml.rpc.ServiceException {
        try {
            if (com.google.api.ads.admanager.axis.v201902.NetworkServiceInterface.class.isAssignableFrom(serviceEndpointInterface)) {
                com.google.api.ads.admanager.axis.v201902.NetworkServiceSoapBindingStub _stub =
                    new com.google.api.ads.admanager.axis.v201902.NetworkServiceSoapBindingStub(
                        new java.net.URL(NetworkServiceInterfacePort_address), this);
                _stub.setPortName(getNetworkServiceInterfacePortWSDDServiceName());
                return _stub;
            }
        }
        catch (java.lang.Throwable t) {
            // any failure while building the stub (e.g. a malformed endpoint URL)
            // is surfaced as a ServiceException, preserving the cause
            throw new javax.xml.rpc.ServiceException(t);
        }
        // interface not supported by this service
        throw new javax.xml.rpc.ServiceException("There is no stub implementation for the interface: "
            + (serviceEndpointInterface == null ? "null" : serviceEndpointInterface.getName()));
    }
}
public class ResourcePathNode {

    /**
     * {@link Collection#add(Object) Adds} all {@link ResourcePathNode}s of this path
     * from the {@link #getRoot() root} down to {@code this} node.
     * Implemented by recursing towards the root first, so ancestors are added
     * before descendants (root-first ordering).
     *
     * @param nodes is the {@link Collection} where to {@link Collection#add(Object) add} the nodes.
     */
    public void collectFromRoot(Collection<ResourcePathNode<D>> nodes) {
        if (this.parent != null) {
            // add all ancestors first
            this.parent.collectFromRoot(nodes);
        }
        nodes.add(this);
    }
}
public class AbstractExcelWriter {

    /**
     * Writes multiple data rows.
     * (Original Javadoc was Chinese; translated.) Initializes the write,
     * validates the batch, writes each element, then clears the internal buffer.
     *
     * @param datas the data to be written
     * @return this
     * @throws WriteExcelException if validation fails
     */
    public AbstractExcelWriter<T> write(List<T> datas) throws WriteExcelException {
        this.initWrite(datas);
        this.validateDataPerWrite();
        // note: iterates this.datas (the buffered copy populated by initWrite),
        // not the parameter directly
        for (T data : this.datas) {
            this.writePerData(data);
        }
        this.datas.clear();
        return this;
    }
}
public class ConcurrentReferenceHashMap {

    /**
     * Removes the key (and its corresponding value) from this map.
     * This method does nothing if the key is not in the map.
     *
     * @param key the key that needs to be removed
     * @return the previous value associated with <tt>key</tt>, or
     * <tt>null</tt> if there was no mapping for <tt>key</tt>
     * @throws NullPointerException if the specified key is null
     */
    public V remove(Object key) {
        int hash = hashOf(key);
        // delegate to the segment that owns this hash; the extra (null, false)
        // arguments presumably mean "unconditional remove" -- NOTE(review):
        // confirm against Segment.remove's signature.
        return segmentFor(hash).remove(key, hash, null, false);
    }
}
public class FilterMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param filter the filter to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives the key and values bindings
     * @throws SdkClientException if {@code filter} is null or marshalling fails
     */
    public void marshall(Filter filter, ProtocolMarshaller protocolMarshaller) {
        if (filter == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(filter.getKey(), KEY_BINDING);
            protocolMarshaller.marshall(filter.getValues(), VALUES_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CommerceAvailabilityEstimateLocalServiceWrapper {

    /**
     * Returns the commerce availability estimate with the primary key.
     * Pure delegation to the wrapped local service.
     *
     * @param commerceAvailabilityEstimateId the primary key of the commerce availability estimate
     * @return the commerce availability estimate
     * @throws PortalException if a commerce availability estimate with the primary key could not be found
     */
    @Override
    public com.liferay.commerce.model.CommerceAvailabilityEstimate getCommerceAvailabilityEstimate(
            long commerceAvailabilityEstimateId)
        throws com.liferay.portal.kernel.exception.PortalException {
        return _commerceAvailabilityEstimateLocalService.getCommerceAvailabilityEstimate(commerceAvailabilityEstimateId);
    }
}
public class WeiXinPayService {

    /**
     * Queries the payment result of an order via the WeChat order-query API.
     * (The original Javadoc reproduced a sample callback XML payload; omitted here.)
     * Builds a signed parameter map, POSTs it to the orderquery endpoint, verifies
     * the response signature, and maps {@code trade_state} to an internal status.
     *
     * @param request PayRequest
     * @return PayQueryResponse
     */
    @Override
    public PayQueryResponse query(final PayRequest request) {
        request.checkVaild();
        final PayQueryResponse result = new PayQueryResponse();
        try {
            final WeixinPayElement element = elements.get(request.getAppid());
            if (element == null) return result.retcode(RETPAY_CONF_ERROR);
            // TreeMap keeps the parameters sorted, as required for signing
            final Map<String, String> map = new TreeMap<>();
            map.put("appid", element.appid);
            map.put("mch_id", element.merchno);
            map.put("out_trade_no", request.getPayno());
            map.put("nonce_str", Long.toHexString(System.currentTimeMillis()) + Long.toHexString(System.nanoTime()));
            map.put("sign", createSign(element, map));
            final String responseText = Utility.postHttpContent("https://api.mch.weixin.qq.com/pay/orderquery", formatMapToXML(map));
            result.setResponsetext(responseText);
            final Map<String, String> resultmap = formatXMLToMap(responseText);
            result.setResult(resultmap);
            String state = resultmap.getOrDefault("trade_state", "");
            // some responses omit trade_state; infer success from result_code plus a positive fee
            if (state.isEmpty() && "SUCCESS".equals(resultmap.get("result_code")) && Long.parseLong(resultmap.get("total_fee")) > 0) {
                state = "SUCCESS";
            }
            if (!checkSign(element, resultmap)) return result.retcode(RETPAY_FALSIFY_ERROR);
            if (state.isEmpty()) logger.warning("weixin.pay.query = " + resultmap);
            // trade_state values (translated from the original Chinese comment):
            // SUCCESS=paid; REFUND=refunded; NOTPAY=unpaid; CLOSED=closed;
            // REVOKED=revoked (card payment); USERPAYING=user paying;
            // PAYERROR=payment failed (other reason, e.g. bank error)
            short paystatus = PAYSTATUS_PAYNO;
            switch (state) {
                case "SUCCESS": paystatus = PAYSTATUS_PAYOK; break;
                case "NOTPAY": paystatus = PAYSTATUS_UNPAY; break;
                case "CLOSED": paystatus = PAYSTATUS_CLOSED; break;
                case "REVOKED": paystatus = PAYSTATUS_CANCELED; break;
                case "USERPAYING": paystatus = PAYSTATUS_PAYING; break;
                case "PAYERROR": paystatus = PAYSTATUS_PAYNO; break;
            }
            result.setPaystatus(paystatus);
            result.setThirdpayno(resultmap.getOrDefault("transaction_id", ""));
            result.setPayedmoney(Long.parseLong(resultmap.getOrDefault("total_fee", "0")));
        } catch (Exception e) {
            // any parse/network failure is reported as a generic pay error
            result.setRetcode(RETPAY_PAY_ERROR);
            logger.log(Level.WARNING, "query_pay_error req=" + request + ", resp=" + result.responsetext, e);
        }
        return result;
    }
}
public class CookieConverter {

    /**
     * Converts a cookie string from an HTTP header value into a {@code Cookie} object.
     * <p>
     * Name and value are always extracted; every other attribute is only set when
     * its keyword appears in the raw string.
     *
     * @param cookieString the raw cookie header string to convert
     * @return the {@code Cookie} representation of the given string
     */
    private Cookie convertCookieString(String cookieString) {
        Cookie cookie = new Cookie(getCookieParam(NAME, cookieString), getCookieParam(VALUE, cookieString));
        // NOTE(review): contains() matches the attribute keyword anywhere in the
        // string, so a value that happens to contain e.g. "Path" would also
        // trigger the branch — confirm inputs are well-formed header values.
        if (cookieString.contains(COMMENT)) {
            cookie.setComment(getCookieParam(COMMENT, cookieString));
        }
        if (cookieString.contains(PATH)) {
            cookie.setPath(getCookieParam(PATH, cookieString));
        }
        if (cookieString.contains(DOMAIN)) {
            cookie.setDomain(getCookieParam(DOMAIN, cookieString));
        }
        if (cookieString.contains(MAX_AGE)) {
            cookie.setMaxAge(Integer.valueOf(getCookieParam(MAX_AGE, cookieString)));
        }
        if (cookieString.contains(SECURE)) {
            cookie.setSecure(Boolean.valueOf(getCookieParam(SECURE, cookieString)));
        }
        if (cookieString.contains(VERSION)) {
            cookie.setVersion(Integer.valueOf(getCookieParam(VERSION, cookieString)));
        }
        if (cookieString.contains(HTTP_ONLY)) {
            cookie.setHttpOnly(Boolean.valueOf(getCookieParam(HTTP_ONLY, cookieString)));
        }
        return cookie;
    }
}
public class AbstractResource { /** * Copies properties .
* @ param dest The object to which the properties will be copied .
* @ param source The object whose properties are copied
* @ throws WebApplicationException Throws exception if beanutils encounter a problem . */
protected void copyProperties ( Object dest , Object source ) { } } | try { BeanUtils . copyProperties ( dest , source ) ; } catch ( Exception e ) { String errorMessage = MessageFormat . format ( "M:{0};;E:{1}" , e . getCause ( ) . getMessage ( ) , e . toString ( ) ) ; throw new WebApplicationException ( errorMessage , Status . BAD_REQUEST ) ; } |
public class CmsDialog {

    /**
     * Builds the html for the button row under the dialog content area, including buttons.<p>
     *
     * @param buttons array of constants of which buttons to include in the row
     * @param attributes array of Strings for additional button attributes, parallel to {@code buttons}
     * @return the html for the button row under the dialog content area, including buttons
     */
    public String dialogButtons(int[] buttons, String[] attributes) {
        StringBuffer result = new StringBuffer(256);
        result.append(dialogButtonRow(HTML_START));
        // NOTE(review): assumes attributes has at least as many entries as
        // buttons; a shorter array throws ArrayIndexOutOfBoundsException —
        // confirm with callers.
        for (int i = 0; i < buttons.length; i++) {
            dialogButtonsHtml(result, buttons[i], attributes[i]);
        }
        result.append(dialogButtonRow(HTML_END));
        return result.toString();
    }
}
public class PyFunctionExprBuilder {

    /**
     * Returns a valid Python function call as a String, of the form
     * {@code funcName(arg1, arg2, key1=val1, **unpacked)}.
     */
    public String build() {
        StringBuilder sb = new StringBuilder(funcName + "(");
        // Join args and kwargs into simple strings.
        String args = argList.stream().map(PyExpr::getText).filter(Objects::nonNull).collect(Collectors.joining(", "));
        String kwargs = kwargMap.entrySet().stream().map(entry -> entry.getKey() + "=" + entry.getValue().getText()).filter(Objects::nonNull).collect(Collectors.joining(", "));
        // Strip empty strings so that absent sections don't produce stray commas.
        args = Strings.emptyToNull(args);
        kwargs = Strings.emptyToNull(kwargs);
        // Join all pieces together; skipNulls drops the sections that are absent.
        Joiner.on(", ").skipNulls().appendTo(sb, args, kwargs, unpackedKwargs);
        sb.append(")");
        return sb.toString();
    }
}
public class BsonReader {

    /**
     * Read the binary BSON representation from the supplied input stream and
     * construct the {@link Document} representation.
     *
     * @param stream the input stream; may not be null
     * @return the in-memory {@link Document} representation
     * @throws IOException if there was a problem reading from the stream
     */
    public Document read(InputStream stream) throws IOException {
        // Create a fresh Reader object per call so that this reader is thread safe.
        DocumentValueFactory valueFactory = VALUE_FACTORY;
        Reader reader = new Reader(new BsonDataInput(new DataInputStream(stream)), valueFactory);
        reader.startDocument();
        return reader.endDocument();
    }
}
public class ScriptFreeTLV { /** * Validates a single JSP page .
* @ param prefix the namespace prefix specified by the page for the
* custom tag library being validated .
* @ param uri the URI specified by the page for the TLD of the
* custom tag library being validated .
* @ param page a wrapper around the XML representation of the page
* being validated .
* @ return null , if the page is valid ; otherwise , a ValidationMessage [ ]
* containing one or more messages indicating why the page is not valid . */
@ Override public ValidationMessage [ ] validate ( String prefix , String uri , PageData page ) { } } | try { MyContentHandler handler = new MyContentHandler ( ) ; parser . parse ( page , handler ) ; return handler . reportResults ( ) ; } catch ( ParserConfigurationException e ) { return vmFromString ( e . toString ( ) ) ; } catch ( SAXException e ) { return vmFromString ( e . toString ( ) ) ; } catch ( IOException e ) { return vmFromString ( e . toString ( ) ) ; } |
public class OsmMapShapeConverter { /** * Convert a { @ link PolyhedralSurface } to a { @ link Polygon }
* @ param polyhedralSurface
* @ return */
public List < org . osmdroid . views . overlay . Polygon > toPolygons ( PolyhedralSurface polyhedralSurface ) { } } | List < org . osmdroid . views . overlay . Polygon > polygons = new ArrayList < > ( ) ; for ( Polygon polygon : polyhedralSurface . getPolygons ( ) ) { org . osmdroid . views . overlay . Polygon polygon1 = toPolygon ( polygon ) ; polygons . add ( polygon1 ) ; } return polygons ; |
public class NetworkConfig { /** * Returns the admin user associated with the specified organization
* @ param orgName The name of the organization
* @ return The admin user details
* @ throws NetworkConfigurationException */
public UserInfo getPeerAdmin ( String orgName ) throws NetworkConfigurationException { } } | OrgInfo org = getOrganizationInfo ( orgName ) ; if ( org == null ) { throw new NetworkConfigurationException ( format ( "Organization %s is not defined" , orgName ) ) ; } return org . getPeerAdmin ( ) ; |
public class NodeSet { /** * Returns the next node in the set and advances the position of the
* iterator in the set . After a NodeIterator is created , the first call
* to nextNode ( ) returns the first node in the set .
* @ return The next < code > Node < / code > in the set being iterated over , or
* < code > null < / code > if there are no more members in that set .
* @ throws DOMException
* INVALID _ STATE _ ERR : Raised if this method is called after the
* < code > detach < / code > method was invoked . */
public Node nextNode ( ) throws DOMException { } } | if ( ( m_next ) < this . size ( ) ) { Node next = this . elementAt ( m_next ) ; m_next ++ ; return next ; } else return null ; |
public class HashtableOnDisk { /** * Internal method to do default rehash of doubling */
private void rehash ( ) throws IOException , EOFException , FileManagerException , ClassNotFoundException , HashtableOnDiskException { } } | int size = ( header . tablesize ( ) * 2 ) + 1 ; if ( this . tempTableSize > size ) { doRehash ( this . tempTableSize ) ; this . tempTableSize = 0 ; } else { doRehash ( size ) ; } |
public class JobID { /** * Compare JobIds by first jtIdentifiers , then by job numbers */
@ Override public int compareTo ( ID o ) { } } | JobID that = ( JobID ) o ; int jtComp = this . jtIdentifier . compareTo ( that . jtIdentifier ) ; if ( jtComp == 0 ) { return this . id - that . id ; } else return jtComp ; |
public class Type { /** * This is the getter method for instance variable
* { @ link # allowedEventTypes } .
* @ return value of instance variable { @ link # allowedEventTypes }
* @ see # allowedEventTypes
* @ throws CacheReloadException on error */
public Set < Type > getAllowedEventTypes ( ) throws CacheReloadException { } } | final Set < Type > ret = new HashSet < > ( ) ; for ( final Long id : this . allowedEventTypes ) { ret . add ( Type . get ( id ) ) ; } return Collections . unmodifiableSet ( ret ) ; |
public class DrizzleStatement {

    /**
     * Moves to this <code>Statement</code> object's next result, returns
     * <code>true</code> if it is a <code>ResultSet</code> object, and
     * implicitly closes any current <code>ResultSet</code> object(s) obtained
     * with the method <code>getResultSet</code>.
     * <p>There are no more results when the following is true:
     * <pre>((stmt.getMoreResults() == false) &amp;&amp; (stmt.getUpdateCount() == -1))</pre>
     *
     * @return <code>true</code> if the next result is a <code>ResultSet</code>
     *         object; <code>false</code> if it is an update count or there are
     *         no more results
     * @throws java.sql.SQLException if a database access error occurs or this
     *         method is called on a closed <code>Statement</code>
     * @see #execute
     */
    public boolean getMoreResults() throws SQLException {
        startTimer();
        try {
            // Close the previous result before fetching the next one.
            if (queryResult != null) {
                queryResult.close();
            }
            queryResult = protocol.getMoreResults();
            if (queryResult == null) {
                // No more results: clear state and signal -1 update count.
                this.resultSet = null;
                setUpdateCount(-1);
                return false;
            }
            warningsCleared = false;
            if (queryResult.getResultSetType() == ResultSetType.SELECT) {
                setResultSet(new DrizzleResultSet(queryResult, this, getProtocol()));
                return true;
            }
            // Not a SELECT: expose the update count instead of a result set.
            setUpdateCount((int) ((ModifyQueryResult) queryResult).getUpdateCount());
            return false;
        } catch (QueryException e) {
            throw SQLExceptionMapper.get(e);
        } finally {
            stopTimer();
        }
    }
}
public class TrOSGiLogForwarder { /** * Check to see if this exception should be squelched . */
private boolean shouldBeLogged ( Throwable t , OSGiTraceComponent tc , ExtendedLogEntry logEntry ) { } } | while ( t != null ) { if ( t instanceof IllegalStateException && COULD_NOT_OBTAIN_LOCK_EXCEPTION . equals ( t . getMessage ( ) ) ) { if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "DS could not obtain a lock. This is not an error, but may indicate high system load" , getObjects ( logEntry , false ) ) ; } return false ; } t = t . getCause ( ) ; } return true ; |
public class lbgroup_lbvserver_binding { /** * Use this API to fetch lbgroup _ lbvserver _ binding resources of given name . */
public static lbgroup_lbvserver_binding [ ] get ( nitro_service service , String name ) throws Exception { } } | lbgroup_lbvserver_binding obj = new lbgroup_lbvserver_binding ( ) ; obj . set_name ( name ) ; lbgroup_lbvserver_binding response [ ] = ( lbgroup_lbvserver_binding [ ] ) obj . get_resources ( service ) ; return response ; |
public class FlyWeightFlatXmlDataSetBuilder { /** * Sets the flat XML input source from which the { @ link FlyWeightFlatXmlDataSet } is to be built
* @ param xmlInputFile The flat XML input as { @ link File }
* @ return The created { @ link FlyWeightFlatXmlDataSet }
* @ throws DataSetException */
public FlyWeightFlatXmlDataSet build ( File xmlInputFile ) throws MalformedURLException , DataSetException { } } | URL xmlInputUrl = xmlInputFile . toURI ( ) . toURL ( ) ; InputSource inputSource = createInputSourceFromUrl ( xmlInputUrl ) ; return buildInternal ( inputSource ) ; |
public class ACL {

    /**
     * Checks if the current security principal has this permission.
     *
     * @return false if the user doesn't have the permission
     */
    public final boolean hasPermission(@Nonnull Permission p) {
        Authentication a = Jenkins.getAuthentication();
        // The SYSTEM pseudo-principal bypasses all permission checks.
        if (a == SYSTEM) {
            return true;
        }
        return hasPermission(a, p);
    }
}
public class JadeComponentProvider { /** * 查找并返回一个目录或jar包下符合Jade规范的DAO接口 。
* 所返回的每一个BeanDefinition代表一个符合规范的DAO接口 , 我们可以通过
* { @ link BeanDefinition # getBeanClassName ( ) } 得到对应的DAO接口的类名
* 所返回的BeanDefinition代表的是一个接口 , 不能直接注册到Spring容器中 , 必须先做额外的转化 ! */
public Set < BeanDefinition > findCandidateComponents ( String uriPrefix ) { } } | if ( ! uriPrefix . endsWith ( "/" ) ) { uriPrefix = uriPrefix + "/" ; } Set < BeanDefinition > candidates = new LinkedHashSet < BeanDefinition > ( ) ; try { String packageSearchPath = uriPrefix + this . resourcePattern ; boolean traceEnabled = logger . isDebugEnabled ( ) ; boolean debugEnabled = logger . isDebugEnabled ( ) ; Resource [ ] resources = this . resourcePatternResolver . getResources ( packageSearchPath ) ; if ( debugEnabled ) { logger . debug ( "[jade/find] find " + resources . length + " resources for " + packageSearchPath ) ; } for ( int i = 0 ; i < resources . length ; i ++ ) { Resource resource = resources [ i ] ; if ( traceEnabled ) { logger . trace ( "[jade/find] scanning " + resource ) ; } // resourcePatternResolver . getResources出来的classPathResources , metadataReader对其进行getInputStream的时候为什么返回null呢 ?
// 不得不做一个exists判断
if ( ! resource . exists ( ) ) { if ( debugEnabled ) { logger . debug ( "Ignored because not exists:" + resource ) ; } } else if ( resource . isReadable ( ) ) { MetadataReader metadataReader = metadataReaderFactory . getMetadataReader ( resource ) ; if ( isCandidateComponent ( metadataReader ) ) { ScannedGenericBeanDefinition sbd = new ScannedGenericBeanDefinition ( metadataReader ) ; sbd . setResource ( resource ) ; sbd . setSource ( resource ) ; if ( sbd . getMetadata ( ) . isInterface ( ) && sbd . getMetadata ( ) . isIndependent ( ) ) { if ( debugEnabled ) { logger . debug ( "Identified candidate component class: " + resource ) ; } candidates . add ( sbd ) ; } else { if ( traceEnabled ) { logger . trace ( "Ignored because not a interface top-level class: " + resource ) ; } } } else { if ( traceEnabled ) { logger . trace ( "Ignored because not matching any filter: " + resource ) ; } } } else { if ( traceEnabled ) { logger . trace ( "Ignored because not readable: " + resource ) ; } } } } catch ( IOException ex ) { throw new BeanDefinitionStoreException ( "I/O failure during jade scanning" , ex ) ; } return candidates ; |
public class NodeSelectorMarkupHandler { /** * Markup block and level handling */
private void checkSizeOfMarkupBlocksStructure ( final int markupLevel ) { } } | if ( markupLevel >= this . markupBlocks . length ) { final int newLen = Math . max ( markupLevel + 1 , this . markupBlocks . length + MARKUP_BLOCKS_LEN ) ; final int [ ] newMarkupBlocks = new int [ newLen ] ; Arrays . fill ( newMarkupBlocks , 0 ) ; System . arraycopy ( this . markupBlocks , 0 , newMarkupBlocks , 0 , this . markupBlocks . length ) ; this . markupBlocks = newMarkupBlocks ; } |
public class GenericUtils {

    /**
     * Simple utility to get the bytes from a String while tolerating a null
     * input. Each char is narrowed to a single byte, so code points above
     * 0xFF lose their high bits (lossy for non-Latin-1 text).
     *
     * @param input the string to convert, may be null
     * @return the byte array, or null when the input was null
     */
    static public byte[] getBytes(String input) {
        if (input == null) {
            return null;
        }
        final int len = input.length();
        final byte[] result = new byte[len];
        for (int idx = 0; idx < len; idx++) {
            result[idx] = (byte) input.charAt(idx);
        }
        return result;
    }
}
public class CSSErrorStrategy {

    /**
     * Consumes tokens until the lexer state is balanced and a token from
     * {@code follow} is matched. The matched token is also consumed.
     */
    protected void consumeUntilGreedy(Parser recognizer, IntervalSet follow) {
        logger.trace("CONSUME UNTIL GREEDY {}", follow.toString());
        // Skip tokens until EOF (-1) or a member of the follow set is seen.
        for (int ttype = recognizer.getInputStream().LA(1); ttype != -1 && !follow.contains(ttype); ttype = recognizer.getInputStream().LA(1)) {
            Token t = recognizer.consume();
            logger.trace("Skipped greedy: {}", t.getText());
        }
        // Consume the matched follow token as well (greedy behavior).
        Token t = recognizer.consume();
        logger.trace("Skipped greedy: {} follow: {}", t.getText(), follow);
    }
}
public class ModbusMaster {

    /**
     * This function code is used to write a single output to either ON or OFF
     * in a remote device. The requested ON/OFF state is specified by a
     * constant in the request data field: a value of TRUE requests the output
     * to be ON, FALSE requests it to be OFF.
     * <p>
     * The Request PDU specifies the address of the coil to be forced. Coils
     * are addressed starting at zero, so coil number 1 is addressed as 0.
     *
     * @param serverAddress a slave address
     * @param startAddress the address of the coil to be forced
     * @param flag the request data field (true = ON, false = OFF)
     * @throws ModbusProtocolException if a modbus exception is received
     * @throws ModbusNumberException if the response is invalid
     * @throws ModbusIOException if the remote slave is unavailable
     */
    final public void writeSingleCoil(int serverAddress, int startAddress, boolean flag) throws ModbusProtocolException, ModbusNumberException, ModbusIOException {
        // Request construction is delegated to the shared builder; this method
        // only issues the request and lets processRequest handle the response.
        processRequest(ModbusRequestBuilder.getInstance().buildWriteSingleCoil(serverAddress, startAddress, flag));
    }
}
public class JMSDestinationDefinitionProcessor {

    /**
     * Creates the injection binding for a {@code JMSDestinationDefinition}
     * annotation and merges the annotation data into it.
     *
     * @see com.ibm.wsspi.injectionengine.InjectionProcessor#createInjectionBinding(java.lang.annotation.Annotation, java.lang.Class, java.lang.reflect.Member, java.lang.String)
     */
    @Override
    public InjectionBinding<JMSDestinationDefinition> createInjectionBinding(JMSDestinationDefinition annotation, Class<?> instanceClass, Member member, String jndiName) throws InjectionException {
        InjectionBinding<JMSDestinationDefinition> injectionBinding = new JMSDestinationDefinitionInjectionBinding(jndiName, ivNameSpaceConfig);
        injectionBinding.merge(annotation, instanceClass, null);
        return injectionBinding;
    }
}
public class CmsProject { /** * Checks if the full resource name ( including the site root ) of a resource matches
* any of the project resources of a project . < p >
* @ param projectResources a List of project resources as Strings
* @ param resource the resource to check
* @ return true , if the resource is " inside " the project resources */
public static boolean isInsideProject ( List < String > projectResources , CmsResource resource ) { } } | String resourcename = resource . getRootPath ( ) ; return isInsideProject ( projectResources , resourcename ) ; |
public class StaticTypeCheckingVisitor { /** * In the case of a < em > Object . with { . . . } < / em > call , this method is supposed to retrieve
* the inferred closure return type .
* @ param callArguments the argument list from the < em > Object # with ( Closure ) < / em > call , ie . a single closure expression
* @ return the inferred closure return type or < em > null < / em > */
protected ClassNode getInferredReturnTypeFromWithClosureArgument ( Expression callArguments ) { } } | if ( ! ( callArguments instanceof ArgumentListExpression ) ) return null ; ArgumentListExpression argList = ( ArgumentListExpression ) callArguments ; ClosureExpression closure = ( ClosureExpression ) argList . getExpression ( 0 ) ; visitClosureExpression ( closure ) ; if ( getInferredReturnType ( closure ) != null ) { return getInferredReturnType ( closure ) ; } return null ; |
public class AccountHeader { /** * add the values to the bundle for saveInstanceState
* @ param savedInstanceState
* @ return */
public Bundle saveInstanceState ( Bundle savedInstanceState ) { } } | if ( savedInstanceState != null ) { savedInstanceState . putInt ( BUNDLE_SELECTION_HEADER , mAccountHeaderBuilder . getCurrentSelection ( ) ) ; } return savedInstanceState ; |
public class DataSynchronizer {

    /**
     * A document that is paused no longer has remote updates applied to it.
     * Any local updates to this document cause it to be resumed. An example of
     * pausing a document is when a conflict is being resolved for that
     * document and the handler throws an exception.
     * This method allows you to resume sync for a document.
     *
     * @param namespace namespace for the document
     * @param documentId the id of the document to resume syncing
     * @return true if successfully resumed, false if the document
     *         could not be found or there was an error resuming
     */
    boolean resumeSyncForDocument(final MongoNamespace namespace, final BsonValue documentId) {
        if (namespace == null || documentId == null) {
            return false;
        }
        final NamespaceSynchronizationConfig namespaceSynchronizationConfig;
        final CoreDocumentSynchronizationConfig config;
        // The assignments happen inside the condition: bail out when either
        // the namespace config or the per-document config cannot be found.
        if ((namespaceSynchronizationConfig = syncConfig.getNamespaceConfig(namespace)) == null
                || (config = namespaceSynchronizationConfig.getSynchronizedDocument(documentId)) == null) {
            return false;
        }
        config.setPaused(false);
        // Re-read the flag so the result reflects whether unpausing stuck.
        return !config.isPaused();
    }
}
public class ServerFactory {

    /**
     * Resolves the server's bound host and port.
     *
     * @param serverConfig the server configuration to resolve in place
     */
    private static void resolveServerConfig(ServerConfig serverConfig) {
        // Bind to the configured NIC, or to all NICs when none is configured.
        String boundHost = serverConfig.getBoundHost();
        if (boundHost == null) {
            String host = serverConfig.getHost();
            if (StringUtils.isBlank(host)) {
                host = SystemInfo.getLocalHost();
                serverConfig.setHost(host);
                // On Windows, binding a port on 0.0.0.0 still allows other
                // processes to bind the same port, so bind the concrete host
                // there; on other platforms bind the wildcard address.
                boundHost = SystemInfo.isWindows() ? host : NetUtils.ANYHOST;
            } else {
                boundHost = host;
            }
            serverConfig.setBoundHost(boundHost);
        }
        // Resolve the port to bind: when adaptive, probe from the configured
        // port up to the configured end of the range for a free one.
        if (serverConfig.isAdaptivePort()) {
            int oriPort = serverConfig.getPort();
            int port = NetUtils.getAvailablePort(boundHost, oriPort, RpcConfigs.getIntValue(RpcOptions.SERVER_PORT_END));
            if (port != oriPort) {
                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info("Changed port from {} to {} because the config port is disabled", oriPort, port);
                }
                serverConfig.setPort(port);
            }
        }
    }
}
public class CloudMe {

    /**
     * Gets the id of the root folder.
     * If the root id is already known it is returned, else it is fetched
     * (from the "home" element of the login response) and then cached.
     *
     * @return the id of the root folder
     */
    private String getRootId() {
        // NOTE(review): lazy init is not synchronized — concurrent first calls
        // may each trigger getLogin(); confirm single-threaded use.
        if (rootId == null) {
            rootId = getLogin().getElementsByTagName("home").item(0).getTextContent();
        }
        return rootId;
    }
}
public class WordNet {

    /**
     * Gets senses for a surface form.
     *
     * @param surfaceForm the surface form
     * @param pos the part of speech tag
     * @param language the language
     * @return the senses
     */
    public List<Sense> getSenses(@NonNull String surfaceForm, @NonNull POS pos, @NonNull Language language) {
        // Candidate lemmas come from the language-specific lemmatizer.
        // NOTE(review): the -1 passed to SenseEnum presumably means "any
        // sense id" — confirm against the SenseEnum contract.
        return getSenses(new SenseEnum(-1, pos.getUniversalTag(), language), Lemmatizers.getLemmatizer(language).allPossibleLemmas(surfaceForm, pos));
    }
}
public class ReportConverter {

    /**
     * Loads properties from the specified Reader. The properties are of the
     * form <code>key=value</code>, one property per line; the input may not be
     * encoded as ISO-8859-1. The data is interpreted according to these rules:
     * <ul>
     * <li>Empty lines are ignored.</li>
     * <li>Lines starting with "#" or "!" are comment lines and are ignored.</li>
     * <li>A backslash at the end of the line escapes the following newline
     * character ("\r", "\n", "\r\n"). If there is a whitespace after the
     * backslash it escapes that whitespace instead of joining the lines.
     * This does not apply to comment lines.</li>
     * <li>A property line consists of the key, the separator, and the value.
     * The key runs up to the first unescaped whitespace, "=" or ":". The
     * separator is one whitespace, "=" or ":" plus any surrounding
     * whitespace; the value starts right after it.</li>
     * <li>Recognized escapes: "\\", "\r", "\n", "\!", "\#", "\t", "\b",
     * "\f", and "&#92;uXXXX" (unicode character).</li>
     * </ul>
     *
     * @param reader Reader from which to read the properties of this CrashReportData
     * @return CrashReportData read from the supplied Reader
     * @throws java.io.IOException if the properties could not be read
     * @since 1.6
     */
    @NonNull
    private synchronized CrashReportData legacyLoad(@NonNull Reader reader) throws IOException {
        // Parser is a hand-rolled character state machine; "mode" tracks the
        // escape/continuation state (NONE, SLASH, UNICODE, CONTINUE, IGNORE,
        // KEY_DONE). "buf" accumulates the current key+value characters and
        // keyLength marks where the key ends inside it (-1 = still in key).
        int mode = NONE, unicode = 0, count = 0;
        char nextChar;
        char[] buf = new char[40];
        int offset = 0, keyLength = -1, intVal;
        boolean firstChar = true;
        final CrashReportData crashData = new CrashReportData();
        final BufferedReader br = new BufferedReader(reader, ACRAConstants.DEFAULT_BUFFER_SIZE_IN_BYTES);
        try {
            while (true) {
                intVal = br.read();
                if (intVal == -1) {
                    break;
                }
                nextChar = (char) intVal;
                // Grow the accumulation buffer when full.
                if (offset == buf.length) {
                    final char[] newBuf = new char[buf.length * 2];
                    System.arraycopy(buf, 0, newBuf, 0, offset);
                    buf = newBuf;
                }
                if (mode == UNICODE) {
                    // Inside a \ uXXXX escape: accumulate up to 4 hex digits.
                    final int digit = Character.digit(nextChar, 16);
                    if (digit >= 0) {
                        unicode = (unicode << 4) + digit;
                        if (++count < 4) {
                            continue;
                        }
                    } else if (count <= 4) {
                        // luni.09 = Invalid Unicode sequence: illegal character
                        throw new IllegalArgumentException("luni.09");
                    }
                    mode = NONE;
                    buf[offset++] = (char) unicode;
                    if (nextChar != '\n' && nextChar != '\u0085') {
                        continue;
                    }
                }
                if (mode == SLASH) {
                    // The previous character was a backslash: decode the escape.
                    mode = NONE;
                    switch (nextChar) {
                        case '\r':
                            mode = CONTINUE; // Look for a following \n
                            continue;
                        case '\u0085':
                        case '\n':
                            mode = IGNORE; // Ignore whitespace on the next line
                            continue;
                        case 'b':
                            nextChar = '\b';
                            break;
                        case 'f':
                            nextChar = '\f';
                            break;
                        case 'n':
                            nextChar = '\n';
                            break;
                        case 'r':
                            nextChar = '\r';
                            break;
                        case 't':
                            nextChar = '\t';
                            break;
                        case 'u':
                            mode = UNICODE;
                            unicode = count = 0;
                            continue;
                    }
                } else {
                    switch (nextChar) {
                        case '#':
                        case '!':
                            if (firstChar) {
                                // Comment line: consume to the end of the line.
                                while (true) {
                                    intVal = br.read();
                                    if (intVal == -1) {
                                        break;
                                    }
                                    nextChar = (char) intVal; // & 0xff not required
                                    if (nextChar == '\r' || nextChar == '\n' || nextChar == '\u0085') {
                                        break;
                                    }
                                }
                                continue;
                            }
                            break;
                        case '\n':
                            if (mode == CONTINUE) { // Part of a \r\n sequence
                                mode = IGNORE; // Ignore whitespace on the next line
                                continue;
                            }
                            // fall into the next case
                        case '\u0085':
                        case '\r':
                            // End of a property line: emit the buffered key/value.
                            mode = NONE;
                            firstChar = true;
                            if (offset > 0 || (offset == 0 && keyLength == 0)) {
                                if (keyLength == -1) {
                                    keyLength = offset;
                                }
                                final String temp = new String(buf, 0, offset);
                                putKeyValue(crashData, temp.substring(0, keyLength), temp.substring(keyLength));
                            }
                            keyLength = -1;
                            offset = 0;
                            continue;
                        case '\\':
                            if (mode == KEY_DONE) {
                                keyLength = offset;
                            }
                            mode = SLASH;
                            continue;
                        case ':':
                        case '=':
                            if (keyLength == -1) { // if parsing the key
                                mode = NONE;
                                keyLength = offset;
                                continue;
                            }
                            break;
                    }
                    if (Character.isWhitespace(nextChar)) {
                        if (mode == CONTINUE) {
                            mode = IGNORE;
                        }
                        // if key length == 0 or value length == 0
                        if (offset == 0 || offset == keyLength || mode == IGNORE) {
                            continue;
                        }
                        if (keyLength == -1) { // if parsing the key
                            mode = KEY_DONE;
                            continue;
                        }
                    }
                    if (mode == IGNORE || mode == CONTINUE) {
                        mode = NONE;
                    }
                }
                firstChar = false;
                if (mode == KEY_DONE) {
                    keyLength = offset;
                    mode = NONE;
                }
                buf[offset++] = nextChar;
            }
            if (mode == UNICODE && count <= 4) {
                // luni.08 = Invalid Unicode sequence: expected format \\uxxxx
                throw new IllegalArgumentException("luni.08");
            }
            // Flush the final property if the input did not end with a newline.
            if (keyLength == -1 && offset > 0) {
                keyLength = offset;
            }
            if (keyLength >= 0) {
                final String temp = new String(buf, 0, offset);
                String value = temp.substring(keyLength);
                if (mode == SLASH) {
                    value += "\u0000";
                }
                putKeyValue(crashData, temp.substring(0, keyLength), value);
            }
            IOUtils.safeClose(reader);
            return crashData;
        } finally {
            IOUtils.safeClose(br);
        }
    }
}
public class CheckParameterizables { /** * Check for a default constructor . */
private State checkDefaultConstructor ( Class < ? > cls , State state ) throws NoClassDefFoundError { } } | try { cls . getConstructor ( ) ; return State . DEFAULT_INSTANTIABLE ; } catch ( Exception e ) { // do nothing .
} return state ; |
public class RedoLog {

    /**
     * Returns a List with all {@link MultiIndex.Action} instances in the
     * redo log.
     *
     * @return a List with all {@link MultiIndex.Action} instances in the
     *         redo log
     * @throws IOException if an error occurs while reading from the redo log
     */
    List<MultiIndex.Action> getActions() throws IOException {
        final List<MultiIndex.Action> actions = new ArrayList<MultiIndex.Action>();
        // Collect each action the reader emits into the result list.
        read(new ActionCollector() {
            public void collect(MultiIndex.Action a) {
                actions.add(a);
            }
        });
        return actions;
    }
}
public class Voikko {

    /**
     * Splits the given natural language text into a list of Token objects.
     *
     * @param text the text to tokenize
     * @return list of tokens
     */
    public synchronized List<Token> tokens(String text) {
        requireValidHandle();
        List<Token> allTokens = new ArrayList<Token>();
        int lastStart = 0;
        // Split the input around "special unknown" characters (as located by
        // indexOfSpecialUnknown); each such character becomes its own UNKNOWN
        // token, and the segments between them are tokenized normally with an
        // offset so token positions refer to the original text.
        for (int i = indexOfSpecialUnknown(text, 0); i != -1; i = indexOfSpecialUnknown(text, i + 1)) {
            allTokens.addAll(tokensNonNull(text.substring(lastStart, i), lastStart));
            allTokens.add(new Token(TokenType.UNKNOWN, Character.toString(text.charAt(i)), i));
            lastStart = i + 1;
        }
        // Tokenize the trailing segment after the last special character.
        allTokens.addAll(tokensNonNull(text.substring(lastStart), lastStart));
        return allTokens;
    }
}
public class GanttProjectReader { /** * Read custom property definitions for resources .
* @ param gpResources GanttProject resources */
private void readResourceCustomPropertyDefinitions ( Resources gpResources ) { } } | CustomField field = m_projectFile . getCustomFields ( ) . getCustomField ( ResourceField . TEXT1 ) ; field . setAlias ( "Phone" ) ; for ( CustomPropertyDefinition definition : gpResources . getCustomPropertyDefinition ( ) ) { // Find the next available field of the correct type .
String type = definition . getType ( ) ; FieldType fieldType = RESOURCE_PROPERTY_TYPES . get ( type ) . getField ( ) ; // If we have run out of fields of the right type , try using a text field .
if ( fieldType == null ) { fieldType = RESOURCE_PROPERTY_TYPES . get ( "text" ) . getField ( ) ; } // If we actually have a field available , set the alias to match
// the name used in GanttProject .
if ( fieldType != null ) { field = m_projectFile . getCustomFields ( ) . getCustomField ( fieldType ) ; field . setAlias ( definition . getName ( ) ) ; String defaultValue = definition . getDefaultValue ( ) ; if ( defaultValue != null && defaultValue . isEmpty ( ) ) { defaultValue = null ; } m_resourcePropertyDefinitions . put ( definition . getId ( ) , new Pair < FieldType , String > ( fieldType , defaultValue ) ) ; } } |
public class DefaultFeature { /** * If a property with the specified name exists , it is removed from this feature . if the given propertyname
* is null , the call is ignored . Note that this method has no effect whatsoever regarding the geometry or id property .
* @ param propertyName the name of the property to wipe . */
public void wipeProperty ( String propertyName ) { } } | if ( propertyName != null ) { propertyNames . remove ( propertyName ) ; if ( properties . containsKey ( propertyName ) ) { properties . remove ( propertyName ) ; } } |
public class ShuffledSource {

    /**
     * {@inheritDoc}
     * <p>
     * Reading is delegated unchanged to the wrapped source.
     */
    @Override
    public BoundedReader<T> createReader(PipelineOptions options) throws IOException {
        return delegate.createReader(options);
    }
}
public class ResourcesInner {

    /**
     * Creates a resource.
     *
     * @param resourceGroupName the name of the resource group for the resource; the name is case insensitive
     * @param resourceProviderNamespace the namespace of the resource provider
     * @param parentResourcePath the parent resource identity
     * @param resourceType the resource type of the resource to create
     * @param resourceName the name of the resource to create
     * @param apiVersion the API version to use for the operation
     * @param parameters parameters for creating or updating the resource
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<GenericResourceInner> createOrUpdateAsync(String resourceGroupName, String resourceProviderNamespace, String parentResourcePath, String resourceType, String resourceName, String apiVersion, GenericResourceInner parameters, final ServiceCallback<GenericResourceInner> serviceCallback) {
        // Thin async adapter: wraps the ServiceResponse observable in a
        // ServiceFuture that drives the supplied callback.
        return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, apiVersion, parameters), serviceCallback);
    }
}
public class FieldScopeUtil { /** * Formats { @ code fmt } with the field numbers , concatenated , if a descriptor is available to
* resolve them to field names . Otherwise it uses the raw integers .
* @ param fmt Format string that must contain exactly one ' % s ' and no other format parameters . */
private static String resolveFieldNumbers ( Optional < Descriptor > optDescriptor , String fmt , Iterable < Integer > fieldNumbers ) { } } | if ( optDescriptor . isPresent ( ) ) { Descriptor descriptor = optDescriptor . get ( ) ; List < String > strings = Lists . newArrayList ( ) ; for ( int fieldNumber : fieldNumbers ) { FieldDescriptor field = descriptor . findFieldByNumber ( fieldNumber ) ; strings . add ( field != null ? field . toString ( ) : String . format ( "%d (?)" , fieldNumber ) ) ; } return String . format ( fmt , join ( strings ) ) ; } else { return String . format ( fmt , join ( fieldNumbers ) ) ; } |
public class Minimizer {
    /**
     * This method performs the actual splitting of blocks, using the sub block information stored
     * in each block object.
     *
     * <p>For every block in {@code splitBlocks}: if all of its elements ended up in sub blocks,
     * the block object itself is reused for the first sub block; otherwise the block keeps its
     * remaining elements and every sub block becomes a brand-new {@code Block}. Newly created
     * blocks are registered in the partition, and either all new blocks or all but the largest
     * are (re-)enqueued as potential splitters, depending on whether the original block was
     * already queued.
     */
    private void updateBlocks() {
        for (Block<S, L> block : splitBlocks) {
            // Ignore blocks that have no elements in their sub blocks.
            int inSubBlocks = block.getElementsInSubBlocks();
            if (inSubBlocks == 0) {
                continue;
            }
            // blockRemains: some elements were NOT moved into sub blocks, so the
            // original block survives with those leftovers.
            boolean blockRemains = (inSubBlocks < block.size());
            // reuseBlock: every element is in some sub block, so the original block
            // object can be recycled as the first sub block (avoids an allocation).
            boolean reuseBlock = !blockRemains;
            List<UnorderedCollection<State<S, L>>> subBlocks = block.getSubBlocks();
            // If there is only one sub block which contains all elements of
            // the block, then no split needs to be performed.
            if (!blockRemains && subBlocks.size() == 1) {
                block.clearSubBlocks();
                continue;
            }
            Iterator<UnorderedCollection<State<S, L>>> subBlockIt = subBlocks.iterator();
            if (reuseBlock) {
                // Recycle the original block: swap its state collection with the first
                // sub block's states and fix up the back-references.
                UnorderedCollection<State<S, L>> first = subBlockIt.next();
                block.getStates().swap(first);
                updateBlockReferences(block);
            }
            // Each remaining sub block becomes a new Block registered in the partition.
            while (subBlockIt.hasNext()) {
                UnorderedCollection<State<S, L>> subBlockStates = subBlockIt.next();
                if (blockRemains) {
                    // The original block keeps its leftovers, so explicitly remove the
                    // states that migrated into this sub block.
                    for (State<S, L> state : subBlockStates) {
                        block.removeState(state.getBlockReference());
                    }
                }
                Block<S, L> subBlock = new Block<>(numBlocks++, subBlockStates);
                updateBlockReferences(subBlock);
                newBlocks.add(subBlock);
                addToPartition(subBlock);
            }
            // The (possibly recycled) original block also counts as one of the results.
            newBlocks.add(block);
            block.clearSubBlocks();
            // If the split block previously was in the queue, add all newly
            // created blocks to the queue. Otherwise, it's enough to add
            // all but the largest (Hopcroft's "process the smaller half" trick).
            if (removeFromSplitterQueue(block)) {
                addAllToSplitterQueue(newBlocks);
            } else {
                addAllButLargest(newBlocks);
            }
            newBlocks.clear();
        }
        splitBlocks.clear();
    }
}
public class DescribeDeliveryChannelsResult { /** * A list that contains the descriptions of the specified delivery channel .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setDeliveryChannels ( java . util . Collection ) } or { @ link # withDeliveryChannels ( java . util . Collection ) } if you
* want to override the existing values .
* @ param deliveryChannels
* A list that contains the descriptions of the specified delivery channel .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeDeliveryChannelsResult withDeliveryChannels ( DeliveryChannel ... deliveryChannels ) { } } | if ( this . deliveryChannels == null ) { setDeliveryChannels ( new com . amazonaws . internal . SdkInternalList < DeliveryChannel > ( deliveryChannels . length ) ) ; } for ( DeliveryChannel ele : deliveryChannels ) { this . deliveryChannels . add ( ele ) ; } return this ; |
public class InlineMediaSource { /** * Get resource with media inline data ( nt : file node ) .
* @ param mediaRequest Media reference
* @ return Resource or null if not present */
private Resource getMediaInlineResource ( MediaRequest mediaRequest ) { } } | Resource resource = mediaRequest . getResource ( ) ; if ( resource == null ) { return null ; } // check if resource itself is a nt : file node
if ( JcrBinary . isNtFileOrResource ( resource ) ) { return resource ; } // check if child node exists which is a nt : file node
String refProperty = StringUtils . defaultString ( mediaRequest . getRefProperty ( ) , mediaHandlerConfig . getMediaInlineNodeName ( ) ) ; Resource mediaInlineResource = resource . getChild ( refProperty ) ; if ( JcrBinary . isNtFileOrResource ( mediaInlineResource ) ) { return mediaInlineResource ; } // not found
return null ; |
public class Environment {
    /**
     * Reads the Unix shell environment variables into a set of properties.
     * (Note: the previous javadoc said "Windows variables", but this method delegates to
     * {@code getUnixEnv()} and is clearly the Unix path.)
     *
     * @return the environment properties
     * @throws EnvironmentException if the environment process exits with a non-zero code
     */
    private static Properties getUnixShellVariables() throws EnvironmentException {
        Properties properties = new Properties();
        // Command used to dump the environment (e.g. the platform's env executable).
        String cmdExec = getUnixEnv();
        // readEnvironment runs the command and fills 'properties' as a side effect.
        int exitValue = readEnvironment(cmdExec, properties);
        // Check that we exited normally before returning an invalid output.
        if (0 != exitValue) {
            throw new EnvironmentException("Environment process failed with non-zero exit code of " + exitValue);
        }
        return properties;
    }
}
public class DimensionSelectorUtils { /** * Generic implementation of { @ link DimensionSelector # makeValueMatcher ( Predicate ) } , uses { @ link
* DimensionSelector # getRow ( ) } of the given { @ link DimensionSelector } . " Lazy " DimensionSelectors could delegate
* { @ code makeValueMatcher ( ) } to this method , but encouraged to implement { @ code makeValueMatcher ( ) } themselves ,
* bypassing the { @ link IndexedInts } abstraction . */
public static ValueMatcher makeValueMatcherGeneric ( DimensionSelector selector , Predicate < String > predicate ) { } } | int cardinality = selector . getValueCardinality ( ) ; if ( cardinality >= 0 && selector . nameLookupPossibleInAdvance ( ) ) { return makeDictionaryEncodedValueMatcherGeneric ( selector , predicate ) ; } else { return makeNonDictionaryEncodedValueMatcherGeneric ( selector , predicate ) ; } |
public class JobMonitoringListener { /** * { @ inheritDoc } */
public void handleNotification ( Notification notification , Object handback ) { } } | if ( notification instanceof AttributeChangeNotification ) { AttributeChangeNotification attributeChangeNotification = ( AttributeChangeNotification ) notification ; JobReport jobReport = ( JobReport ) attributeChangeNotification . getNewValue ( ) ; onJobReportUpdate ( jobReport ) ; } if ( notification instanceof JMXConnectionNotification ) { JMXConnectionNotification jmxConnectionNotification = ( JMXConnectionNotification ) notification ; String type = jmxConnectionNotification . getType ( ) ; switch ( type ) { case JMXConnectionNotification . OPENED : onConnectionOpened ( ) ; break ; case JMXConnectionNotification . CLOSED : onConnectionClosed ( ) ; break ; default : break ; } } |
public class DirLock { /** * if the lock on the directory is stale , take ownership */
public static DirLock takeOwnershipIfStale ( FileSystem fs , Path dirToLock , int lockTimeoutSec ) { } } | Path dirLockFile = getDirLockFile ( dirToLock ) ; long now = System . currentTimeMillis ( ) ; long expiryTime = now - ( lockTimeoutSec * 1000 ) ; try { long modTime = fs . getFileStatus ( dirLockFile ) . getModificationTime ( ) ; if ( modTime <= expiryTime ) { return takeOwnership ( fs , dirLockFile ) ; } return null ; } catch ( IOException e ) { return null ; } |
public class Ed25519FieldElement {
    /**
     * Gets this field element to the power of $(2^{252} - 3)$. This is a helper function for
     * calculating the square root.
     *
     * <p>The implementation is a fixed square-and-multiply addition chain (as in the ref10
     * implementation); the statement order must not be changed. Each comment below states the
     * exponent of the base element held by the assigned variable after that statement.
     *
     * <p>TODO - CR BR: I think it makes sense to have a sqrt function.
     *
     * @return This field element to the power of $(2^{252} - 3)$.
     */
    public FieldElement pow22523() {
        FieldElement t0, t1, t2;
        // 2 == 2 * 1
        t0 = square();
        // 4 == 2 * 2
        t1 = t0.square();
        // 8 == 2 * 4
        t1 = t1.square();
        // z9 = z1 * z8  (i.e. exponent 9 == 1 + 8)
        t1 = multiply(t1);
        // 11 == 9 + 2
        t0 = t0.multiply(t1);
        // 22 == 2 * 11
        t0 = t0.square();
        // 31 == 22 + 9
        t0 = t1.multiply(t0);
        // 2^6 - 2^1
        t1 = t0.square();
        // 2^10 - 2^5
        for (int i = 1; i < 5; ++i) {
            t1 = t1.square();
        }
        // 2^10 - 2^0
        t0 = t1.multiply(t0);
        // 2^11 - 2^1
        t1 = t0.square();
        // 2^20 - 2^10
        for (int i = 1; i < 10; ++i) {
            t1 = t1.square();
        }
        // 2^20 - 2^0
        t1 = t1.multiply(t0);
        // 2^21 - 2^1
        t2 = t1.square();
        // 2^40 - 2^20
        for (int i = 1; i < 20; ++i) {
            t2 = t2.square();
        }
        // 2^40 - 2^0
        t1 = t2.multiply(t1);
        // 2^41 - 2^1
        t1 = t1.square();
        // 2^50 - 2^10
        for (int i = 1; i < 10; ++i) {
            t1 = t1.square();
        }
        // 2^50 - 2^0
        t0 = t1.multiply(t0);
        // 2^51 - 2^1
        t1 = t0.square();
        // 2^100 - 2^50
        for (int i = 1; i < 50; ++i) {
            t1 = t1.square();
        }
        // 2^100 - 2^0
        t1 = t1.multiply(t0);
        // 2^101 - 2^1
        t2 = t1.square();
        // 2^200 - 2^100
        for (int i = 1; i < 100; ++i) {
            t2 = t2.square();
        }
        // 2^200 - 2^0
        t1 = t2.multiply(t1);
        // 2^201 - 2^1
        t1 = t1.square();
        // 2^250 - 2^50
        for (int i = 1; i < 50; ++i) {
            t1 = t1.square();
        }
        // 2^250 - 2^0
        t0 = t1.multiply(t0);
        // 2^251 - 2^1
        t0 = t0.square();
        // 2^252 - 2^2
        t0 = t0.square();
        // 2^252 - 3
        return multiply(t0);
    }
}
public class PdfBasicMetadata {
    /**
     * Check that given file is supported by this sequencer.
     *
     * <p>Loads the PDF from {@code in} and, as a side effect, populates this object's fields:
     * encryption flag, page count, first-page orientation, PDF version, document information
     * (only when not encrypted), and per-page metadata including embedded file attachments.
     *
     * @return true if the document could be loaded and examined
     * @throws Exception if loading or parsing the PDF fails
     */
    public boolean check() throws Exception {
        try (PDDocument document = PDDocument.load(in)) {
            PDDocumentCatalog catalog = document.getDocumentCatalog();
            PDFPageable pageable = new PDFPageable(document);
            // Orientation is derived from the first page only.
            PageFormat firstPage = pageable.getPageFormat(0);
            encrypted = document.isEncrypted();
            pageCount = document.getNumberOfPages();
            orientation = ORIENTATION_STRINGS[firstPage.getOrientation()];
            // Default to the version from the file header.
            version = String.valueOf(document.getDocument().getVersion());
            String catalogVersion = catalog.getVersion();
            if (catalogVersion != null && !catalogVersion.isEmpty()) {
                // According to specs the version saved here should be determining instead of
                // the version in the header. It is barely used, though.
                version = catalogVersion;
            }
            if (!encrypted) {
                // Document information is only readable on unencrypted documents.
                PDDocumentInformation metadata = document.getDocumentInformation();
                author = metadata.getAuthor();
                creationDate = metadata.getCreationDate();
                creator = metadata.getCreator();
                keywords = metadata.getKeywords();
                modificationDate = metadata.getModificationDate();
                producer = metadata.getProducer();
                subject = metadata.getSubject();
                title = metadata.getTitle();
            }
            // Extract all attached files from all pages (1-based page numbering).
            int pageNumber = 0;
            for (Object page : catalog.getPages()) {
                pageNumber += 1;
                PdfPageMetadata pageMetadata = new PdfPageMetadata();
                pageMetadata.setPageNumber(pageNumber);
                for (PDAnnotation annotation : ((PDPage) page).getAnnotations()) {
                    // Only file-attachment annotations carry embedded files.
                    if (annotation instanceof PDAnnotationFileAttachment) {
                        PdfAttachmentMetadata attachmentMetadata = new PdfAttachmentMetadata();
                        PDAnnotationFileAttachment fann = (PDAnnotationFileAttachment) annotation;
                        PDComplexFileSpecification fileSpec = (PDComplexFileSpecification) fann.getFile();
                        PDEmbeddedFile embeddedFile = fileSpec.getEmbeddedFile();
                        attachmentMetadata.setSubject(fann.getSubject());
                        attachmentMetadata.setName(fileSpec.getFilename());
                        attachmentMetadata.setCreationDate(embeddedFile.getCreationDate());
                        attachmentMetadata.setModificationDate(embeddedFile.getModDate());
                        attachmentMetadata.setMimeType(embeddedFile.getSubtype());
                        attachmentMetadata.setData(embeddedFile.toByteArray());
                        pageMetadata.addAttachment(attachmentMetadata);
                    }
                }
                pages.add(pageMetadata);
            }
            return true;
        }
    }
}
public class BaseConnectionSource {
    /**
     * Clear the connection that was previously saved.
     *
     * <p>The saved connection is reference-counted (nested saves); the thread-local slot is only
     * emptied once the nesting count reaches zero. A null argument is ignored; mismatched or
     * missing saved connections are logged as errors but do not throw.
     *
     * @return True if the connection argument had been saved.
     */
    protected boolean clearSpecial(DatabaseConnection connection, Logger logger) {
        NestedConnection currentSaved = specialConnection.get();
        boolean cleared = false;
        if (connection == null) {
            // null argument: deliberately ignored.
        } else if (currentSaved == null) {
            logger.error("no connection has been saved when clear() called");
        } else if (currentSaved.connection == connection) {
            // Identity match (==, not equals): this is the saved connection.
            if (currentSaved.decrementAndGet() == 0) {
                // We only clear the thread-local once the nesting counter hits 0.
                specialConnection.set(null);
            }
            cleared = true;
        } else {
            logger.error("connection saved {} is not the one being cleared {}", currentSaved.connection, connection);
        }
        // release should then be called after clear.
        return cleared;
    }
}
public class IOUtils {
    /**
     * Given a source File, return its direct descendants if the File is a directory. Otherwise
     * return the File itself.
     *
     * @param source File or folder to be examined
     * @return a File[] array containing the files inside this folder, a size-1 array containing
     *         the file itself if it is not a directory, or an empty array if the directory
     *         listing fails (File.listFiles() returns null on I/O error)
     */
    public static File[] filesContainedIn(File source) {
        if (source.isDirectory()) {
            File[] children = source.listFiles();
            // listFiles() returns null on I/O error; never propagate null to callers,
            // since the documented contract promises an array.
            return children != null ? children : new File[0];
        }
        return new File[] { source };
    }
}
public class LinearScaling { /** * Make a linear scaling from a given minimum and maximum . The minimum will be
* mapped to zero , the maximum to one .
* @ param min Minimum
* @ param max Maximum
* @ return New linear scaling . */
public static LinearScaling fromMinMax ( double min , double max ) { } } | double zoom = 1.0 / ( max - min ) ; return new LinearScaling ( zoom , - min * zoom ) ; |
public class MicroserviceRestAdapter { /** * Standard behavior is to publish event fitting standard pattern of default event
* used in DependenciesWaitActivity ( Microservice Dependencies Wait ) */
protected void notifyServiceSummaryUpdate ( ServiceSummary serviceSummary ) throws ActivityException { } } | WorkflowServices wfs = ServiceLocator . getWorkflowServices ( ) ; try { wfs . notify ( "service-summary-update-" + getMasterRequestId ( ) , null , 2 ) ; } catch ( ServiceException e ) { throw new ActivityException ( "Cannot publish Service Summary update event" , e ) ; } |
public class ApolloCallTracker { /** * < p > Adds provided { @ link ApolloQueryCall } that is currently in progress . < / p >
* < p > < b > Note < / b > : This method needs to be called right before an apolloCall is executed . < / p > */
void registerQueryCall ( @ NotNull ApolloQueryCall apolloQueryCall ) { } } | checkNotNull ( apolloQueryCall , "apolloQueryCall == null" ) ; OperationName operationName = apolloQueryCall . operation ( ) . name ( ) ; registerCall ( activeQueryCalls , operationName , apolloQueryCall ) ; |
public class LogManager { /** * Returns the current LoggerContext .
* WARNING - The LoggerContext returned by this method may not be the LoggerContext used to create a Logger for the
* calling class .
* @ return The current LoggerContext . */
public static LoggerContext getContext ( ) { } } | try { return factory . getContext ( FQCN , null , null , true ) ; } catch ( final IllegalStateException ex ) { LOGGER . warn ( ex . getMessage ( ) + " Using SimpleLogger" ) ; return new SimpleLoggerContextFactory ( ) . getContext ( FQCN , null , null , true ) ; } |
public class RuleBasedNumberFormat {
    /**
     * Bottleneck through which all the public format() methods that take a long pass. By the
     * time we get here, we know which rule set we're using to do the formatting.
     *
     * @param number The number to format
     * @param ruleSet The rule set to use to format the number
     * @return The text that resulted from formatting the number
     */
    private String format(long number, NFRuleSet ruleSet) {
        // All API format() routines that take a long are funneled through here.
        // There are two near-identical bottlenecks -- one taking a double and one
        // taking a long -- because a long carries a couple of digits of precision
        // that a double cannot (both types are 8 bytes, but double borrows some
        // mantissa bits for the exponent).
        //
        // Create an empty string buffer where the result will be built, and pass
        // it to the rule set (along with an insertion position of 0 and the number
        // being formatted) for formatting.
        StringBuilder result = new StringBuilder();
        if (number == Long.MIN_VALUE) {
            // We can't handle Long.MIN_VALUE right now (its negation overflows).
            // Provide an accurate default value via the decimal format instead.
            result.append(getDecimalFormat().format(Long.MIN_VALUE));
        } else {
            ruleSet.format(number, result, 0, 0);
        }
        postProcess(result, ruleSet);
        return result.toString();
    }
}
public class JKMessage {
    /**
     * Returns the localized label for the given key, formatted with the given parameters.
     *
     * @param key the message key to look up
     * @param params the parameters substituted into the label
     * @return the resolved label string
     */
    public static String get(String key, Object... params) {
        // Delegate to the singleton's label lookup.
        return getInstance().getLabel(key, params);
    }
}
public class KinesisConfigUtil {
    /**
     * Validate configuration properties for {@link FlinkKinesisConsumer}.
     *
     * <p>Checks, in order: AWS credentials/region settings, that exactly one of AWS region or
     * AWS endpoint is configured, the optional initial stream position (including the timestamp
     * required for AT_TIMESTAMP), and the numeric ranges of all optional retry/backoff/interval
     * properties. Throws {@link IllegalArgumentException} (or a NullPointerException for a null
     * config) on the first violation found.
     */
    public static void validateConsumerConfiguration(Properties config) {
        checkNotNull(config, "config can not be null");
        validateAwsConfiguration(config);
        // Exactly one of region or endpoint must be present (XOR), per validation in
        // AwsClientBuilder.
        if (!(config.containsKey(AWSConfigConstants.AWS_REGION) ^ config.containsKey(ConsumerConfigConstants.AWS_ENDPOINT))) {
            throw new IllegalArgumentException(String.format("For FlinkKinesisConsumer either AWS region ('%s') or AWS endpoint ('%s') must be set in the config.", AWSConfigConstants.AWS_REGION, AWSConfigConstants.AWS_ENDPOINT));
        }
        if (config.containsKey(ConsumerConfigConstants.STREAM_INITIAL_POSITION)) {
            String initPosType = config.getProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION);
            // Specified initial position in stream must be one of the InitialPosition
            // enum constants (e.g. LATEST, TRIM_HORIZON or AT_TIMESTAMP).
            try {
                InitialPosition.valueOf(initPosType);
            } catch (IllegalArgumentException e) {
                StringBuilder sb = new StringBuilder();
                for (InitialPosition pos : InitialPosition.values()) {
                    sb.append(pos.toString()).append(", ");
                }
                throw new IllegalArgumentException("Invalid initial position in stream set in config. Valid values are: " + sb.toString());
            }
            // AT_TIMESTAMP additionally requires a (valid) initial timestamp property.
            if (InitialPosition.valueOf(initPosType) == InitialPosition.AT_TIMESTAMP) {
                if (!config.containsKey(ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP)) {
                    throw new IllegalArgumentException("Please set value for initial timestamp ('" + ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP + "') when using AT_TIMESTAMP initial position.");
                }
                validateOptionalDateProperty(config, ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP, config.getProperty(ConsumerConfigConstants.STREAM_TIMESTAMP_DATE_FORMAT, ConsumerConfigConstants.DEFAULT_STREAM_TIMESTAMP_DATE_FORMAT), "Invalid value given for initial timestamp for AT_TIMESTAMP initial position in stream. " + "Must be a valid format: yyyy-MM-dd'T'HH:mm:ss.SSSXXX or non-negative double value. For example, 2016-04-04T19:58:46.480-00:00 or 1459799926.480 .");
            }
        }
        // Range checks for the optional getRecords tuning properties.
        validateOptionalPositiveIntProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_MAX, "Invalid value given for maximum records per getRecords shard operation. Must be a valid non-negative integer value.");
        validateOptionalPositiveIntProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_RETRIES, "Invalid value given for maximum retry attempts for getRecords shard operation. Must be a valid non-negative integer value.");
        validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_BASE, "Invalid value given for get records operation base backoff milliseconds. Must be a valid non-negative long value.");
        validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_MAX, "Invalid value given for get records operation max backoff milliseconds. Must be a valid non-negative long value.");
        validateOptionalPositiveDoubleProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_EXPONENTIAL_CONSTANT, "Invalid value given for get records operation backoff exponential constant. Must be a valid non-negative double value.");
        validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS, "Invalid value given for getRecords sleep interval in milliseconds. Must be a valid non-negative long value.");
        // Range checks for the optional getShardIterator tuning properties.
        validateOptionalPositiveIntProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_RETRIES, "Invalid value given for maximum retry attempts for getShardIterator shard operation. Must be a valid non-negative integer value.");
        validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_BASE, "Invalid value given for get shard iterator operation base backoff milliseconds. Must be a valid non-negative long value.");
        validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_MAX, "Invalid value given for get shard iterator operation max backoff milliseconds. Must be a valid non-negative long value.");
        validateOptionalPositiveDoubleProperty(config, ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_EXPONENTIAL_CONSTANT, "Invalid value given for get shard iterator operation backoff exponential constant. Must be a valid non-negative double value.");
        // Range checks for shard discovery and listShards tuning properties.
        validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.SHARD_DISCOVERY_INTERVAL_MILLIS, "Invalid value given for shard discovery sleep interval in milliseconds. Must be a valid non-negative long value.");
        validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.LIST_SHARDS_BACKOFF_BASE, "Invalid value given for list shards operation base backoff milliseconds. Must be a valid non-negative long value.");
        validateOptionalPositiveLongProperty(config, ConsumerConfigConstants.LIST_SHARDS_BACKOFF_MAX, "Invalid value given for list shards operation max backoff milliseconds. Must be a valid non-negative long value.");
        validateOptionalPositiveDoubleProperty(config, ConsumerConfigConstants.LIST_SHARDS_BACKOFF_EXPONENTIAL_CONSTANT, "Invalid value given for list shards operation backoff exponential constant. Must be a valid non-negative double value.");
        // The getRecords interval additionally has an upper bound.
        if (config.containsKey(ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS)) {
            checkArgument(Long.parseLong(config.getProperty(ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS)) < ConsumerConfigConstants.MAX_SHARD_GETRECORDS_INTERVAL_MILLIS, "Invalid value given for getRecords sleep interval in milliseconds. Must be lower than " + ConsumerConfigConstants.MAX_SHARD_GETRECORDS_INTERVAL_MILLIS + " milliseconds.");
        }
    }
}
public class WasbUnderFileSystem { /** * Prepares the configuration for this Wasb as an HDFS configuration .
* @ param conf the configuration for this UFS
* @ return the created configuration */
public static Configuration createConfiguration ( UnderFileSystemConfiguration conf ) { } } | Configuration wasbConf = HdfsUnderFileSystem . createConfiguration ( conf ) ; for ( Map . Entry < String , String > entry : conf . toMap ( ) . entrySet ( ) ) { String key = entry . getKey ( ) ; String value = entry . getValue ( ) ; if ( PropertyKey . Template . UNDERFS_AZURE_ACCOUNT_KEY . matches ( key ) ) { wasbConf . set ( key , value ) ; } } wasbConf . set ( "fs.AbstractFileSystem.wasb.impl" , "org.apache.hadoop.fs.azure.Wasb" ) ; wasbConf . set ( "fs.wasb.impl" , "org.apache.hadoop.fs.azure.NativeAzureFileSystem" ) ; return wasbConf ; |
public class DisplayUtil { /** * Returns the logical density of the device ' s display .
* This is a scaling factor for the density - independent pixel unit , where one DIP is one pixel
* on an approximately 160 dpi screen ( for example a 240x320 , 1.5 " x2 " screen ) , providing the
* baseline of the system ' s display . Thus on a 160dpi screen this density value will be 1 ; on a
* 120 dpi screen it would be . 75 ; etc .
* @ param context
* The context , which should be used , as an instance of the class { @ link Context } . The
* context may not be null
* @ return The logical density of the device ' s display as a { @ link Float } value */
public static float getDensity ( @ NonNull final Context context ) { } } | Condition . INSTANCE . ensureNotNull ( context , "The context may not be null" ) ; return context . getResources ( ) . getDisplayMetrics ( ) . density ; |
public class CeCPMain {
    /**
     * Aligns the structures, duplicating ca2 regardless of
     * {@link CECPParameters.getDuplicationHint() param.getDuplicationHint}.
     *
     * @param ca1 atoms of the first structure
     * @param ca2 atoms of the second structure (duplicated before alignment)
     * @param cpparams circular-permutation parameters, passed to post-processing
     * @return the post-processed alignment
     * @throws StructureException if the underlying alignment fails
     */
    private AFPChain alignRight(Atom[] ca1, Atom[] ca2, CECPParameters cpparams) throws StructureException {
        long startTime = System.currentTimeMillis();
        // Duplicate ca2 so a circular permutation can be found as a linear alignment.
        Atom[] ca2m = StructureTools.duplicateCA2(ca2);
        if (debug) {
            System.out.format("Duplicating ca2 took %s ms\n", System.currentTimeMillis() - startTime);
            startTime = System.currentTimeMillis();
        }
        // Do alignment.
        // NOTE(review): this passes the field/variable 'params' to super.align() while the
        // method parameter is 'cpparams' (only used below in postProcessAlignment) — confirm
        // this is intentional and not a leftover from a rename.
        AFPChain afpChain = super.align(ca1, ca2m, params);
        // Since the process of creating ca2m strips the name info away, set it explicitly.
        // Any failure here is deliberately ignored (best-effort naming only).
        try {
            afpChain.setName2(ca2[0].getGroup().getChain().getStructure().getName());
        } catch (Exception e) {
        }
        if (debug) {
            System.out.format("Running %dx2*%d alignment took %s ms\n", ca1.length, ca2.length, System.currentTimeMillis() - startTime);
            startTime = System.currentTimeMillis();
        }
        afpChain = postProcessAlignment(afpChain, ca1, ca2m, calculator, cpparams);
        if (debug) {
            System.out.format("Finding CP point took %s ms\n", System.currentTimeMillis() - startTime);
            startTime = System.currentTimeMillis();
        }
        return afpChain;
    }
}
public class EditService { /** * Delete all enabled overrides for a client
* @ param profileId profile ID of teh client
* @ param client _ uuid UUID of teh client */
public void disableAll ( int profileId , String client_uuid ) { } } | PreparedStatement statement = null ; try ( Connection sqlConnection = sqlService . getConnection ( ) ) { statement = sqlConnection . prepareStatement ( "DELETE FROM " + Constants . DB_TABLE_ENABLED_OVERRIDE + " WHERE " + Constants . CLIENT_PROFILE_ID + " = ?" + " AND " + Constants . CLIENT_CLIENT_UUID + " =? " ) ; statement . setInt ( 1 , profileId ) ; statement . setString ( 2 , client_uuid ) ; statement . executeUpdate ( ) ; statement . close ( ) ; } catch ( SQLException e ) { e . printStackTrace ( ) ; } finally { try { if ( statement != null ) { statement . close ( ) ; } } catch ( Exception e ) { } } |
public class DescribeConfigurationOptionsResult { /** * A list of < a > ConfigurationOptionDescription < / a > .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setOptions ( java . util . Collection ) } or { @ link # withOptions ( java . util . Collection ) } if you want to override
* the existing values .
* @ param options
* A list of < a > ConfigurationOptionDescription < / a > .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeConfigurationOptionsResult withOptions ( ConfigurationOptionDescription ... options ) { } } | if ( this . options == null ) { setOptions ( new com . amazonaws . internal . SdkInternalList < ConfigurationOptionDescription > ( options . length ) ) ; } for ( ConfigurationOptionDescription ele : options ) { this . options . add ( ele ) ; } return this ; |
public class SARLAnnotationUtil { /** * Extract the string value of the given annotation , if it exists .
* @ param op the annotated element .
* @ param annotationType the type of the annotation to consider
* @ return the value of the annotation , or { @ code null } if no annotation or no
* value . */
public String findStringValue ( JvmAnnotationTarget op , Class < ? extends Annotation > annotationType ) { } } | final JvmAnnotationReference reference = this . lookup . findAnnotation ( op , annotationType ) ; if ( reference != null ) { return findStringValue ( reference ) ; } return null ; |
public class ClassInterpreter { /** * Parse the line passed , type check it and evaluate it as an expression in the initial context .
* @ param line
* A VDM expression .
* @ return The value of the expression .
* @ throws Exception
* Parser , type checking or runtime errors . */
@ Override public Value execute ( String line , DBGPReader dbgp ) throws Exception { } } | PExp expr = parseExpression ( line , getDefaultName ( ) ) ; Environment env = getGlobalEnvironment ( ) ; Environment created = new FlatCheckedEnvironment ( assistantFactory , createdDefinitions . asList ( ) , env , NameScope . NAMESANDSTATE ) ; typeCheck ( expr , created ) ; return execute ( expr , dbgp ) ; |
public class JGrassRasterMapReader { /** * Opens the raster map and does some first checking
* @ return true if everything went alright */
public boolean open ( ) { } } | boolean ok ; if ( mapPath != null ) { ok = reader . open ( mapPath ) ; } else if ( locationPath != null && mapsetName != null && mapName != null ) { ok = reader . open ( mapName , locationPath , mapsetName ) ; } else { return false ; } return ok ; |
public class Searches { /** * Searches the first matching element returning just element if found ,
* nothing otherwise .
* @ param < E > the element type parameter
* @ param array the array to be searched
* @ param predicate the predicate to be applied to each element
* @ return just the element found or nothing */
public static < E > Optional < E > searchFirst ( E [ ] array , Predicate < E > predicate ) { } } | final FilteringIterator < E > filtered = new FilteringIterator < E > ( new ArrayIterator < E > ( array ) , predicate ) ; return new MaybeFirstElement < E > ( ) . apply ( filtered ) ; |
public class DateTimes {
    /**
     * Converts a string in the form of {@code yyyy-MM-dd'T'HH:mm:ss} to an API date time in the
     * time zone supplied.
     *
     * @param dateTime the date-time string to convert
     * @param timeZoneId the time zone ID in which to interpret the string
     * @return the converted API date time
     */
    public static DateTime toDateTime(String dateTime, String timeZoneId) {
        // Delegate to the shared helper instance.
        return dateTimesHelper.toDateTime(dateTime, timeZoneId);
    }
}
public class ApiOvhEmaildomain { /** * Change mailbox password ( length : [ 9;30 ] , no space at begin and end , no accent )
* REST : POST / email / domain / { domain } / account / { accountName } / changePassword
* @ param password [ required ] New password
* @ param domain [ required ] Name of your domain name
* @ param accountName [ required ] Name of account */
public OvhTaskPop domain_account_accountName_changePassword_POST ( String domain , String accountName , String password ) throws IOException { } } | String qPath = "/email/domain/{domain}/account/{accountName}/changePassword" ; StringBuilder sb = path ( qPath , domain , accountName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "password" , password ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhTaskPop . class ) ; |
public class PortletMarketplaceController { /** * Returns a set of MarketplacePortletDefinitions . Supply a user to limit the set to only
* portlets the user can use . If user is null , this will return all portlets . Setting user to
* null will superscede all other parameters .
* @ param user - non - null user to limit results by . This will filter results to only portlets
* that user can use .
* @ return a set of portlets filtered that user can use , and other parameters */
public Map < String , Set < ? > > getRegistry ( final IPerson user , final PortletRequest req ) { } } | Map < String , Set < ? > > registry = new TreeMap < String , Set < ? > > ( ) ; // Empty , or the set of categories that are permitted to
// be displayed in the Portlet Marketplace ( portlet )
final Set < PortletCategory > permittedCategories = getPermittedCategories ( req ) ; final Set < MarketplaceEntry > visiblePortlets = this . marketplaceService . browseableMarketplaceEntriesFor ( user , permittedCategories ) ; final Set < PortletCategory > visibleCategories = this . marketplaceService . browseableNonEmptyPortletCategoriesFor ( user , permittedCategories ) ; final Set < MarketplaceEntry > featuredPortlets = this . marketplaceService . featuredEntriesForUser ( user , permittedCategories ) ; registry . put ( "portlets" , visiblePortlets ) ; registry . put ( "categories" , visibleCategories ) ; registry . put ( "featured" , featuredPortlets ) ; return registry ; |
public class AstFactory {

    /**
     * Given an iterable like {@code rhs} in
     * <pre>{@code
     * for await (lhs of rhs) { block(); }
     * }</pre>
     * <p>returns a call node for the {@code rhs} wrapped in a {@code $jscomp.makeAsyncIterator} call.
     * <pre>{@code
     * $jscomp.makeAsyncIterator(rhs)
     * }</pre>
     */
    Node createJSCompMakeAsyncIteratorCall(Node iterable, Scope scope) {
        Node makeIteratorAsyncName = createQName(scope, "$jscomp.makeAsyncIterator");
        // Since createCall (currently) doesn't handle templated functions, fill in the template
        // types of makeIteratorName manually.
        if (isAddingTypes() && !makeIteratorAsyncName.getJSType().isUnknownType()) {
            // If makeIteratorName has the unknown type, we must have not injected the required
            // runtime libraries - hopefully because this is in a test using NonInjectingCompiler.

            // e.g. get `number` from `AsyncIterable<number>`; fall back to the unknown type when
            // the iterable's type cannot be boxed to an (async) iterable.
            JSType asyncIterableType =
                JsIterables.maybeBoxIterableOrAsyncIterable(iterable.getJSType(), registry)
                    .orElse(unknownType);
            JSType makeAsyncIteratorType = makeIteratorAsyncName.getJSType();
            // e.g. replace
            //   function(AsyncIterable<T>): AsyncIterator<T>
            // with
            //   function(AsyncIterable<number>): AsyncIterator<number>
            TemplateTypeMap typeMap =
                registry.createTemplateTypeMap(
                    makeAsyncIteratorType.getTemplateTypeMap().getTemplateKeys(),
                    ImmutableList.of(asyncIterableType));
            TemplateTypeMapReplacer replacer = new TemplateTypeMapReplacer(registry, typeMap);
            // Specialize the generic function type before building the call node below.
            makeIteratorAsyncName.setJSType(makeAsyncIteratorType.visit(replacer));
        }
        return createCall(makeIteratorAsyncName, iterable);
    }
}
public class MultiStringAvgDistance { /** * Explain how to combine the scores for each primitive distance
* function on each field . */
protected String explainScoreCombination ( double [ ] multiScore ) { } } | StringBuffer buf = new StringBuffer ( "" ) ; PrintfFormat fmt = new PrintfFormat ( " %.3f" ) ; buf . append ( "field-level scores [" ) ; for ( int i = 0 ; i < multiScore . length ; i ++ ) { buf . append ( fmt . sprintf ( multiScore [ i ] ) ) ; } buf . append ( "] Average score:" ) ; buf . append ( fmt . sprintf ( scoreCombination ( multiScore ) ) ) ; return buf . toString ( ) ; |
public class ImportInstanceRequest { /** * The disk image .
* @ return The disk image . */
public java . util . List < DiskImage > getDiskImages ( ) { } } | if ( diskImages == null ) { diskImages = new com . amazonaws . internal . SdkInternalList < DiskImage > ( ) ; } return diskImages ; |
public class Utils {

    /**
     * Convert (encode) the given binary value, beginning at the given offset and
     * consisting of the given length, using Base64.
     *
     * @param value A binary value.
     * @param offset Zero-based index where data begins.
     * @param length Number of bytes to encode.
     * @return Base64-encoded value.
     * @throws IllegalArgumentException If the given value is null.
     */
    public static String base64FromBinary(byte[] value, int offset, int length) throws IllegalArgumentException {
        // Enforce the documented contract: a null input raises IllegalArgumentException,
        // not the NullPointerException Arrays.copyOfRange would otherwise throw.
        if (value == null) {
            throw new IllegalArgumentException("value must not be null");
        }
        // java.util.Base64 replaces javax.xml.bind.DatatypeConverter, which was removed
        // from the JDK in Java 11; the encoder output is identical (no line breaks).
        return java.util.Base64.getEncoder()
                .encodeToString(java.util.Arrays.copyOfRange(value, offset, offset + length));
    }
}
public class ExtraDNSCache {

    /**
     * Resolves a host name to an address, serving from the local cache when possible.
     * Failed lookups are cached too, as a null address (negative caching), so repeated
     * lookups of a bad host stay cheap. Concurrent uncached lookups are throttled by
     * the fetchDnsAddressLock semaphore (per the original note, only 5 simultaneous
     * DNS requests are allowed — confirm against the semaphore's permit count).
     *
     * @param host the host name to resolve
     * @return the resolved address
     * @throws UnknownHostException if the host cannot be resolved (fresh or cached failure)
     */
    public static InetAddress getAddress(String host) throws UnknownHostException {
        // Opportunistically evict stale cache entries; double-checked under the map lock
        // so that only one caller actually performs the cleanup.
        if (timeToClean()) {
            synchronized (storedAddresses) {
                if (timeToClean()) {
                    cleanOldAddresses();
                }
            }
        }
        Pair<InetAddress, Long> cachedAddress;
        synchronized (storedAddresses) {
            cachedAddress = storedAddresses.get(host);
        }
        if (cachedAddress != null) { // host DNS entry was cached
            InetAddress address = cachedAddress.getFirst();
            if (address == null) {
                // A cached null address marks a previously failed lookup.
                throw new UnknownHostException("Could not find host " + host + " (cached response)");
            } else {
                return address;
            }
        } else { // host DNS entry was not cached
            // Throttle concurrent lookups; acquire uninterruptibly to keep the contract simple.
            fetchDnsAddressLock.acquireUninterruptibly();
            try {
                InetAddress addr = InetAddress.getByName(host);
                synchronized (storedAddresses) {
                    storedAddresses.put(host, new Pair<>(addr, System.currentTimeMillis()));
                }
                return addr;
            } catch (UnknownHostException exp) {
                // Remember the failure (null address) so the next lookup hits the cache.
                synchronized (storedAddresses) {
                    storedAddresses.put(host, new Pair<InetAddress, Long>(null, System.currentTimeMillis()));
                }
                Log.i("[Dns lookup] " + host + " --> not found");
                throw exp;
            } finally {
                // Always release the permit, even when the lookup throws.
                fetchDnsAddressLock.release();
            }
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.