signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class JoinNode { /** * Returns nodes in the order they are joined in the tree by iterating the tree depth - first */
public List < JoinNode > generateAllNodesJoinOrder ( ) { } } | ArrayList < JoinNode > nodes = new ArrayList < > ( ) ; listNodesJoinOrderRecursive ( nodes , true ) ; return nodes ; |
public class DescriptorImporterBase {
    /**
     * {@inheritDoc}
     *
     * Deprecated alias that simply delegates to {@link #fromString(String)}.
     *
     * @see org.jboss.shrinkwrap.descriptor.api.DescriptorImporter#from(java.lang.String)
     */
    @Override
    @Deprecated
    public T from(final String string) throws IllegalArgumentException, DescriptorImportException {
        return fromString(string);
    }
}
public class InvalidRequestException { /** * The request omitted one or more required parameters .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setRequiredParameters ( java . util . Collection ) } or { @ link # withRequiredParameters ( java . util . Collection ) } if
* you want to override the existing values .
* @ param requiredParameters
* The request omitted one or more required parameters .
* @ return Returns a reference to this object so that method calls can be chained together . */
public InvalidRequestException withRequiredParameters ( String ... requiredParameters ) { } } | if ( this . requiredParameters == null ) { setRequiredParameters ( new java . util . ArrayList < String > ( requiredParameters . length ) ) ; } for ( String ele : requiredParameters ) { this . requiredParameters . add ( ele ) ; } return this ; |
public class Downsampling {
    /**
     * Calculates the effective index interval after the entry at `index` in an IndexSummary.
     * In other words, this returns the number of partitions in the primary on-disk index
     * before the next partition that has an entry in the index summary. If
     * samplingLevel == BASE_SAMPLING_LEVEL, this will be equal to the index interval.
     *
     * @param index an index into an IndexSummary
     * @param samplingLevel the current sampling level for that IndexSummary
     * @param minIndexInterval the min index interval (effective index interval at full sampling)
     * @return the number of partitions before the next index summary entry, inclusive on one end
     */
    public static int getEffectiveIndexIntervalAfterIndex(int index, int samplingLevel, int minIndexInterval) {
        assert index >= 0;
        // The retained-entry pattern repeats with period `samplingLevel`, so reduce the
        // index to its position within one pattern repetition.
        index %= samplingLevel;
        List<Integer> originalIndexes = getOriginalIndexes(samplingLevel);
        // For the last entry of the pattern, the "next" entry is the first entry of the
        // following repetition, i.e. at original index BASE_SAMPLING_LEVEL.
        int nextEntryOriginalIndex = (index == originalIndexes.size() - 1)
                ? BASE_SAMPLING_LEVEL : originalIndexes.get(index + 1);
        return (nextEntryOriginalIndex - originalIndexes.get(index)) * minIndexInterval;
    }
}
public class AbstractHibernateCriteriaBuilder {
    /**
     * Adds a projection that allows the criteria to return the distinct property count.
     * Delegates to the two-argument overload with no alias.
     *
     * @param propertyName The name of the property
     */
    public org.grails.datastore.mapping.query.api.ProjectionList countDistinct(String propertyName) {
        return countDistinct(propertyName, null);
    }
}
public class ConfigHelper {
    /**
     * Returns the configured input key range.
     *
     * @param conf the job configuration to read from
     * @return the deserialized {@link KeyRange}, or {@code null} if
     *         {@code INPUT_KEYRANGE_CONFIG} is unset
     */
    public static KeyRange getInputKeyRange(Configuration conf) {
        String str = conf.get(INPUT_KEYRANGE_CONFIG);
        return str == null ? null : keyRangeFromString(str);
    }
}
public class ListTaskDefinitionsRequestMarshaller {
    /**
     * Marshalls the given request object field by field into the protocol marshaller.
     *
     * @param listTaskDefinitionsRequest the request to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving each bound field
     * @throws SdkClientException if the request is {@code null} or marshalling fails;
     *         the underlying cause is preserved
     */
    public void marshall(ListTaskDefinitionsRequest listTaskDefinitionsRequest, ProtocolMarshaller protocolMarshaller) {
        if (listTaskDefinitionsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listTaskDefinitionsRequest.getFamilyPrefix(), FAMILYPREFIX_BINDING);
            protocolMarshaller.marshall(listTaskDefinitionsRequest.getStatus(), STATUS_BINDING);
            protocolMarshaller.marshall(listTaskDefinitionsRequest.getSort(), SORT_BINDING);
            protocolMarshaller.marshall(listTaskDefinitionsRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listTaskDefinitionsRequest.getMaxResults(), MAXRESULTS_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Continuations {
    /**
     * Return a {@link SanitizedContent} valued continuation. Rendering logic is delegated to
     * the {@link WriteContinuation}, but it is assumed that the builder is the render target.
     */
    static Continuation<SanitizedContent> strictContinuation(WriteContinuation delegate, final StringBuilder buffer, final ContentKind kind) {
        if (delegate.result().isDone()) {
            // Rendering finished: ordain the accumulated buffer as the final result.
            return new ResultContinuation<>(UnsafeSanitizedContentOrdainer.ordainAsSafe(buffer.toString(), kind));
        }
        // Still paused: wrap the delegate so each resumption re-checks completion
        // against the same buffer/kind (recursive re-entry below).
        return new AbstractContinuation<SanitizedContent>(delegate) {
            @Override
            Continuation<SanitizedContent> nextContinuation(WriteContinuation next) {
                return strictContinuation(next, buffer, kind);
            }
        };
    }
}
public class ConditionDateFormatterFactory {
    /**
     * Creates a {@link ConditionDateFormatter} instance from the parsed tokens.
     *
     * @param store the parsed token store to build the formatter from
     * @return the assembled formatter
     * @throws IllegalArgumentException if store is null
     */
    @Override
    public ConditionDateFormatter create(final TokenStore store) {
        ArgUtils.notNull(store, "store");
        final ConditionDateFormatter formatter = new ConditionDateFormatter(store.getConcatenatedToken());
        for (Token token : store.getTokens()) {
            if (token instanceof Token.Condition) {
                // condition token, e.g. a bracketed section
                final Token.Condition conditionToken = token.asCondition();
                final String condition = conditionToken.getCondition();
                if (PATTERN_ELAPSED_TIME.matcher(token.getValue()).matches()) {
                    // elapsed-time pattern such as [h], [m], [s]
                    if (Utils.startsWithIgnoreCase(condition, "h")) {
                        formatter.addTerm(DateTerm.elapsedHour(condition));
                    } else if (Utils.startsWithIgnoreCase(condition, "m")) {
                        formatter.addTerm(DateTerm.elapsedMinute(condition));
                    } else if (Utils.startsWithIgnoreCase(condition, "s")) {
                        formatter.addTerm(DateTerm.elapsedSecond(condition));
                    }
                    continue;
                }
                formatter.addCondition(condition);
                if (isConditionOperator(conditionToken)) {
                    setupConditionOperator(formatter, conditionToken);
                } else if (isConditionLocale(conditionToken)) {
                    setupConditionLocale(formatter, conditionToken);
                } else if (isConditionLocaleSymbol(conditionToken)) {
                    final LocaleSymbol localeSymbol = setupConditionLocaleSymbol(formatter, conditionToken);
                    formatter.addTerm(new LocaelSymbolTerm<Calendar>(localeSymbol));
                } else if (isConditionDbNum(conditionToken)) {
                    setupConditionDbNum(formatter, conditionToken);
                } else if (isConditionColor(conditionToken)) {
                    setupConditionColor(formatter, conditionToken);
                }
            } else if (token instanceof Token.Word) {
                formatter.addTerm(new WordTerm<Calendar>(token.asWord()));
            } else if (token instanceof Token.EscapedChar) {
                formatter.addTerm(new EscapedCharTerm<Calendar>(token.asEscapedChar()));
            } else if (token instanceof Token.Underscore) {
                formatter.addTerm(new UnderscoreTerm<Calendar>(token.asUnderscore()));
            } else if (token instanceof Token.Asterisk) {
                formatter.addTerm(new AsteriskTerm<Calendar>(token.asAsterisk()));
            } else if (token instanceof Token.Factor) {
                // decompose the factor into individual date/time format tokens
                final List<Token> list = convertFactor(token.asFactor());
                for (Token item : list) {
                    if (item instanceof Token.Formatter) {
                        final String formatterItem = item.asFormatter().getValue();
                        if (Utils.equalsAnyIgnoreCase(formatterItem, new String[]{"am/pm", "a/p"})) {
                            formatter.addTerm(DateTerm.amPm(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "w")) {
                            formatter.addTerm(DateTerm.weekNumber(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "y")) {
                            formatter.addTerm(DateTerm.year(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "g")) {
                            formatter.addTerm(DateTerm.eraName(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "e")) {
                            formatter.addTerm(DateTerm.eraYear(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "r")) {
                            formatter.addTerm(DateTerm.eraNameYear(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "m")) {
                            // whether 'm' means month or minute is decided only after the
                            // whole format has been assembled (see convertMinuteTerm below)
                            formatter.addTerm(DateTerm.month(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "d")) {
                            formatter.addTerm(DateTerm.day(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "a")) {
                            formatter.addTerm(DateTerm.weekName(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "n")) {
                            formatter.addTerm(DateTerm.weekNameForOO(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "h")) {
                            // 12-hour clock only when an am/pm marker appears anywhere in the format
                            final boolean halfHour = store.containsAnyInFactorIgnoreCase(new String[]{"am/pm", "a/p"});
                            formatter.addTerm(DateTerm.hour(formatterItem, halfHour));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "s")) {
                            formatter.addTerm(DateTerm.second(formatterItem));
                        } else if (Utils.startsWithIgnoreCase(formatterItem, "q")) {
                            formatter.addTerm(DateTerm.quater(formatterItem));
                        } else {
                            // should be unreachable
                            if (logger.isWarnEnabled()) {
                                logger.warn("unknown date format terms '{}'.", formatterItem);
                            }
                            formatter.addTerm(new OtherTerm<Calendar>(item));
                        }
                    } else {
                        formatter.addTerm(new OtherTerm<Calendar>(item));
                    }
                }
            } else {
                formatter.addTerm(new OtherTerm<Calendar>(token));
            }
        }
        // convert 'm' terms that actually denote minutes
        convertMinuteTerm(formatter);
        return formatter;
    }
}
public class H2GISFunctions { /** * Release geometry type
* @ param connection Active h2 connection with DROP DOMAIN and DROP ALIAS rights
* @ throws java . sql . SQLException */
public static void unRegisterGeometryType ( Connection connection ) throws SQLException { } } | Statement st = connection . createStatement ( ) ; DomainInfo [ ] domainInfos = getBuiltInsType ( ) ; for ( DomainInfo domainInfo : domainInfos ) { st . execute ( "DROP DOMAIN IF EXISTS " + domainInfo . getDomainName ( ) ) ; } |
public class InjectorBuilder {
    /**
     * Create the injector in the specified stage using the specified InjectorCreator
     * strategy. The InjectorCreator will most likely perform additional error handling on
     * top of the call to {@link Guice#createInjector}.
     *
     * @param stage Stage in which the injector is running. It is recommended to run in
     *        Stage.DEVELOPMENT since it treats all singletons as lazy, as opposed to
     *        defaulting to eager instantiation which could result in instantiating
     *        unwanted classes.
     * @param creator strategy that actually builds the injector
     * @return the injector produced by {@code creator}
     */
    public <I extends Injector> I createInjector(Stage stage, InjectorCreator<I> creator) {
        return creator.createInjector(stage, module);
    }
}
public class OmemoService {
    /**
     * Returns true, if a rotation of the signed preKey is necessary.
     *
     * @param userDevice our OmemoDevice
     * @return true if rotation is necessary
     */
    private boolean shouldRotateSignedPreKey(OmemoDevice userDevice) {
        // Rotation disabled in configuration: never rotate.
        if (!OmemoConfiguration.getRenewOldSignedPreKeys()) {
            return false;
        }
        Date now = new Date();
        Date lastRenewal = getOmemoStoreBackend().getDateOfLastSignedPreKeyRenewal(userDevice);
        if (lastRenewal == null) {
            // No renewal recorded yet: persist "now" so the rotation clock starts
            // from this call instead of rotating immediately.
            lastRenewal = new Date();
            getOmemoStoreBackend().setDateOfLastSignedPreKeyRenewal(userDevice, lastRenewal);
        }
        long allowedAgeMillis = MILLIS_PER_HOUR * OmemoConfiguration.getRenewOldSignedPreKeysAfterHours();
        return now.getTime() - lastRenewal.getTime() > allowedAgeMillis;
    }
}
public class DogmaApi { /** * Get effect information Get information on a dogma effect - - - This route
* expires daily at 11:05
* @ param effectId
* A dogma effect ID ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param ifNoneMatch
* ETag from a previous request . A 304 will be returned if this
* matches the current ETag ( optional )
* @ return DogmaEffectResponse
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public DogmaEffectResponse getDogmaEffectsEffectId ( Integer effectId , String datasource , String ifNoneMatch ) throws ApiException { } } | ApiResponse < DogmaEffectResponse > resp = getDogmaEffectsEffectIdWithHttpInfo ( effectId , datasource , ifNoneMatch ) ; return resp . getData ( ) ; |
public class Datamodel {
    /**
     * Creates a {@link Claim}. It might be more convenient to use
     * {@link #makeStatement} directly if you want to build a statement.
     *
     * @param subject the subject the Statement refers to
     * @param mainSnak the main Snak of the Statement
     * @param qualifiers the qualifiers of the Statement, grouped in SnakGroups
     * @return a {@link Claim} corresponding to the input
     */
    public static Claim makeClaim(EntityIdValue subject, Snak mainSnak, List<SnakGroup> qualifiers) {
        return factory.getClaim(subject, mainSnak, qualifiers);
    }
}
public class BindTableGenerator { /** * Buld indexes .
* @ param entity
* the entity
* @ param List
* @ param unique
* the unique
* @ param counter
* the counter
* @ return the pair */
public static Triple < String , String , String > buldIndexes ( final SQLiteEntity entity , ArrayList < Pair < List < String > , Boolean > > indexList , boolean unique , int counter ) { } } | Triple < String , String , String > result = new Triple < > ( ) ; result . value0 = "" ; result . value1 = "" ; result . value2 = "" ; if ( indexList . size ( ) == 0 ) return result ; String uniqueString ; if ( unique ) { uniqueString = "UNIQUE " ; } else { uniqueString = "" ; } List < String > listCreateIndex = new ArrayList < > ( ) ; List < String > listDropIndex = new ArrayList < > ( ) ; List < String > listUniqueConstraint = new ArrayList < > ( ) ; for ( Pair < List < String > , Boolean > index : indexList ) { final List < String > listUniqueFields = new ArrayList < > ( ) ; String createIndex = String . format ( " CREATE %sINDEX idx_%s_%s on %s (%s)" , uniqueString , entity . getTableName ( ) , counter ++ , entity . getTableName ( ) , StringUtils . join ( index . value0 , ", " ) ) ; String dropIndex = String . format ( " DROP INDEX IF EXISTS idx_%s_%s" , entity . getTableName ( ) , counter ) ; final One < Integer > fieldCounter = new One < Integer > ( 0 ) ; createIndex = JQLChecker . getInstance ( ) . replace ( new JQLContext ( ) { @ Override public String getContextDescription ( ) { return "While table definition generation for entity " + entity . getName ( ) ; } @ Override public String getName ( ) { // TODO Auto - generated method stub
return null ; } @ Override public String getParentName ( ) { // TODO Auto - generated method stub
return null ; } @ Override public Finder < SQLProperty > findEntityByName ( String entityName ) { // TODO Auto - generated method stub
return null ; } } , createIndex , new JQLReplacerListenerImpl ( null ) { @ Override public String onColumnName ( String columnName ) { fieldCounter . value0 ++ ; SQLProperty property = entity . findPropertyByName ( columnName ) ; AssertKripton . assertTrue ( property != null , "class '%s' in @%s(indexes) use unknown property '%s'" , entity . getName ( ) , BindSqlType . class . getSimpleName ( ) , columnName ) ; listUniqueFields . add ( property . columnName ) ; return property . columnName ; } @ Override public String onColumnFullyQualifiedName ( String tableName , String columnName ) { AssertKripton . fail ( "Inconsistent state" ) ; return null ; } } ) ; AssertKripton . assertTrue ( fieldCounter . value0 > 0 , "class '%s' have @%s(indexes) with no well formed indexes" , entity . getName ( ) , BindSqlType . class . getSimpleName ( ) ) ; if ( unique ) { // add unique constraint
listUniqueConstraint . add ( String . format ( ", UNIQUE (%s)" , StringUtils . join ( listUniqueFields , ", " ) ) ) ; } listCreateIndex . add ( createIndex ) ; listDropIndex . add ( dropIndex ) ; } result . value0 = StringUtils . join ( listCreateIndex , ";" ) ; result . value1 = StringUtils . join ( listDropIndex , ";" ) ; result . value2 = StringUtils . join ( listUniqueConstraint , "" ) ; return result ; |
public class WriteFileRecordRequest { /** * addRequest - - add a new record request .
* @ param request Request record */
public void addRequest ( RecordRequest request ) { } } | if ( request . getRequestSize ( ) + getRequestSize ( ) > 248 ) { throw new IllegalArgumentException ( ) ; } if ( records == null ) { records = new RecordRequest [ 1 ] ; } else { RecordRequest old [ ] = records ; records = new RecordRequest [ old . length + 1 ] ; System . arraycopy ( old , 0 , records , 0 , old . length ) ; } records [ records . length - 1 ] = request ; setDataLength ( getRequestSize ( ) ) ; |
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertCRCResourceManagementFmtQualToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class TypeUtils {
    /**
     * Checks if a {@link TypeRef} is an instance of an other {@link TypeDef}.
     *
     * @param type The type to compare.
     * @param targetType The target type.
     * @param function callback applied to each implemented interface and superclass
     *        reference; returning true for any of them counts as a match (callers
     *        typically pass a recursive check here)
     * @return true if match, false otherwise. Non-{@link ClassRef} types never match.
     */
    public static boolean isInstanceOf(TypeRef type, TypeDef targetType, Function<TypeRef, Boolean> function) {
        if (type instanceof ClassRef) {
            ClassRef classRef = (ClassRef) type;
            TypeDef definition = classRef.getDefinition();
            // Direct match on fully qualified name.
            if (definition.getFullyQualifiedName().equals(targetType.getFullyQualifiedName())) {
                return true;
            }
            // Delegate interface and superclass checks to the supplied function;
            // recursion (if any) happens inside that callback.
            for (TypeRef i : definition.getImplementsList()) {
                if (function.apply(i)) {
                    return true;
                }
            }
            for (TypeRef e : definition.getExtendsList()) {
                if (function.apply(e)) {
                    return true;
                }
            }
        }
        return false;
    }
}
public class ClustersInner {
    /**
     * Update the properties of a given cluster.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param clusterName The name of the cluster within the specified resource group.
     *        Cluster names can only contain a combination of alphanumeric characters
     *        along with dash (-) and underscore (_). The name must be from 1 through 64
     *        characters long.
     * @param parameters Additional parameters for cluster update.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<ClusterInner> updateAsync(String resourceGroupName, String clusterName, ClusterUpdateParameters parameters, final ServiceCallback<ClusterInner> serviceCallback) {
        return ServiceFuture.fromResponse(updateWithServiceResponseAsync(resourceGroupName, clusterName, parameters), serviceCallback);
    }
}
public class JKSecurityUtil {
    /**
     * Encodes the given string to Base64.
     *
     * @param source the source string; may be {@code null}
     * @return the Base64-encoded form, or {@code null} for a {@code null} input
     */
    public static String encode(final String source) {
        if (source == null) {
            return null;
        }
        // NOTE(review): getBytes() uses the platform default charset, so output can
        // differ between machines — consider getBytes(StandardCharsets.UTF_8), but
        // confirm the matching decode path first.
        return encodeInToBase64(source.getBytes());
    }
}
public class NS {
    /**
     * Returns an <code>IfNotExists</code> object which represents an
     * <a href="http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.Modifying.html">
     * if_not_exists(path, operand)</a> function call where path refers to that of the
     * current path operand; used for building expressions.
     *
     * <pre>
     * "if_not_exists (path, operand) – If the item does not contain an attribute
     * at the specified path, then if_not_exists evaluates to operand; otherwise,
     * it evaluates to path. You can use this function to avoid overwriting an
     * attribute already present in the item."
     * </pre>
     *
     * @param defaultValue the default value that will be used as the operand to the
     *        if_not_exists function call.
     */
    public <T extends Number> IfNotExistsFunction<NS> ifNotExists(Set<T> defaultValue) {
        return new IfNotExistsFunction<NS>(this, new LiteralOperand(defaultValue));
    }
}
public class ContentExposingResource { /** * Create a checksum URI object . */
protected static URI checksumURI ( final String checksum ) { } } | if ( ! isBlank ( checksum ) ) { return URI . create ( checksum ) ; } return null ; |
public class TransactionManager {
    /**
     * Removes all transaction ids started before the given time from invalid list.
     *
     * @param time time in milliseconds
     * @return true if invalid list got changed, false otherwise
     * @throws InvalidTruncateTimeException if there are any in-progress transactions
     *         started before given time
     */
    public boolean truncateInvalidTxBefore(long time) throws InvalidTruncateTimeException {
        // guard against changes to the transaction log while processing
        txMetricsCollector.rate("truncateInvalidTxBefore");
        Stopwatch timer = new Stopwatch().start();
        this.logReadLock.lock();
        try {
            boolean success;
            // Mutate in-memory state under the monitor; the log append below happens
            // outside the monitor but still under the read lock.
            synchronized (this) {
                ensureAvailable();
                success = doTruncateInvalidTxBefore(time);
            }
            appendToLog(TransactionEdit.createTruncateInvalidTxBefore(time));
            txMetricsCollector.histogram("truncateInvalidTxBefore.latency", (int) timer.elapsedMillis());
            return success;
        } finally {
            this.logReadLock.unlock();
        }
    }
}
public class Main {
    /**
     * Calculates the sum of the fifth powers of the first {@code length} even
     * natural numbers (2, 4, 6, ...).
     *
     * Examples:
     *   sum_even_fifth_powers(1) == 32
     *   sum_even_fifth_powers(2) == 1056
     *   sum_even_fifth_powers(3) == 8832
     *
     * @param length the number of initial even natural numbers to consider;
     *        non-positive values yield 0
     * @return the sum of the fifth powers of those even numbers
     */
    public static int sum_even_fifth_powers(int length) {
        int totalSum = 0;
        for (int count = 1; count <= length; count++) {
            int value = 2 * count;
            // Exact integer multiplication instead of Math.pow: avoids the implicit
            // double-to-int narrowing of the compound assignment and any
            // floating-point rounding for larger values.
            int fifth = value * value * value * value * value;
            totalSum += fifth;
        }
        return totalSum;
    }

    public static void main(String[] args) {
        System.out.println(sum_even_fifth_powers(2)); // prints "1056"
    }
}
public class AsmHelper {
    /**
     * Extracts the package name from a fully qualified (normalized) class name,
     * i.e. converts "java.lang.Object" to "java.lang".
     *
     * @param className the fully qualified class name; may be {@code null} or blank
     * @return the package portion, or the empty string when the input is
     *         {@code null}, blank, or has no package
     */
    public static final String extractPackageName(String className) {
        if (className == null || className.trim().isEmpty()) {
            return "";
        }
        final int lastDot = className.lastIndexOf('.');
        return lastDot < 0 ? "" : className.substring(0, lastDot);
    }
}
public class IcedUtils {
    /**
     * Deep-copy clone given iced object by serializing it to a buffer and
     * deserializing it into a fresh instance.
     *
     * @param iced the object to clone; may be {@code null}
     * @return a deep copy, or {@code null} for a {@code null} input
     */
    static public <T extends Iced> T deepCopy(T iced) {
        if (iced == null) return null;
        // Serialize into an in-memory buffer, then rewind it for reading.
        AutoBuffer ab = new AutoBuffer();
        iced.write(ab);
        ab.flipForReading();
        // Create a new instance of the same frozen type and populate it from the buffer.
        return (T) TypeMap.newInstance(iced.frozenType()).read(ab);
    }
}
public class DefaultJsonWebTokenImpl {
    /**
     * Parses the claims out of the given JWT string and stores them in
     * {@code this.claimsSet}. On parse failure an error is logged and the claims
     * set is left unchanged.
     *
     * @param jwt the raw JWT string to extract claims from
     */
    private void handleClaims(String jwt) {
        try {
            if (claimsUtils == null) {
                // Lazily create the helper on first use.
                claimsUtils = new ClaimsUtils();
            }
            this.claimsSet = claimsUtils.getJwtClaims(jwt);
        } catch (JoseException e) {
            Tr.error(tc, "ERROR_GETTING_CLAIMS_FROM_JWT_STRING", new Object[]{e.getLocalizedMessage()});
        }
    }
}
public class FactoryDetectPoint {
    /**
     * Creates a Fast corner detector with feature intensity for additional pruning.
     * Fast features have minimums and maximums.
     *
     * @param configFast Configuration for FAST feature detector; {@code null} uses defaults
     * @param configDetector Configuration for feature extractor.
     * @param imageType type of input image.
     * @see FastCornerDetector
     */
    @SuppressWarnings("UnnecessaryLocalVariable")
    public static <T extends ImageGray<T>, D extends ImageGray<D>> GeneralFeatureDetector<T, D> createFast(@Nullable ConfigFastCorner configFast, ConfigGeneralDetector configDetector, Class<T> imageType) {
        if (configFast == null)
            configFast = new ConfigFastCorner();
        configFast.checkValidity();
        FastCornerDetector<T> alg = FactoryIntensityPointAlg.fast(configFast.pixelTol, configFast.minContinuous, imageType);
        alg.setMaxFeaturesFraction(configFast.maxFeatures);
        // Wrap the FAST detector so its intensity can feed the general extractor.
        GeneralFeatureIntensity<T, D> intensity = new WrapperFastCornerIntensity<>(alg);
        return createGeneral(intensity, configDetector);
    }
}
public class SortedMapWritable {
    /**
     * {@inheritDoc}
     *
     * Deserializes the map: first the superclass state, then an entry count,
     * then each key/value pair (each prefixed with a type byte).
     */
    @SuppressWarnings("unchecked")
    @Override
    public void readFields(DataInput in) throws IOException {
        super.readFields(in);
        // Read the number of entries in the map
        int entries = in.readInt();
        // Then read each key/value pair; the leading byte of each identifies
        // the concrete Writable class to instantiate.
        for (int i = 0; i < entries; i++) {
            WritableComparable key = (WritableComparable) ReflectionUtils.newInstance(getClass(in.readByte()), getConf());
            key.readFields(in);
            Writable value = (Writable) ReflectionUtils.newInstance(getClass(in.readByte()), getConf());
            value.readFields(in);
            instance.put(key, value);
        }
    }
}
public class HopcroftMinimization {
    /**
     * Minimizes the given Mealy machine. The result is returned in the form of a
     * {@link CompactMealy}.
     *
     * @param mealy the Mealy machine to minimize
     * @param alphabet the input alphabet (this will be the input alphabet of the
     *        resulting Mealy machine)
     * @param pruningMode the pruning mode (see above)
     * @return a minimized version of the specified Mealy machine
     */
    public static <I, O> CompactMealy<I, O> minimizeMealy(MealyMachine<?, I, ?, O> mealy, Alphabet<I> alphabet, PruningMode pruningMode) {
        return doMinimizeMealy(mealy, alphabet, new CompactMealy.Creator<>(), pruningMode);
    }
}
public class ClientConnection {
    /**
     * Opens a new IP communication channel to a remote server.
     * The communication state of this object is assumed to be closed state. This method
     * is designed to be called only once during the object's lifetime!
     *
     * @param localEP the local endpoint to use for communication channel
     * @param serverCtrlEP the remote server control endpoint used for connect request
     * @param cri connect request information used to configure the communication attributes
     * @param useNAT <code>true</code> to use a NAT (network address translation) aware
     *        communication mechanism, <code>false</code> to use the default way
     * @throws KNXException on socket communication error
     * @throws KNXTimeoutException on no connect response before connect timeout
     * @throws KNXRemoteException if response indicates an error condition at the server
     *         concerning the request
     * @throws KNXInvalidResponseException if connect response is in wrong format
     * @throws InterruptedException on interrupted thread during connect, all resources
     *         are cleaned up before passing on this exception
     */
    protected void connect(final InetSocketAddress localEP, final InetSocketAddress serverCtrlEP, final CRI cri, final boolean useNAT) throws KNXException, InterruptedException {
        // Refuse to connect twice on the same object.
        if (state != CLOSED)
            throw new IllegalStateException("open connection");
        ctrlEndpt = serverCtrlEP;
        if (ctrlEndpt.isUnresolved())
            throw new KNXException("server control endpoint is unresolved: " + serverCtrlEP);
        if (ctrlEndpt.getAddress().isMulticastAddress())
            throw new KNXIllegalArgumentException("server control endpoint cannot be a multicast address (" + ctrlEndpt.getAddress().getHostAddress() + ")");
        useNat = useNAT;
        logger = LogService.getLogger("calimero.knxnetip." + getName());
        // if we allow localEP to be null, we would create an unbound socket
        if (localEP == null)
            throw new KNXIllegalArgumentException("no local endpoint specified");
        InetSocketAddress local = localEP;
        try {
            if (local.isUnresolved())
                throw new KNXIllegalArgumentException("unresolved address " + local);
            if (local.getAddress().isAnyLocalAddress()) {
                // Wildcard local address: with NAT leave it unresolved, otherwise pick a
                // concrete local address (preferring one on the server's subnet).
                final InetAddress addr = useNAT ? null : Optional.ofNullable(serverCtrlEP.getAddress()).flatMap(this::onSameSubnet).orElse(InetAddress.getLocalHost());
                local = new InetSocketAddress(addr, localEP.getPort());
            }
            socket = new DatagramSocket(local);
            ctrlSocket = socket;
            logger.info("establish connection from " + socket.getLocalSocketAddress() + " to " + ctrlEndpt);
            // HPAI throws if wildcard local address (0.0.0.0) is supplied
            final HPAI hpai = new HPAI(HPAI.IPV4_UDP, useNat ? null : (InetSocketAddress) socket.getLocalSocketAddress());
            final byte[] buf = PacketHelper.toPacket(new ConnectRequest(cri, hpai, hpai));
            send(buf, ctrlEndpt);
        }
        catch (final UnknownHostException e) {
            throw new KNXException("no local host address available", e);
        }
        catch (IOException | SecurityException e) {
            // Clean up the half-open socket before rethrowing.
            if (socket != null)
                socket.close();
            logger.error("communication failure on connect", e);
            if (local.getAddress().isLoopbackAddress())
                logger.warn("local endpoint uses loopback address ({}), try with a different IP address", local.getAddress());
            throw new KNXException("connecting from " + local + " to " + serverCtrlEP + ": " + e.getMessage());
        }
        logger.debug("wait for connect response from " + ctrlEndpt + " ...");
        startReceiver();
        try {
            // Block until the receiver thread moves us out of CLOSED, or timeout.
            final boolean changed = waitForStateChange(CLOSED, CONNECT_REQ_TIMEOUT);
            if (state == OK) {
                heartbeat = new HeartbeatMonitor();
                heartbeat.start();
                String optionalConnectionInfo = "";
                if (tunnelingAddress != null)
                    optionalConnectionInfo = ", tunneling address " + tunnelingAddress;
                logger.info("connection established (data endpoint {}:{}, channel {}{})", dataEndpt.getAddress().getHostAddress(), dataEndpt.getPort(), channelId, optionalConnectionInfo);
                return;
            }
            // Map the failure mode to the most specific exception type.
            final KNXException e;
            if (!changed)
                e = new KNXTimeoutException("timeout connecting to control endpoint " + ctrlEndpt);
            else if (state == ACK_ERROR)
                e = new KNXRemoteException("error response from control endpoint " + ctrlEndpt + ": " + status);
            else
                e = new KNXInvalidResponseException("invalid connect response from " + ctrlEndpt);
            // quit, cleanup and notify user
            connectCleanup(e);
            throw e;
        }
        catch (final InterruptedException e) {
            connectCleanup(e);
            throw e;
        }
    }
}
public class GoogleCalendarService {
    /**
     * Inserts a calendar into the google calendar, then writes the server-assigned
     * id back onto the given calendar object.
     *
     * @param calendar The calendar to be inserted; its id is updated in place.
     * @throws IOException For unexpected errors.
     */
    public void insertCalendar(GoogleCalendar calendar) throws IOException {
        com.google.api.services.calendar.model.Calendar cal;
        cal = converter.convert(calendar, com.google.api.services.calendar.model.Calendar.class);
        cal = dao.calendars().insert(cal).execute();
        // Propagate the id assigned by the remote service back to the caller's object.
        calendar.setId(cal.getId());
    }
}
public class MetricsImpl {
    /**
     * Retrieve metric data: gets metric values for a single metric.
     *
     * @param appId ID of the application (the Application ID from the API Access settings blade in the Azure portal)
     * @param metricId ID of the metric; either a standard AI metric or an application-specific custom metric
     *        (e.g. 'requests/count', 'requests/duration', 'users/count', ..., 'customEvents/count')
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the MetricsResult object if successful
     */
    public MetricsResult get(String appId, MetricId metricId) {
        // Blocks the calling thread until the async service call completes,
        // then unwraps the REST response body.
        return getWithServiceResponseAsync(appId, metricId).toBlocking().single().body();
    }
}
public class MathUtils {
    /**
     * Calculate string similarity based on per-character frequency counts
     * (cosine similarity of the character-count vectors). Only the first two
     * strings are compared, matching the historical behavior of this method.
     *
     * @param strings the strings to calculate similarity for; at least two
     *        non-null entries are required for a meaningful result
     * @return the cosine similarity between the first two strings, or 0 when
     *         fewer than two strings are supplied, an entry is null, or either
     *         string is empty (previously these cases crashed or returned NaN)
     */
    public static double stringSimilarity(String... strings) {
        // Guard: the original only checked strings == null and then indexed
        // [0] and [1] unconditionally (NPE / ArrayIndexOutOfBounds risk).
        if (strings == null || strings.length < 2 || strings[0] == null || strings[1] == null) {
            return 0;
        }
        Map<String, Double> counts1 = characterCounts(strings[0]);
        Map<String, Double> counts2 = characterCounts(strings[1]);
        double scalar = 0, norm1 = 0, norm2 = 0;
        for (Map.Entry<String, Double> entry : counts1.entrySet()) {
            double c1 = entry.getValue();
            Double c2 = counts2.get(entry.getKey());
            if (c2 != null) {
                scalar += c1 * c2; // dot product over shared characters
            }
            norm1 += c1 * c1;
        }
        for (double c2 : counts2.values()) {
            norm2 += c2 * c2;
        }
        // An empty string used to yield 0/0 = NaN; report zero similarity.
        if (norm1 == 0 || norm2 == 0) {
            return 0;
        }
        return scalar / Math.sqrt(norm1 * norm2);
    }

    /** Counts occurrences of each character, keyed by its one-char string. */
    private static Map<String, Double> characterCounts(String s) {
        Map<String, Double> counts = new HashMap<>();
        for (int i = 0; i < s.length(); i++) {
            counts.merge(String.valueOf(s.charAt(i)), 1.0, Double::sum);
        }
        return counts;
    }
}
public class RBBISetBuilder {
    /** /CLOVER:OFF
     * Debug aid: dumps the range list to System.out, grouped by the
     * character-class (group) number assigned to each range. Each group is
     * printed once, with the names of the Unicode sets it belongs to and the
     * character ranges it covers.
     */
    void printRangeGroups() {
        RangeDescriptor rlRange;
        RangeDescriptor tRange;
        int i;
        int lastPrintedGroupNum = 0;
        System.out.print("\nRanges grouped by Unicode Set Membership...\n");
        for (rlRange = fRangeList; rlRange != null; rlRange = rlRange.fNext) {
            // Mask off the 0x4000 "dictionary" flag bit to get the group number.
            int groupNum = rlRange.fNum & 0xbfff;
            if (groupNum > lastPrintedGroupNum) {
                // Groups are encountered in ascending order; print each once.
                lastPrintedGroupNum = groupNum;
                if (groupNum < 10) {
                    System.out.print(" "); // align single-digit group numbers
                }
                System.out.print(groupNum + " ");
                if ((rlRange.fNum & 0x4000) != 0) {
                    System.out.print(" <DICT> "); // dictionary-set flag
                }
                // Print the name of each set this range belongs to; "anon"
                // when the set is not bound to a $variable.
                for (i = 0; i < rlRange.fIncludesSets.size(); i++) {
                    RBBINode usetNode = rlRange.fIncludesSets.get(i);
                    String setName = "anon";
                    RBBINode setRef = usetNode.fParent;
                    if (setRef != null) {
                        RBBINode varRef = setRef.fParent;
                        if (varRef != null && varRef.fType == RBBINode.varRef) {
                            setName = varRef.fText;
                        }
                    }
                    System.out.print(setName);
                    System.out.print(" ");
                }
                // Print every range sharing this group number, five per line.
                i = 0;
                for (tRange = rlRange; tRange != null; tRange = tRange.fNext) {
                    if (tRange.fNum == rlRange.fNum) {
                        if (i++ % 5 == 0) {
                            System.out.print("\n ");
                        }
                        RBBINode.printHex(tRange.fStartChar, -1);
                        System.out.print("-");
                        RBBINode.printHex(tRange.fEndChar, 0);
                    }
                }
                System.out.print("\n");
            }
        }
        System.out.print("\n");
    }
}
public class JsonConverterUtil { /** * Loops through a list of { @ link JsonNode } instances , and stores the given property with given type in the returned list .
* In Java 8 , this probably could be done a lot cooler . */
public static Set < Long > gatherLongPropertyFromJsonNodes ( Iterable < JsonNode > jsonNodes , String propertyName ) { } } | Set < Long > result = new HashSet < Long > ( ) ; // Using a Set to filter out doubles
for ( JsonNode node : jsonNodes ) { if ( node . has ( propertyName ) ) { Long propertyValue = node . get ( propertyName ) . asLong ( ) ; if ( propertyValue > 0 ) { // Just to be safe
result . add ( propertyValue ) ; } } } return result ; |
public class ListConsCases {
    /**
     * Matches a list with a head element and a tail of remaining elements.
     * <p>If matched, the {@code head} value is decomposed at position 0 and
     * the {@code tail} value at position 1.
     */
    public static <T, ET extends T, B1> DecomposableMatchBuilder1<List<T>, B1> headTail(DecomposableMatchBuilder0<ET> head, DecomposableMatchBuilder1<List<T>, B1> tail) {
        // Two wildcard matchers: the extractor splits the list into
        // (head, tail) at indices (0, 1), which are then further decomposed
        // by the supplied builders.
        List<Matcher<Object>> matchers = Lists.of(ArgumentMatchers.any(), ArgumentMatchers.any());
        return new DecomposableMatchBuilder2<List<T>, ET, List<T>>(matchers, Tuple2.of(0, 1), new ListConsHeadTailFieldExtractor<>()).decomposeFirstAndSecond(head, tail);
    }
}
public class ImapRequestLineReader { /** * Sends a server command continuation request ' + ' back to the client ,
* requesting more data to be sent . */
public void commandContinuationRequest ( ) throws ProtocolException { } } | try { output . write ( '+' ) ; output . write ( ' ' ) ; output . write ( 'O' ) ; output . write ( 'K' ) ; output . write ( '\r' ) ; output . write ( '\n' ) ; output . flush ( ) ; } catch ( IOException e ) { throw new ProtocolException ( "Unexpected exception in sending command continuation request." , e ) ; } |
public class JsonParser { /** * Parses an array
* @ return the parsed array
* @ throws IOException if the input stream could not be read or if
* the input stream contained an unexpected token */
public List < Object > parseArray ( ) throws IOException { } } | Type t = lexer . readNextToken ( ) ; if ( t != Type . START_ARRAY ) { throw new IOException ( "Unexpected token: " + t ) ; } return parseArrayInternal ( ) ; |
public class MetricManager { /** * Shut down the background thread that is processing metrics */
public static synchronized void shutdown ( ) { } } | if ( BACKGROUND_SERVICE != null ) { try { BACKGROUND_SERVICE . stop ( ) ; } catch ( Throwable t ) { LOGGER . error ( "Exception stopping Stackify Metrics API service" , t ) ; } INITIALIZED . compareAndSet ( true , false ) ; } |
public class ArrayMath {
    /**
     * Converts a double array to a float array by narrowing each element.
     *
     * @param a the source array
     * @return a new float array of the same length
     */
    public static float[] doubleArrayToFloatArray(double[] a) {
        final int n = a.length;
        float[] floats = new float[n];
        for (int i = 0; i < n; i++) {
            floats[i] = (float) a[i];
        }
        return floats;
    }
}
public class JawrConfigManager { /** * ( non - Javadoc )
* @ see net . jawr . web . config . jmx . JawrConfigManagerMBean # setUseBundleMapping (
* boolean ) */
@ Override public void setUseBundleMapping ( boolean usBundleMapping ) { } } | configProperties . setProperty ( JawrConfig . JAWR_USE_BUNDLE_MAPPING , Boolean . toString ( usBundleMapping ) ) ; |
public class Counters {
    /**
     * Returns a live {@link Map} view of the given counter: reads and writes
     * on the returned map delegate directly to the counter.
     *
     * @param counter the counter to expose as a map
     * @return a map view backed by {@code counter}
     */
    public static <E> Map<E, Double> asMap(final Counter<E> counter) {
        return new AbstractMap<E, Double>() {
            @Override
            public int size() { return counter.size(); }
            @Override
            public Set<Entry<E, Double>> entrySet() { return counter.entrySet(); }
            @Override
            @SuppressWarnings("unchecked")
            public boolean containsKey(Object key) { return counter.containsKey((E) key); }
            @Override
            @SuppressWarnings("unchecked")
            public Double get(Object key) { return counter.getCount((E) key); }
            @Override
            public Double put(E key, Double value) {
                // Per the Map.put contract, return the previous value.
                double last = counter.getCount(key);
                counter.setCount(key, value);
                return last;
            }
            @Override
            @SuppressWarnings("unchecked")
            public Double remove(Object key) { return counter.remove((E) key); }
            @Override
            public Set<E> keySet() { return counter.keySet(); }
        };
    }
}
public class StartAction {
    /**
     * {@inheritDoc}
     * Kicks off application startup: records the start time, schedules a
     * "slow startup" audit message, wires up application monitoring, and hands
     * installation off to the application handler.
     */
    @Override
    public void execute(ExecutorService executor) {
        _startTime.set(System.currentTimeMillis());
        if (_tc.isInfoEnabled()) {
            AppMessageHelper.get(_aii.getHandler()).info("STARTING_APPLICATION", _config.getName());
        }
        long maxWait = ApplicationStateCoordinator.getApplicationStartTimeout();
        // Schedule a one-shot warning that fires if startup exceeds the
        // configured timeout; the action is kept so it can be cancelled later.
        _slowMessageAction.set(((ScheduledExecutorService) executor).schedule(new Runnable() {
            @SuppressWarnings("deprecation")
            @Override
            public void run() {
                AppMessageHelper.get(_aii.getHandler()).audit("APPLICATION_SLOW_STARTUP", _config.getName(), TimestampUtils.getElapsedTime(_startTime.get()));
            }
        }, maxWait, TimeUnit.SECONDS));
        try {
            @SuppressWarnings("rawtypes")
            ApplicationHandler handler = _aii.getHandler();
            // Monitoring must be registered before install so changes made
            // during installation are observed.
            @SuppressWarnings("unchecked")
            ApplicationMonitoringInformation ami = handler.setUpApplicationMonitoring(_aii);
            _aii.setApplicationMonitoringInformation(ami);
            _appMonitor.addApplication(_aii);
            @SuppressWarnings("unchecked")
            Future<Boolean> result = handler.install(_aii);
            if (_tc.isDebugEnabled()) {
                Tr.debug(_tc, "Handler install called, result: " + result);
            }
            _monitor.onCompletion(result, _listener);
        } catch (Throwable t) {
            // Report any synchronous failure to the completion listener.
            _listener.failedCompletion(null, t);
        }
    }
}
public class Authentication {
    /**
     * Checks if a given number for 2FA is valid for the given secret.
     *
     * @param secret the plaintext secret to use for checking; must not be null
     * @param number the TOTP number entered by the user; must not be null
     * @return true if number is valid, false otherwise
     * @throws NullPointerException if secret or number is null
     */
    public boolean validSecondFactor(String secret, String number) {
        Objects.requireNonNull(secret, Required.SECRET.toString());
        Objects.requireNonNull(number, Required.TOTP.toString());
        return TotpUtils.verifiedTotp(secret, number);
    }
}
public class FormInputValueHelper {
    /**
     * Deserializes form inputs previously written by a training crawl from the
     * given directory. Returns an empty list when no serialized file exists or
     * when reading/parsing fails (the error is logged).
     *
     * @param dir the directory containing the serialized form input data
     * @return the list of form inputs
     */
    public static List<FormInput> deserializeFormInputs(File dir) {
        List<FormInput> deserialized = new ArrayList<>();
        final File in = new File(dir, FORMS_JSON_FILE);
        if (in.exists()) {
            LOGGER.info("Reading trained form inputs from " + in.getAbsolutePath());
            Gson gson = new GsonBuilder().create();
            try {
                deserialized = gson.fromJson(FileUtils.readFileToString(in, Charset.defaultCharset()), new TypeToken<List<FormInput>>() {
                }.getType());
            } catch (JsonSyntaxException | IOException e) {
                // Best-effort: log and fall through to the empty list.
                LOGGER.error(e.getMessage(), e);
            }
        }
        return deserialized;
    }
}
public class KunderaCriteriaBuilder {
    /**
     * Builds a disjunction (logical OR) of the two boolean expressions.
     *
     * @see javax.persistence.criteria.CriteriaBuilder#or(javax.persistence.criteria.Expression, javax.persistence.criteria.Expression)
     */
    @Override
    public Predicate or(Expression<Boolean> arg0, Expression<Boolean> arg1) {
        // NOTE(review): returns null for unsupported argument types, which
        // violates the CriteriaBuilder contract — confirm callers tolerate it.
        // Also, getClass().isAssignableFrom(ComparisonPredicate.class) tests
        // that the argument's class is a SUPERtype of ComparisonPredicate;
        // verify this direction is intended (vs. arg0 instanceof ComparisonPredicate).
        if (arg0 != null && arg1 != null) {
            if (arg0.getClass().isAssignableFrom(ComparisonPredicate.class) && arg1.getClass().isAssignableFrom(ComparisonPredicate.class)) {
                return new DisjunctionPredicate((Predicate) arg0, (Predicate) arg1);
            }
        }
        return null;
    }
}
public class BeanMappingConfigRespository {
    /**
     * Parses the mapping between the two classes and registers each resulting
     * mapping object directly as a default mapping.
     *
     * @param src source class
     * @param target target class
     */
    public void register(Class src, Class target) {
        List<BeanMappingObject> objects = BeanMappingParser.parseMapping(src, target);
        for (BeanMappingObject object : objects) {
            register(object);
        }
    }
}
public class LongDoubleDenseVector {
    /**
     * Updates this vector to be the entrywise sum of this vector with the other.
     *
     * @param other the vector whose entries are added into this one
     */
    public void add(LongDoubleVector other) {
        if (other instanceof LongDoubleUnsortedVector) {
            // Fast path: walk the unsorted vector's parallel (idx, vals)
            // arrays directly, up to its logical size 'top'.
            LongDoubleUnsortedVector vec = (LongDoubleUnsortedVector) other;
            for (int i = 0; i < vec.top; i++) {
                this.add(vec.idx[i], vec.vals[i]);
            }
        } else { // TODO: Add special case for LongDoubleDenseVector.
            // Generic path: apply entrywise addition via the iterator.
            other.iterate(new SparseBinaryOpApplier(this, new Lambda.DoubleAdd()));
        }
    }
}
public class PropertiesFile { /** * Merge a Map of properties into our Properties object
* The additions will overwrite any existing properties
* @ param additionalProperties Map to merge into our Properties object */
public void addAndOverwriteProperties ( Map < String , String > additionalProperties ) { } } | additionalProperties . values ( ) . removeAll ( Collections . singleton ( null ) ) ; for ( Map . Entry < String , String > additionalPropertiesMap : additionalProperties . entrySet ( ) ) { if ( ! additionalPropertiesMap . getValue ( ) . trim ( ) . isEmpty ( ) ) { properties . setProperty ( additionalPropertiesMap . getKey ( ) , additionalPropertiesMap . getValue ( ) ) ; warnUserOfPossibleErrors ( additionalPropertiesMap . getKey ( ) , properties ) ; } } |
public class AutoClassDiscovery { /** * Returns all class names stored in the cache .
* @ return the class names */
public static List < String > getAllClassNames ( ) { } } | List < String > result = new ArrayList < > ( ) ; Iterator < String > pkgs = m_Cache . packages ( ) ; while ( pkgs . hasNext ( ) ) { String pkg = pkgs . next ( ) ; if ( pkg . startsWith ( "moa" ) ) { Set < String > classnames = m_Cache . getClassnames ( pkg ) ; result . addAll ( classnames ) ; } } return result ; |
public class SpringDataRestConfiguration {
    /**
     * Builds an alternate type for {@code Pageable} so the generated API docs
     * show the page, limit and sort request parameters (named per the REST
     * configuration) instead of the raw Pageable type.
     * tag::alternate-type-builder[]
     */
    private Type pageableMixin(RepositoryRestConfiguration restConfiguration) {
        return new AlternateTypeBuilder()
                // Synthetic class name under a ".generated" subpackage.
                .fullyQualifiedClassName(String.format("%s.generated.%s", Pageable.class.getPackage().getName(), Pageable.class.getSimpleName()))
                .withProperties(Stream.of(
                        property(Integer.class, restConfiguration.getPageParamName()),
                        property(Integer.class, restConfiguration.getLimitParamName()),
                        property(String.class, restConfiguration.getSortParamName()))
                        .collect(toList()))
                .build();
    }
}
public class ApiClient { /** * Formats the specified collection query parameters to a list of { @ code Pair } objects .
* Note that the values of each of the returned Pair objects are percent - encoded .
* @ param collectionFormat The collection format of the parameter .
* @ param name The name of the parameter .
* @ param value The value of the parameter .
* @ return A list of { @ code Pair } objects . */
public List < Pair > parameterToPairs ( String collectionFormat , String name , Collection value ) { } } | List < Pair > params = new ArrayList < Pair > ( ) ; // preconditions
if ( name == null || name . isEmpty ( ) || value == null || value . isEmpty ( ) ) { return params ; } // create the params based on the collection format
if ( "multi" . equals ( collectionFormat ) ) { for ( Object item : value ) { params . add ( new Pair ( name , escapeString ( parameterToString ( item ) ) ) ) ; } return params ; } // collectionFormat is assumed to be " csv " by default
String delimiter = "," ; // escape all delimiters except commas , which are URI reserved
// characters
if ( "ssv" . equals ( collectionFormat ) ) { delimiter = escapeString ( " " ) ; } else if ( "tsv" . equals ( collectionFormat ) ) { delimiter = escapeString ( "\t" ) ; } else if ( "pipes" . equals ( collectionFormat ) ) { delimiter = escapeString ( "|" ) ; } StringBuilder sb = new StringBuilder ( ) ; for ( Object item : value ) { sb . append ( delimiter ) ; sb . append ( escapeString ( parameterToString ( item ) ) ) ; } params . add ( new Pair ( name , sb . substring ( delimiter . length ( ) ) ) ) ; return params ; |
public class ContainerProperties { /** * A list of data volumes used in a job .
* @ param volumes
* A list of data volumes used in a job . */
public void setVolumes ( java . util . Collection < Volume > volumes ) { } } | if ( volumes == null ) { this . volumes = null ; return ; } this . volumes = new java . util . ArrayList < Volume > ( volumes ) ; |
public class ELKIServiceRegistry { /** * Register a class with the registry .
* @ param parent Parent class
* @ param cname Class name */
protected static void register ( Class < ? > parent , String cname ) { } } | Entry e = data . get ( parent ) ; if ( e == null ) { data . put ( parent , e = new Entry ( ) ) ; } e . addName ( cname ) ; |
public class CompilerThread { /** * Count the number of active sub tasks . */
public synchronized int numActiveSubTasks ( ) { } } | int c = 0 ; for ( Future < ? > f : subTasks ) { if ( ! f . isDone ( ) && ! f . isCancelled ( ) ) { c ++ ; } } return c ; |
public class LazyRemovalCache { /** * Adds all value which have not been marked as removable to the returned set
* @ return */
public Set < V > nonRemovedValues ( ) { } } | return map . values ( ) . stream ( ) . filter ( entry -> ! entry . removable ) . map ( entry -> entry . val ) . collect ( Collectors . toSet ( ) ) ; |
public class CachedResponseImpl { /** * Gets the first header value set for the given header in this response .
* If the header is not set , { @ code null } is returned .
* Useful for headers that don ' t have multiple values , like
* { @ code " Content - Type " } or { @ code " Content - Length " } .
* @ param pHeaderName the header name
* @ return a { @ code String } , or { @ code null } if there is no
* such header in this response . */
public String getHeaderValue ( final String pHeaderName ) { } } | List < String > values = headers . get ( pHeaderName ) ; return ( values != null && values . size ( ) > 0 ) ? values . get ( 0 ) : null ; |
public class Artifact { /** * Returns the first target that matched the given path based on the defined patterns . Returns null if the argument is
* < code > null < / code > .
* @ param targetPath
* Path to find .
* @ return Target . */
@ Nullable public final Target findTargetFor ( @ Nullable final String targetPath ) { } } | if ( targets == null ) { return null ; } if ( targetPath == null ) { return null ; } for ( final Target target : targets ) { if ( target . matches ( targetPath ) ) { return target ; } } return null ; |
public class IntegerDBIDArrayQuickSort {
    /**
     * Sort the array using the given comparator.
     *
     * @param data Data to sort
     * @param start First index
     * @param end Last index (exclusive)
     * @param comp Comparator
     */
    public static void sort(int[] data, int start, int end, Comparator<? super DBIDRef> comp) {
        // quickSort apparently takes an INCLUSIVE end, hence end - 1; the
        // three scratch DBIDVars are reused throughout the recursion to avoid
        // per-comparison allocations. (quickSort is defined elsewhere — TODO
        // confirm the inclusive-end convention there.)
        quickSort(data, start, end - 1, comp, new IntegerDBIDVar(), new IntegerDBIDVar(), new IntegerDBIDVar());
    }
}
public class IonStructLite {
    /**
     * Validates the child and checks locks.
     *
     * @param fieldName may be null
     * @param child must be validated and have field name or id set
     */
    private void _add(String fieldName, IonValueLite child) {
        // Remember whether any child was ever added without a field name.
        hasNullFieldName |= fieldName == null;
        int size = get_child_count();
        // add this to the Container child collection (appended at the end)
        add(size, child);
        // if we have a hash map we need to update it now
        if (_field_map != null) {
            add_field(fieldName, child._elementid());
        }
    }
}
public class XmlEntities { /** * Adds entities to this entity .
* @ param entityArray
* array of entities to be added */
public void addEntities ( String [ ] [ ] entityArray ) { } } | for ( String [ ] anEntityArray : entityArray ) { addEntity ( anEntityArray [ 0 ] , Integer . parseInt ( anEntityArray [ 1 ] ) ) ; } |
public class AbstractModuleIndexWriter {
    /**
     * Adds the frame or non-frame module index to the documentation tree.
     *
     * @param body the document tree to which the index will be added
     */
    protected void addIndex(Content body) {
        // The table summary is assembled from the localized "Module Summary"
        // caption and the "modules" column description.
        addIndexContents(configuration.modules, "doclet.Module_Summary",
                configuration.getText("doclet.Member_Table_Summary",
                        configuration.getText("doclet.Module_Summary"),
                        configuration.getText("doclet.modules")), body);
    }
}
public class CoffeeScriptGenerator {
    /**
     * Initializes the CoffeeScript compiler once bean properties are set:
     * loads the configured coffee-script.js source into a fresh JavaScript
     * engine and evaluates the compiler options, timing the whole setup.
     *
     * @see net.jawr.web.resource.bundle.generator.PostInitializationAwareResourceGenerator#afterPropertiesSet()
     */
    @Override
    public void afterPropertiesSet() {
        super.afterPropertiesSet();
        StopWatch stopWatch = new StopWatch("initializing JS engine for Coffeescript");
        stopWatch.start();
        // Load JavaScript Script Engine
        String script = config.getProperty(JAWR_JS_GENERATOR_COFFEE_SCRIPT_LOCATION, DEFAULT_COFFEE_SCRIPT_JS_LOCATION);
        jsEngine = new JavascriptEngine(config.getJavascriptEngineName(JAWR_JS_GENERATOR_COFFEE_SCRIPT_JS_ENGINE));
        try (InputStream inputStream = getResourceInputStream(script)) {
            jsEngine.evaluate("coffee-script.js", inputStream);
        } catch (IOException e) {
            // Without the compiler source the bundling process cannot proceed.
            throw new BundlingProcessException(e);
        }
        String strOptions = config.getProperty(JAWR_JS_GENERATOR_COFFEE_SCRIPT_OPTIONS, COFFEE_SCRIPT_DEFAULT_OPTIONS);
        options = jsEngine.execEval(strOptions);
        coffeeScript = jsEngine.execEval("CoffeeScript");
        stopWatch.stop();
        if (PERF_LOGGER.isDebugEnabled()) {
            PERF_LOGGER.debug(stopWatch.shortSummary());
        }
    }
}
public class Parser { /** * < p > Parses the given resource and evaluates it with the given evaluator . < / p >
* @ param < O > the return type of the evaluator
* @ param resource the resource to evaluate
* @ param evaluator the evaluator to use for transforming expressions
* @ return A list of objects relating to the transformation of expressions by the given evaluator .
* @ throws ParseException Something went wrong parsing */
public < O > List < O > evaluateAll ( @ NonNull Resource resource , @ NonNull Evaluator < ? extends O > evaluator ) throws ParseException { } } | ExpressionIterator iterator = parse ( resource ) ; List < O > evaluationResults = new ArrayList < > ( ) ; while ( iterator . hasNext ( ) ) { try { evaluationResults . add ( evaluator . eval ( iterator . next ( ) ) ) ; } catch ( Exception e ) { throw new ParseException ( e ) ; } } return evaluationResults ; |
public class PrototypeObjectFactory {
    /**
     * Initializes the object with the specified array of arguments providing the object implements the
     * ParameterizedInitable interface, or calls the no argument init method if the object implements the Initable
     * interface, and finally, does nothing if the object is not Initable.
     *
     * @param <T> the Class type of the created object.
     * @param object the object/bean to initialize.
     * @param args the array of Object arguments used to initialize the object/bean.
     * @return the object after initialization.
     * @see #initialize(Object, java.util.Map)
     * @see org.cp.elements.lang.Initable#init()
     * @see org.cp.elements.lang.ParameterizedInitable#init(Object...)
     */
    protected <T> T initialize(final T object, final Object... args) {
        if (object instanceof ParameterizedInitable) {
            // Prefer the parameterized variant when the object supports it.
            ((ParameterizedInitable) object).init(args);
        } else if (object instanceof Initable) {
            // Note: args are intentionally ignored for plain Initable.
            ((Initable) object).init();
        }
        // Objects implementing neither interface pass through untouched.
        return object;
    }
}
public class TaskTracker { /** * Obtain the maximum disk space ( free or total ) in bytes for each volume
* @ param free If true returns free space , else returns capacity
* @ return disk space in bytes
* @ throws IOException */
long getDiskSpace ( boolean free ) throws IOException { } } | long biggestSeenSoFar = 0 ; String [ ] localDirs = getLocalDirsFromConf ( fConf ) ; for ( int i = 0 ; i < localDirs . length ; i ++ ) { DF df = null ; if ( localDirsDf . containsKey ( localDirs [ i ] ) ) { df = localDirsDf . get ( localDirs [ i ] ) ; } else { df = new DF ( new File ( localDirs [ i ] ) , fConf ) ; localDirsDf . put ( localDirs [ i ] , df ) ; } long onThisVol = free ? df . getAvailable ( ) : df . getCapacity ( ) ; if ( onThisVol > biggestSeenSoFar ) { biggestSeenSoFar = onThisVol ; } } // Should ultimately hold back the space we expect running tasks to use but
// that estimate isn ' t currently being passed down to the TaskTrackers
return biggestSeenSoFar ; |
public class MaintenanceWindowTaskParameterValueExpressionMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @throws SdkClientException if the parameter object is null or if
     *         marshalling fails for any reason
     */
    public void marshall(MaintenanceWindowTaskParameterValueExpression maintenanceWindowTaskParameterValueExpression, ProtocolMarshaller protocolMarshaller) {
        if (maintenanceWindowTaskParameterValueExpression == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(maintenanceWindowTaskParameterValueExpression.getValues(), VALUES_BINDING);
        } catch (Exception e) {
            // Wrap all failures in an SdkClientException, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class StorageManager {
    /**
     * Uploads a project archive into Azkaban Storage and records the version
     * metadata in the database (for non-database storage backends).
     * TODO clean up interface
     *
     * @param project project
     * @param version The new version to be uploaded
     * @param localFile local archive file
     * @param uploader the user who uploaded
     */
    public void uploadProject(final Project project, final int version, final File localFile, final User uploader) {
        // MD5 is only computed for non-database storage; DatabaseStorage is
        // handled separately (see the TODO below).
        byte[] md5 = null;
        if (!(this.storage instanceof DatabaseStorage)) {
            md5 = computeHash(localFile);
        }
        final StorageMetadata metadata = new StorageMetadata(project.getId(), version, uploader.getUserId(), md5);
        log.info(String.format("Adding archive to storage. Meta:%s File: %s[%d bytes]", metadata, localFile.getName(), localFile.length()));
        /* upload to storage */
        final String resourceId = this.storage.put(metadata, localFile);
        /* Add metadata to db */
        // TODO spyne: remove hack. Database storage should go through the same flow
        if (!(this.storage instanceof DatabaseStorage)) {
            this.projectLoader.addProjectVersion(project.getId(), version, localFile, uploader.getUserId(), requireNonNull(md5), requireNonNull(resourceId));
            log.info(String.format("Added project metadata to DB. Meta:%s File: %s[%d bytes] URI: %s", metadata, localFile.getName(), localFile.length(), resourceId));
        }
    }
}
public class ErrorHandler {
    /**
     * <pre>
     * Static file content to be served for this error.
     * </pre>
     * <code>string static_file = 2;</code>
     *
     * @return the static file content as UTF-8 bytes
     */
    public com.google.protobuf.ByteString getStaticFileBytes() {
        java.lang.Object ref = staticFile_;
        if (ref instanceof java.lang.String) {
            // First bytes-access: encode the cached String and replace the
            // field with the ByteString (standard protobuf lazy conversion).
            com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
            staticFile_ = b;
            return b;
        } else {
            // Already cached as a ByteString.
            return (com.google.protobuf.ByteString) ref;
        }
    }
}
public class MonoT {
    /**
     * Flat Map the wrapped Mono.
     *
     * @param f FlatMap function
     * @return MonoT that applies the flatMap function to the wrapped Mono
     */
    public <B> MonoT<W, B> flatMapT(final Function<? super T, MonoT<W, B>> f) {
        // NOTE(review): assumes f.apply(a).run yields exactly one Mono —
        // stream().toList().get(0) throws if it is empty; confirm invariant.
        MonoT<W, B> r = of(run.map(future -> Mono.from(future.flatMap(a -> {
            Mono<B> m = f.apply(a).run.stream().toList().get(0);
            return m;
        }))));
        return r;
    }
}
public class Dj2JrCrosstabBuilder { /** * MOVED INSIDE ExpressionUtils
* protected JRDesignExpression getExpressionForConditionalStyle ( ConditionalStyle condition , String columExpression ) {
* String fieldsMap = " ( ( " + DJDefaultScriptlet . class . getName ( ) + " ) $ P { REPORT _ SCRIPTLET } ) . getCurrentFields ( ) " ;
* String parametersMap = " ( ( " + DJDefaultScriptlet . class . getName ( ) + " ) $ P { REPORT _ SCRIPTLET } ) . getCurrentParams ( ) " ;
* String variablesMap = " ( ( " + DJDefaultScriptlet . class . getName ( ) + " ) $ P { REPORT _ SCRIPTLET } ) . getCurrentVariables ( ) " ;
* String evalMethodParams = fieldsMap + " , " + variablesMap + " , " + parametersMap + " , " + columExpression ;
* String text = " ( ( " + ConditionStyleExpression . class . getName ( ) + " ) $ P { " + JRParameter . REPORT _ PARAMETERS _ MAP + " } . get ( \ " " + condition . getName ( ) + " \ " ) ) . " + CustomExpression . EVAL _ METHOD _ NAME + " ( " + evalMethodParams + " ) " ;
* JRDesignExpression expression = new JRDesignExpression ( ) ;
* expression . setValueClass ( Boolean . class ) ;
* expression . setText ( text ) ;
* return expression ; */
private Style getRowTotalStyle ( DJCrosstabRow crosstabRow ) { } } | return crosstabRow . getTotalStyle ( ) == null ? this . djcross . getRowTotalStyle ( ) : crosstabRow . getTotalStyle ( ) ; |
public class ParquetReader { /** * Retrieve avro schema from parquet file .
* @ param configuration Hadoop configuration .
* @ param filter Filter for Avro metadata .
* @ return avro schema from parquet file .
* @ throws IOException if the Avro schema couldn ' t be parsed from the parquet file . */
private Schema createAvroSchema ( final Configuration configuration , final MetadataFilter filter ) throws IOException { } } | final ParquetMetadata footer = ParquetFileReader . readFooter ( configuration , parquetFilePath , filter ) ; final AvroSchemaConverter converter = new AvroSchemaConverter ( ) ; final MessageType schema = footer . getFileMetaData ( ) . getSchema ( ) ; return converter . convert ( schema ) ; |
public class AmazonRedshiftClient { /** * Creates an Amazon Redshift parameter group .
* Creating parameter groups is independent of creating clusters . You can associate a cluster with a parameter group
* when you create the cluster . You can also associate an existing cluster with a parameter group after the cluster
* is created by using < a > ModifyCluster < / a > .
* Parameters in the parameter group define specific behavior that applies to the databases you create on the
* cluster . For more information about parameters and parameter groups , go to < a
* href = " https : / / docs . aws . amazon . com / redshift / latest / mgmt / working - with - parameter - groups . html " > Amazon Redshift
* Parameter Groups < / a > in the < i > Amazon Redshift Cluster Management Guide < / i > .
* @ param createClusterParameterGroupRequest
* @ return Result of the CreateClusterParameterGroup operation returned by the service .
* @ throws ClusterParameterGroupQuotaExceededException
* The request would result in the user exceeding the allowed number of cluster parameter groups . For
* information about increasing your quota , go to < a
* href = " https : / / docs . aws . amazon . com / redshift / latest / mgmt / amazon - redshift - limits . html " > Limits in Amazon
* Redshift < / a > in the < i > Amazon Redshift Cluster Management Guide < / i > .
* @ throws ClusterParameterGroupAlreadyExistsException
* A cluster parameter group with the same name already exists .
* @ throws TagLimitExceededException
* You have exceeded the number of tags allowed .
* @ throws InvalidTagException
* The tag is invalid .
* @ sample AmazonRedshift . CreateClusterParameterGroup
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / redshift - 2012-12-01 / CreateClusterParameterGroup "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ClusterParameterGroup createClusterParameterGroup ( CreateClusterParameterGroupRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateClusterParameterGroup ( request ) ; |
public class CommonConfigUtils { /** * Calls { @ code java . lang . String . trim ( ) } on the provided value .
* @ param original
* @ return { @ code null } if { @ code original } is { @ code null } or empty after the { @ code java . lang . String . trim ( ) } operation .
* Otherwise returns the trimmed result . */
@ Trivial public static String trim ( String original ) { } } | if ( original == null ) { return null ; } String result = original . trim ( ) ; if ( result . isEmpty ( ) ) { return null ; } return result ; |
public class JDBC4ResultSetMetaData { /** * Returns an object that implements the given interface to allow access to non - standard methods , or standard methods not exposed by the proxy . */
public < T > T unwrap ( Class < T > iface ) throws SQLException { } } | try { return iface . cast ( this ) ; } catch ( ClassCastException cce ) { throw SQLError . get ( SQLError . ILLEGAL_ARGUMENT , iface . toString ( ) ) ; } |
public class ReflectionUtils { /** * Handle { @ link InvocationTargetException } by logging it and rethrown it as a { @ link ReflectionException }
* @ param methodName method name
* @ param e exception
* @ return wrapped exception */
private static ReflectionException handleException ( String methodName , InvocationTargetException e ) { } } | LOGGER . error ( "Couldn't invoke method " + methodName , e ) ; return new ReflectionException ( e ) ; |
public class ObjectEnvelopeOrdering { /** * Finds edges based to a specific object reference descriptor and
* adds them to the edge map .
* @ param vertex the object envelope vertex holding the object reference
* @ param rds the object reference descriptor */
private void addObjectReferenceEdges ( Vertex vertex , ObjectReferenceDescriptor rds ) { } } | Object refObject = rds . getPersistentField ( ) . get ( vertex . getEnvelope ( ) . getRealObject ( ) ) ; Class refClass = rds . getItemClass ( ) ; for ( int i = 0 ; i < vertices . length ; i ++ ) { Edge edge = null ; // ObjectEnvelope envelope = vertex . getEnvelope ( ) ;
Vertex refVertex = vertices [ i ] ; ObjectEnvelope refEnvelope = refVertex . getEnvelope ( ) ; if ( refObject == refEnvelope . getRealObject ( ) ) { edge = buildConcrete11Edge ( vertex , refVertex , rds . hasConstraint ( ) ) ; } else if ( refClass . isInstance ( refVertex . getEnvelope ( ) . getRealObject ( ) ) ) { edge = buildPotential11Edge ( vertex , refVertex , rds . hasConstraint ( ) ) ; } if ( edge != null ) { if ( ! edgeList . contains ( edge ) ) { edgeList . add ( edge ) ; } else { edge . increaseWeightTo ( edge . getWeight ( ) ) ; } } } |
public class CatalogUtil { /** * Validate Snmp Configuration .
* @ param snmpType */
private static void setSnmpInfo ( SnmpType snmpType ) { } } | if ( snmpType == null || ! snmpType . isEnabled ( ) ) { return ; } // Validate Snmp Configuration .
if ( snmpType . getTarget ( ) == null || snmpType . getTarget ( ) . trim ( ) . length ( ) == 0 ) { throw new IllegalArgumentException ( "Target must be specified for SNMP configuration." ) ; } if ( snmpType . getAuthkey ( ) != null && snmpType . getAuthkey ( ) . length ( ) < 8 ) { throw new IllegalArgumentException ( "SNMP Authkey must be > 8 characters." ) ; } if ( snmpType . getPrivacykey ( ) != null && snmpType . getPrivacykey ( ) . length ( ) < 8 ) { throw new IllegalArgumentException ( "SNMP Privacy Key must be > 8 characters." ) ; } |
public class GPXTablesFactory { /** * Creat the track table
* @ param connection
* @ param trackTableName
* @ param isH2 set true if it ' s an H2 database
* @ return
* @ throws SQLException */
public static PreparedStatement createTrackTable ( Connection connection , String trackTableName , boolean isH2 ) throws SQLException { } } | try ( Statement stmt = connection . createStatement ( ) ) { StringBuilder sb = new StringBuilder ( "CREATE TABLE " ) ; sb . append ( trackTableName ) . append ( " (" ) ; if ( isH2 ) { sb . append ( "the_geom MULTILINESTRING CHECK ST_SRID(THE_GEOM) = 4326," ) ; } else { sb . append ( "the_geom GEOMETRY(MULTILINESTRING, 4326)," ) ; } sb . append ( " id INT," ) ; sb . append ( GPXTags . NAME . toLowerCase ( ) ) . append ( " TEXT," ) ; sb . append ( GPXTags . CMT . toLowerCase ( ) ) . append ( " TEXT," ) ; sb . append ( "description" ) . append ( " TEXT," ) ; sb . append ( GPXTags . SRC . toLowerCase ( ) ) . append ( " TEXT," ) ; sb . append ( GPXTags . HREF . toLowerCase ( ) ) . append ( " TEXT," ) ; sb . append ( GPXTags . HREFTITLE . toLowerCase ( ) ) . append ( " TEXT," ) ; sb . append ( GPXTags . NUMBER . toLowerCase ( ) ) . append ( " INT," ) ; sb . append ( GPXTags . TYPE . toLowerCase ( ) ) . append ( " TEXT," ) ; sb . append ( GPXTags . EXTENSIONS . toLowerCase ( ) ) . append ( " TEXT);" ) ; stmt . execute ( sb . toString ( ) ) ; } // We return the preparedstatement of the route table
StringBuilder insert = new StringBuilder ( "INSERT INTO " ) . append ( trackTableName ) . append ( " VALUES ( ?" ) ; for ( int i = 1 ; i < GpxMetadata . RTEFIELDCOUNT ; i ++ ) { insert . append ( ",?" ) ; } insert . append ( ");" ) ; return connection . prepareStatement ( insert . toString ( ) ) ; |
public class P2sVpnServerConfigurationsInner { /** * Retrieves all P2SVpnServerConfigurations for a particular VirtualWan .
* @ param resourceGroupName The resource group name of the VirtualWan .
* @ param virtualWanName The name of the VirtualWan .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; P2SVpnServerConfigurationInner & gt ; object */
public Observable < Page < P2SVpnServerConfigurationInner > > listByVirtualWanAsync ( final String resourceGroupName , final String virtualWanName ) { } } | return listByVirtualWanWithServiceResponseAsync ( resourceGroupName , virtualWanName ) . map ( new Func1 < ServiceResponse < Page < P2SVpnServerConfigurationInner > > , Page < P2SVpnServerConfigurationInner > > ( ) { @ Override public Page < P2SVpnServerConfigurationInner > call ( ServiceResponse < Page < P2SVpnServerConfigurationInner > > response ) { return response . body ( ) ; } } ) ; |
public class InternalXtextParser { /** * InternalXtext . g : 1445:1 : entryRuleEnumLiteralDeclaration : ruleEnumLiteralDeclaration EOF ; */
// ANTLR-generated entry rule (do not edit by hand): parses a complete EnumLiteralDeclaration
// followed by EOF. Recognition errors are reported and recovered from, not propagated.
public final void entryRuleEnumLiteralDeclaration ( ) throws RecognitionException { } } | try { // InternalXtext . g : 1446:1 : ( ruleEnumLiteralDeclaration EOF )
// InternalXtext . g : 1447:1 : ruleEnumLiteralDeclaration EOF
{ before ( grammarAccess . getEnumLiteralDeclarationRule ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_1 ) ; ruleEnumLiteralDeclaration ( ) ; state . _fsp -- ; after ( grammarAccess . getEnumLiteralDeclarationRule ( ) ) ; match ( input , EOF , FollowSets000 . FOLLOW_2 ) ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { } return ; |
public class JvmMemberImpl { /** * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * @ generated */
// EMF-generated reflective check: returns true when the given feature is "set",
// i.e. differs from its declared default (or, for 'deprecated', per isSetDeprecated()).
// Unknown feature IDs are delegated to the superclass.
@ Override public boolean eIsSet ( int featureID ) { } } | switch ( featureID ) { case TypesPackage . JVM_MEMBER__DECLARING_TYPE : return getDeclaringType ( ) != null ; case TypesPackage . JVM_MEMBER__VISIBILITY : return visibility != VISIBILITY_EDEFAULT ; case TypesPackage . JVM_MEMBER__SIMPLE_NAME : return SIMPLE_NAME_EDEFAULT == null ? simpleName != null : ! SIMPLE_NAME_EDEFAULT . equals ( simpleName ) ; case TypesPackage . JVM_MEMBER__IDENTIFIER : return IDENTIFIER_EDEFAULT == null ? identifier != null : ! IDENTIFIER_EDEFAULT . equals ( identifier ) ; case TypesPackage . JVM_MEMBER__DEPRECATED : return isSetDeprecated ( ) ; } return super . eIsSet ( featureID ) ; |
public class HttpClientFactory { /** * The method to use when all the dependencies are resolved .
* It means iPojo guarantees that both the manager and the HTTP
* service are not null .
* @ throws Exception */
public void start ( ) throws Exception { } } | // Is the DM part of the distribution ?
boolean found = false ; for ( Bundle b : this . bundleContext . getBundles ( ) ) { if ( "net.roboconf.dm" . equals ( b . getSymbolicName ( ) ) ) { found = true ; break ; } } // If we are on an agent , we have nothing to do .
// Otherwise , we must register a servlet .
if ( found ) { this . logger . fine ( "iPojo registers a servlet for HTTP messaging." ) ; Hashtable < String , String > initParams = new Hashtable < String , String > ( ) ; initParams . put ( "servlet-name" , "Roboconf DM (HTTP messaging)" ) ; DmWebSocketServlet messagingServlet = new DmWebSocketServlet ( this ) ; this . httpService . registerServlet ( HttpConstants . DM_SOCKET_PATH , messagingServlet , initParams , null ) ; } else { this . logger . warning ( "Roboconf's DM bundle was not found. No servlet will be registered." ) ; } |
public class SmartsFragmentExtractor { /** * Set the mode of SMARTS substructure selection
* @ param mode the mode */
public void setMode ( int mode ) { } } | // check arg
switch ( mode ) { case MODE_EXACT : case MODE_JCOMPOUNDMAPPER : break ; default : throw new IllegalArgumentException ( "Invalid mode specified!" ) ; } this . mode = mode ; // re - gen atom expressions
int numAtoms = mol . getAtomCount ( ) ; for ( int atomIdx = 0 ; atomIdx < numAtoms ; atomIdx ++ ) this . aexpr [ atomIdx ] = encodeAtomExpr ( atomIdx ) ; |
public class LocalTransformExecutor { /** * Execute the specified TransformProcess with the given < i > sequence < / i > input data < br >
* Note : this method can only be used if the TransformProcess starts with sequence data , but returns < i > non - sequential < / i >
* data ( after reducing or converting sequential data to individual examples )
* @ param inputSequence Input sequence data to process
* @ param transformProcess TransformProcess to execute
* @ return Processed ( non - sequential ) data */
public static List < List < Writable > > executeSequenceToSeparate ( List < List < List < Writable > > > inputSequence , TransformProcess transformProcess ) { } } | if ( transformProcess . getFinalSchema ( ) instanceof SequenceSchema ) { throw new IllegalStateException ( "Cannot return sequence data with this method" ) ; } return execute ( null , inputSequence , transformProcess ) . getFirst ( ) ; |
public class StreamEx { /** * Returns a new { @ code StreamEx } which elements are results of reduction of
* all possible n - tuples composed from the elements of supplied collections .
* The whole stream forms an n - fold Cartesian product of input collection
* with itself or n - ary Cartesian power of the input collection .
* The reduction is performed using the provided identity object and the
* accumulator function which is capable to accumulate new element . The
* accumulator function must not modify the previous accumulated value , but
* must produce new value instead . That ' s because partially accumulated
* values are reused for subsequent elements .
* This method is equivalent to the following :
* < pre >
* { @ code StreamEx . cartesianPower ( n , source ) . map ( list - > StreamEx . of ( list ) . foldLeft ( identity , accumulator ) ) }
* < / pre >
* However it may perform much faster as partial reduction results are
* reused .
* The supplied collection is assumed to be unchanged during the operation .
* @ param < T > the type of the input elements
* @ param < U > the type of the elements of the resulting stream
* @ param n the number of elements to incorporate into single element of the
* resulting stream .
* @ param source the input collection of collections which is used to
* generate the Cartesian power .
* @ param identity the identity value
* @ param accumulator a < a
* href = " package - summary . html # NonInterference " > non - interfering < / a > ,
* < a href = " package - summary . html # Statelessness " > stateless < / a >
* function for incorporating an additional element from source
* collection into a stream element .
* @ return the new stream .
* @ see # cartesianProduct ( Collection , Object , BiFunction )
* @ see # cartesianPower ( int , Collection )
* @ since 0.4.0 */
public static < T , U > StreamEx < U > cartesianPower ( int n , Collection < T > source , U identity , BiFunction < U , ? super T , U > accumulator ) { } } | if ( n == 0 ) return of ( identity ) ; return of ( new CrossSpliterator . Reducing < > ( Collections . nCopies ( n , source ) , identity , accumulator ) ) ; |
public class AmazonCloudFrontClient { /** * Create a new distribution with tags .
* @ param createDistributionWithTagsRequest
* The request to create a new distribution with tags .
* @ return Result of the CreateDistributionWithTags operation returned by the service .
* @ throws CNAMEAlreadyExistsException
* @ throws DistributionAlreadyExistsException
* The caller reference you attempted to create the distribution with is associated with another
* distribution .
* @ throws InvalidOriginException
* The Amazon S3 origin server specified does not refer to a valid Amazon S3 bucket .
* @ throws InvalidOriginAccessIdentityException
* The origin access identity is not valid or doesn ' t exist .
* @ throws AccessDeniedException
* Access denied .
* @ throws TooManyTrustedSignersException
* Your request contains more trusted signers than are allowed per distribution .
* @ throws TrustedSignerDoesNotExistException
* One or more of your trusted signers don ' t exist .
* @ throws InvalidViewerCertificateException
* @ throws InvalidMinimumProtocolVersionException
* @ throws MissingBodyException
* This operation requires a body . Ensure that the body is present and the < code > Content - Type < / code > header
* is set .
* @ throws TooManyDistributionCNAMEsException
* Your request contains more CNAMEs than are allowed per distribution .
* @ throws TooManyDistributionsException
* Processing your request would cause you to exceed the maximum number of distributions allowed .
* @ throws InvalidDefaultRootObjectException
* The default root object file name is too big or contains an invalid character .
* @ throws InvalidRelativePathException
* The relative path is too big , is not URL - encoded , or does not begin with a slash ( / ) .
* @ throws InvalidErrorCodeException
* @ throws InvalidResponseCodeException
* @ throws InvalidArgumentException
* The argument is invalid .
* @ throws InvalidRequiredProtocolException
* This operation requires the HTTPS protocol . Ensure that you specify the HTTPS protocol in your request ,
* or omit the < code > RequiredProtocols < / code > element from your distribution configuration .
* @ throws NoSuchOriginException
* No origin exists with the specified < code > Origin Id < / code > .
* @ throws TooManyOriginsException
* You cannot create more origins for the distribution .
* @ throws TooManyOriginGroupsPerDistributionException
* Processing your request would cause you to exceed the maximum number of origin groups allowed .
* @ throws TooManyCacheBehaviorsException
* You cannot create more cache behaviors for the distribution .
* @ throws TooManyCookieNamesInWhiteListException
* Your request contains more cookie names in the whitelist than are allowed per cache behavior .
* @ throws InvalidForwardCookiesException
* Your request contains forward cookies option which doesn ' t match with the expectation for the
* < code > whitelisted < / code > list of cookie names . Either list of cookie names has been specified when not
* allowed or list of cookie names is missing when expected .
* @ throws TooManyHeadersInForwardedValuesException
* @ throws InvalidHeadersForS3OriginException
* @ throws InconsistentQuantitiesException
* The value of < code > Quantity < / code > and the size of < code > Items < / code > don ' t match .
* @ throws TooManyCertificatesException
* You cannot create anymore custom SSL / TLS certificates .
* @ throws InvalidLocationCodeException
* @ throws InvalidGeoRestrictionParameterException
* @ throws InvalidProtocolSettingsException
* You cannot specify SSLv3 as the minimum protocol version if you only want to support only clients that
* support Server Name Indication ( SNI ) .
* @ throws InvalidTTLOrderException
* @ throws InvalidWebACLIdException
* @ throws TooManyOriginCustomHeadersException
* @ throws InvalidTaggingException
* @ throws TooManyQueryStringParametersException
* @ throws InvalidQueryStringParametersException
* @ throws TooManyDistributionsWithLambdaAssociationsException
* Processing your request would cause the maximum number of distributions with Lambda function associations
* per owner to be exceeded .
* @ throws TooManyLambdaFunctionAssociationsException
* Your request contains more Lambda function associations than are allowed per distribution .
* @ throws InvalidLambdaFunctionAssociationException
* The specified Lambda function association is invalid .
* @ throws InvalidOriginReadTimeoutException
* @ throws InvalidOriginKeepaliveTimeoutException
* @ throws NoSuchFieldLevelEncryptionConfigException
* The specified configuration for field - level encryption doesn ' t exist .
* @ throws IllegalFieldLevelEncryptionConfigAssociationWithCacheBehaviorException
* The specified configuration for field - level encryption can ' t be associated with the specified cache
* behavior .
* @ throws TooManyDistributionsAssociatedToFieldLevelEncryptionConfigException
* The maximum number of distributions have been associated with the specified configuration for field - level
* encryption .
* @ sample AmazonCloudFront . CreateDistributionWithTags
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloudfront - 2018-11-05 / CreateDistributionWithTags "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public CreateDistributionWithTagsResult createDistributionWithTags ( CreateDistributionWithTagsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateDistributionWithTags ( request ) ; |
public class SiteResourceLoader { /** * Loads the specific resource , from the site - specific jar file if one
* exists and contains the specified resource . If no resource exists
* with that path , null will be returned .
* @ param req the http request for which we are loading a resource
* ( this will be used to determine for which site the resource will be
* loaded ) .
* @ param path the path to the desired resource .
* @ return an input stream via which the resource can be read or null
* if no resource could be located with the specified path .
* @ exception IOException thrown if an I / O error occurs while loading
* a resource . */
public InputStream getResourceAsStream ( HttpServletRequest req , String path ) throws IOException { } } | return getResourceAsStream ( _siteIdent . identifySite ( req ) , path ) ; |
public class DataIO { /** * Check if large files can be mapped into memory .
* For example 32bit JVM can only address 2GB and large files can not be mapped ,
* so for 32bit JVM this function returns false . */
public static boolean JVMSupportsLargeMappedFiles ( ) { } } | String arch = System . getProperty ( "os.arch" ) ; if ( arch == null || ! arch . contains ( "64" ) ) { return false ; } if ( isWindows ( ) ) return false ; // TODO better check for 32bit JVM
return true ; |
public class LoginHandler { /** * The groups of the given person are updated with the information from the
* JAAS login context .
* TODO : no real check or update is done
* @ param _ login JAAS login context
* @ param _ person person for which the groups must be updated
* @ throws EFapsException if a method of the principals inside the JAAS
* login contexts could not be executed or the
* groups for the given person could not be set . */
protected void updateCompanies ( final LoginContext _login , final Person _person ) throws EFapsException { } } | if ( ! JAASSystem . getAllJAASSystems ( ) . isEmpty ( ) ) { _person . setCompanies ( JAASSystem . getAllJAASSystems ( ) . iterator ( ) . next ( ) , _person . getCompaniesFromDB ( null ) ) ; } |
public class JSON { /** * 将对象转换成json字符串
* @ param obj
* @ return */
public static String toJson ( Object obj ) { } } | try { return objectMapper . writeValueAsString ( obj ) ; } catch ( JsonProcessingException e ) { throw new RuntimeException ( e ) ; } |
public class CmsOuTree { /** * Load and expand given item . < p >
* @ param itemId to be expanded
* @ param type of item */
private void loadAndExpand ( Object itemId , I_CmsOuTreeType type ) { } } | if ( type . isOrgUnit ( ) ) { addChildrenForOUNode ( ( CmsOrganizationalUnit ) itemId ) ; } if ( type . isGroup ( ) ) { addChildrenForGroupsNode ( type , ( String ) itemId ) ; } if ( type . isRole ( ) ) { addChildrenForRolesNode ( ( String ) itemId ) ; } expandItem ( itemId ) ; |
public class InstantWidgetsFragment { /** * Implementation of the OnItemPickerEventListener interface */
@ Override public void onInstantChanged ( FloatingLabelInstantPicker source , Instant instant ) { } } | Toast . makeText ( getActivity ( ) , source . getInstantPrinter ( ) . print ( instant ) , Toast . LENGTH_SHORT ) . show ( ) ; |
public class JschUtil { /** * Executes a shell command on the remote host and returns its output.
 * ( Original comment in Chinese : " 执行Shell命令 " . )
 * @ param session the SSH session to run the command in
 * @ param cmd the command to execute
 * @ param charset charset used for sending the command and reading its output
 * @ return the command output as a { @ code String } ; error output is written to { @ code System . err } .
 * ( The previous doc claimed { @ link ChannelExec } , which contradicted the method 's return type . )
 * @ since 4.0.3 */
public static String exec ( Session session , String cmd , Charset charset ) { } } | return exec ( session , cmd , charset , System . err ) ; |
public class ParallelIndexSupervisorTask { /** * Allocate a new { @ link SegmentIdWithShardSpec } for a request from { @ link ParallelIndexSubTask } .
* The returned segmentIdentifiers have different { @ code partitionNum } ( thereby different { @ link NumberedShardSpec } )
* per bucket interval . */
@ POST @ Path ( "/segment/allocate" ) @ Produces ( SmileMediaTypes . APPLICATION_JACKSON_SMILE ) @ Consumes ( SmileMediaTypes . APPLICATION_JACKSON_SMILE ) public Response allocateSegment ( DateTime timestamp , @ Context final HttpServletRequest req ) { } } | ChatHandlers . authorizationCheck ( req , Action . READ , getDataSource ( ) , authorizerMapper ) ; if ( toolbox == null ) { return Response . status ( Response . Status . SERVICE_UNAVAILABLE ) . entity ( "task is not running yet" ) . build ( ) ; } try { final SegmentIdWithShardSpec segmentIdentifier = allocateNewSegment ( timestamp ) ; return Response . ok ( toolbox . getObjectMapper ( ) . writeValueAsBytes ( segmentIdentifier ) ) . build ( ) ; } catch ( IOException | IllegalStateException e ) { return Response . serverError ( ) . entity ( Throwables . getStackTraceAsString ( e ) ) . build ( ) ; } catch ( IllegalArgumentException e ) { return Response . status ( Response . Status . BAD_REQUEST ) . entity ( Throwables . getStackTraceAsString ( e ) ) . build ( ) ; } |
public class JdbcRepository { /** * getConnection . default using current JdbcTransaction ' s connection , if null get a new one .
* @ return { @ link Connection } */
private Connection getConnection ( ) { } } | final JdbcTransaction jdbcTransaction = TX . get ( ) ; if ( null != jdbcTransaction && jdbcTransaction . isActive ( ) ) { return jdbcTransaction . getConnection ( ) ; } Connection ret = CONN . get ( ) ; try { if ( null != ret && ! ret . isClosed ( ) ) { return ret ; } ret = Connections . getConnection ( ) ; } catch ( final SQLException e ) { LOGGER . log ( Level . ERROR , "Gets a connection failed" , e ) ; } CONN . set ( ret ) ; return ret ; |
public class HttpUtils { /** * Step 1 . prepare PUT request
* @ param path target url
* @ param headers request headers
* @ param parameters to PUT
* @ param timeOutInSeconds time out in seconds or null for default time out
* @ return put response */
public static HttpRequestBase put ( String path , Map < String , String > headers , Map < String , String > parameters , Integer timeOutInSeconds ) { } } | return put ( path , headers , parameters , null , timeOutInSeconds ) ; |
public class LeaderLatch { /** * Add this instance to the leadership election and attempt to acquire leadership .
* @ throws Exception errors */
public void start ( ) throws Exception { } } | Preconditions . checkState ( state . compareAndSet ( State . LATENT , State . STARTED ) , "Cannot be started more than once" ) ; startTask . set ( AfterConnectionEstablished . execute ( client , new Runnable ( ) { @ Override public void run ( ) { try { internalStart ( ) ; } finally { startTask . set ( null ) ; } } } ) ) ; |
public class FpUtils { /** * Returns unbiased exponent of a { @ code float } ; for
 * subnormal values , the number is treated as if it were
 * normalized . That is for all finite , non - zero , positive numbers
 * < i > x < / i > , < code > scalb ( < i > x < / i > , - ilogb ( < i > x < / i > ) ) < / code > is
 * always in the range [ 1 , 2 )
 * Special cases :
 * < ul >
 * < li > If the argument is NaN , then the result is 2 < sup > 30 < / sup > .
 * < li > If the argument is infinite , then the result is 2 < sup > 28 < / sup > .
 * < li > If the argument is zero , then the result is - ( 2 < sup > 28 < / sup > ) .
 * < / ul >
 * @ param f floating - point number whose exponent is to be extracted
 * @ return unbiased exponent of the argument .
 * @ author Joseph D . Darcy */
// Implementation note: getExponent() reports MAX_EXPONENT+1 for NaN/infinity and
// MIN_EXPONENT-1 for zero/subnormals; those sentinel cases are handled explicitly below.
// Subnormals are normalized in integer arithmetic: the significand is doubled (and the
// exponent decremented) until the implicit-bit position is reached.
public static int ilogb ( float f ) { } } | int exponent = getExponent ( f ) ; switch ( exponent ) { case FloatConsts . MAX_EXPONENT + 1 : // NaN or infinity
if ( isNaN ( f ) ) return ( 1 << 30 ) ; // 2 ^ 30
else // infinite value
return ( 1 << 28 ) ; // 2 ^ 28
case FloatConsts . MIN_EXPONENT - 1 : // zero or subnormal
if ( f == 0.0f ) { return - ( 1 << 28 ) ; // - ( 2 ^ 28)
} else { int transducer = Float . floatToRawIntBits ( f ) ; /* * To avoid causing slow arithmetic on subnormals ,
 * the scaling to determine when f ' s significand
 * is normalized is done in integer arithmetic .
 * ( there must be at least one " 1 " bit in the
 * significand since zero has been screened out . */
// isolate significand bits
transducer &= FloatConsts . SIGNIF_BIT_MASK ; assert ( transducer != 0 ) ; // This loop is simple and functional . We might be
// able to do something more clever that was faster ;
// e . g . number of leading zero detection on
// ( transducer < < ( # exponent and sign bits ) .
while ( transducer < ( 1 << ( FloatConsts . SIGNIFICAND_WIDTH - 1 ) ) ) { transducer *= 2 ; exponent -- ; } exponent ++ ; assert ( exponent >= FloatConsts . MIN_EXPONENT - ( FloatConsts . SIGNIFICAND_WIDTH - 1 ) && exponent < FloatConsts . MIN_EXPONENT ) ; return exponent ; } default : assert ( exponent >= FloatConsts . MIN_EXPONENT && exponent <= FloatConsts . MAX_EXPONENT ) ; return exponent ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.