signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class TSSGSSUPMechConfig { /** * { @ inheritDoc } */ @ FFDCIgnore ( { } }
UnsupportedEncodingException . class } ) @ Override public boolean isTrusted ( TrustedIDEvaluator trustedIDEvaluator , EstablishContext msg , Codec codec ) { // Get user and password from EstablishContext message and validate trust if ( msg != null && msg . client_authentication_token != null && msg . client_authentication_token . length > 0 ) { InitialContextToken token = new InitialContextToken ( ) ; if ( Util . decodeGSSUPToken ( codec , msg . client_authentication_token , token ) ) { try { String user = Util . extractUserNameFromScopedName ( token . username ) ; String password = new String ( token . password , "UTF8" ) ; return trustedIDEvaluator . isTrusted ( user , password ) ; } catch ( UnsupportedEncodingException e ) { // TODO : Determine if a message is needed } } } return false ;
public class GoogleHadoopOutputStream { /** * Writes the specified byte to this output stream . */ @ Override public void write ( int b ) throws IOException { } }
long startTime = System . nanoTime ( ) ; out . write ( b ) ; statistics . incrementBytesWritten ( 1 ) ; long duration = System . nanoTime ( ) - startTime ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . WRITE1 ) ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . WRITE1_TIME , duration ) ;
public class TypesLoaderVisitor { /** * preliminary " this " to allow depth first inheritance tree scope loading */ private void addPreliminaryThis ( Scope scope , Symbol symbol , TypeDeclaration type ) { } }
Symbol < TypeDeclaration > thisSymbol = new Symbol < TypeDeclaration > ( "this" , symbol . getType ( ) , type , ReferenceType . VARIABLE ) ; thisSymbol . setInnerScope ( scope ) ; scope . addSymbol ( thisSymbol ) ;
public class LocalTransactionContext { /** * Callableステートメント初期化 * @ param sqlContext SQLコンテキスト * @ return CallableStatement * @ throws SQLException SQL例外 */ CallableStatement getCallableStatement ( final SqlContext sqlContext ) throws SQLException { } }
Connection conn = null ; if ( sqlContext . getDbAlias ( ) != null ) { conn = getConnection ( sqlContext . getDbAlias ( ) ) ; } else { conn = getConnection ( ) ; } if ( conn == null ) { throw new IllegalArgumentException ( sqlContext . getDbAlias ( ) ) ; } CallableStatement stmt = sqlFilterManager . doCallableStatement ( sqlContext , conn . prepareCall ( sqlContext . getExecutableSql ( ) , sqlContext . getResultSetType ( ) , sqlContext . getResultSetConcurrency ( ) ) ) ; return stmt ;
public class AdExclusionRule { /** * Sets the type value for this AdExclusionRule . * @ param type * The derived type of this rule : whether it is associated with * labels , unified entities , * or competitive groups . Because it is derived , it * is also read - only , so changes made to this * field will not be persisted . */ public void setType ( com . google . api . ads . admanager . axis . v201902 . AdExclusionRuleType type ) { } }
this . type = type ;
public class OperaLauncherRunner { /** * { @ inheritDoc } * @ throws OperaRunnerException if launcher is shut down or not running */ public void stopOpera ( ) throws OperaRunnerException { } }
assertLauncherAlive ( ) ; if ( ! isOperaRunning ( ) ) { return ; } try { LauncherStopRequest . Builder request = LauncherStopRequest . newBuilder ( ) ; ResponseEncapsulation res = protocol . sendRequest ( MessageType . MSG_STOP , request . build ( ) . toByteArray ( ) ) ; if ( handleStatusMessage ( res . getResponse ( ) ) == StatusType . RUNNING ) { throw new IOException ( "launcher unable to stop binary" ) ; } } catch ( IOException e ) { throw new OperaRunnerException ( "Could not stop Opera: " + e . getMessage ( ) ) ; }
public class AmazonGlacierClient { /** * This operation completes the vault locking process by transitioning the vault lock from the * < code > InProgress < / code > state to the < code > Locked < / code > state , which causes the vault lock policy to become * unchangeable . A vault lock is put into the < code > InProgress < / code > state by calling < a > InitiateVaultLock < / a > . You * can obtain the state of the vault lock by calling < a > GetVaultLock < / a > . For more information about the vault * locking process , < a href = " http : / / docs . aws . amazon . com / amazonglacier / latest / dev / vault - lock . html " > Amazon Glacier * Vault Lock < / a > . * This operation is idempotent . This request is always successful if the vault lock is in the < code > Locked < / code > * state and the provided lock ID matches the lock ID originally used to lock the vault . * If an invalid lock ID is passed in the request when the vault lock is in the < code > Locked < / code > state , the * operation returns an < code > AccessDeniedException < / code > error . If an invalid lock ID is passed in the request * when the vault lock is in the < code > InProgress < / code > state , the operation throws an * < code > InvalidParameter < / code > error . * @ param completeVaultLockRequest * The input values for < code > CompleteVaultLock < / code > . * @ return Result of the CompleteVaultLock operation returned by the service . * @ throws ResourceNotFoundException * Returned if the specified resource ( such as a vault , upload ID , or job ID ) doesn ' t exist . * @ throws InvalidParameterValueException * Returned if a parameter of the request is incorrectly specified . * @ throws MissingParameterValueException * Returned if a required header or parameter is missing from the request . * @ throws ServiceUnavailableException * Returned if the service cannot complete the request . * @ sample AmazonGlacier . 
CompleteVaultLock */ @ Override public CompleteVaultLockResult completeVaultLock ( CompleteVaultLockRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCompleteVaultLock ( request ) ;
public class QueryLimitOverride { /** * Iterates over the list of overrides and return the first that matches or * the default if no match is found . * NOTE : The set of expressions is not sorted so if more than one regex * matches the string , the result is indeterministic . * If the metric is null or empty , the default limit is returned . * @ param metric The string to match * @ return The matched or default limit . */ public synchronized long getDataPointLimit ( final String metric ) { } }
if ( metric == null || metric . isEmpty ( ) ) { return default_data_points_limit ; } for ( final QueryLimitOverrideItem item : overrides . values ( ) ) { if ( item . matches ( metric ) ) { return item . getDataPointsLimit ( ) ; } } return default_data_points_limit ;
public class ApiOvhSms { /** * Get this object properties * REST : GET / sms / { serviceName } / phonebooks / { bookKey } * @ param serviceName [ required ] The internal name of your SMS offer * @ param bookKey [ required ] Identifier of the phonebook */ public OvhPhonebook serviceName_phonebooks_bookKey_GET ( String serviceName , String bookKey ) throws IOException { } }
String qPath = "/sms/{serviceName}/phonebooks/{bookKey}" ; StringBuilder sb = path ( qPath , serviceName , bookKey ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhPhonebook . class ) ;
public class DayOfYear { /** * Obtains the current day - of - year from the specified clock . * This will query the specified clock to obtain the current day - of - year . * Using this method allows the use of an alternate clock for testing . * The alternate clock may be introduced using { @ link Clock dependency injection } . * @ param clock the clock to use , not null * @ return the current day - of - year , not null */ public static DayOfYear now ( Clock clock ) { } }
final LocalDate now = LocalDate . now ( clock ) ; // called once return DayOfYear . of ( now . getDayOfYear ( ) ) ;
public class BaseConstraint { /** * Searches for the constraint child element in the element and gets a constraint from the * factory . If there is no constraint child element , a constraint _ ref element is searched and * the constraint is taken from the factory map . * @ return the constraint contained in this element */ protected final Constraint getConstraint ( final Element element ) { } }
Element constraintElement = element . getChild ( XMLTags . CONSTRAINT ) ; if ( constraintElement != null ) { return generator . getConstraintFactory ( ) . createModel ( random , constraintElement ) ; } constraintElement = element . getChild ( XMLTags . CONSTRAINT_REF ) ; if ( constraintElement != null ) { try { return generator . getConstraintFactory ( ) . getModel ( constraintElement ) ; } catch ( IdNotFoundException e ) { log . error ( "Could not find constraint in map. Maybe it has not been initialised;" + " in this case, try rearranging order of constraints in the xml file." , e ) ; } } throw new IllegalStateException ( "No element constraint or constraint_ref could be found for " + this ) ;
public class ConcurrentCommonCache { /** * deal with the backed cache guarded by read lock * @ param action the content to complete */ private < R > R doWithReadLock ( Action < K , V , R > action ) { } }
readLock . lock ( ) ; try { return action . doWith ( commonCache ) ; } finally { readLock . unlock ( ) ; }
public class SyntacticFeatureFactory { /** * Region start # * Functions that create feature vectors of a specific word in the * sentence */ public FeatureVector createWordFeatures ( DependencyInstance dependencyInstance , int i ) { } }
int [ ] pos = dependencyInstance . getPostagids ( ) ; int [ ] posA = dependencyInstance . getCpostagids ( ) ; int [ ] toks = dependencyInstance . getFormids ( ) ; int [ ] lemma = dependencyInstance . getLemmaids ( ) ; int p0 = pos [ i ] ; int pp = i > 0 ? pos [ i - 1 ] : this . tokenStart ; int pn = i < pos . length - 1 ? pos [ i + 1 ] : this . tokenEnd ; int c0 = posA [ i ] ; int cp = i > 0 ? posA [ i - 1 ] : this . tokenStart ; int cn = i < posA . length - 1 ? posA [ i + 1 ] : this . tokenEnd ; int w0 = toks [ i ] ; int wp = i == 0 ? this . tokenStart : toks [ i - 1 ] ; int wn = i == dependencyInstance . getLength ( ) - 1 ? this . tokenEnd : toks [ i + 1 ] ; int l0 = 0 ; int lp = 0 ; int ln = 0 ; if ( lemma != null ) { l0 = lemma [ i ] ; lp = i == 0 ? this . tokenStart : lemma [ i - 1 ] ; ln = i == dependencyInstance . getLength ( ) - 1 ? this . tokenEnd : lemma [ i + 1 ] ; } FeatureVector fv = new FeatureVector ( ) ; long code ; code = createWordCodeP ( WORDFV_BIAS , 0 ) ; addWordFeature ( code , fv ) ; code = createWordCodeW ( WORDFV_W0 , w0 ) ; addWordFeature ( code , fv ) ; code = createWordCodeW ( WORDFV_Wp , wp ) ; addWordFeature ( code , fv ) ; code = createWordCodeW ( WORDFV_Wn , wn ) ; addWordFeature ( code , fv ) ; if ( l0 != 0 ) { code = createWordCodeW ( WORDFV_W0 , l0 ) ; addWordFeature ( code , fv ) ; code = createWordCodeW ( WORDFV_Wp , lp ) ; addWordFeature ( code , fv ) ; code = createWordCodeW ( WORDFV_Wn , ln ) ; addWordFeature ( code , fv ) ; } code = createWordCodeP ( WORDFV_P0 , p0 ) ; addWordFeature ( code , fv ) ; code = createWordCodeP ( WORDFV_Pp , pp ) ; addWordFeature ( code , fv ) ; code = createWordCodeP ( WORDFV_Pn , pn ) ; addWordFeature ( code , fv ) ; code = createWordCodeP ( WORDFV_P0 , c0 ) ; addWordFeature ( code , fv ) ; code = createWordCodeP ( WORDFV_Pp , cp ) ; addWordFeature ( code , fv ) ; code = createWordCodeP ( WORDFV_Pn , cn ) ; addWordFeature ( code , fv ) ; code = createWordCodePP ( WORDFV_PpP0 , pp , p0 ) ; 
addWordFeature ( code , fv ) ; code = createWordCodePP ( WORDFV_P0Pn , p0 , pn ) ; addWordFeature ( code , fv ) ; code = createWordCodePP ( WORDFV_PpPn , pp , pn ) ; addWordFeature ( code , fv ) ; code = createWordCodePPP ( WORDFV_PpP0Pn , pp , p0 , pn ) ; addWordFeature ( code , fv ) ; code = createWordCodePP ( WORDFV_PpP0 , cp , c0 ) ; addWordFeature ( code , fv ) ; code = createWordCodePP ( WORDFV_P0Pn , c0 , cn ) ; addWordFeature ( code , fv ) ; code = createWordCodePP ( WORDFV_PpPn , cp , cn ) ; addWordFeature ( code , fv ) ; code = createWordCodePPP ( WORDFV_PpP0Pn , cp , c0 , cn ) ; addWordFeature ( code , fv ) ; code = createWordCodeWP ( WORDFV_W0P0 , w0 , p0 ) ; addWordFeature ( code , fv ) ; code = createWordCodeWP ( WORDFV_W0P0 , w0 , c0 ) ; addWordFeature ( code , fv ) ; if ( l0 != 0 ) { code = createWordCodeWP ( WORDFV_W0P0 , l0 , p0 ) ; addWordFeature ( code , fv ) ; code = createWordCodeWP ( WORDFV_W0P0 , l0 , c0 ) ; addWordFeature ( code , fv ) ; code = createWordCodeWP ( WORDFV_W0Pp , l0 , cp ) ; addWordFeature ( code , fv ) ; code = createWordCodeWP ( WORDFV_W0Pn , l0 , cn ) ; addWordFeature ( code , fv ) ; code = createWordCodeWP ( WORDFV_WpPp , lp , cp ) ; addWordFeature ( code , fv ) ; code = createWordCodeWP ( WORDFV_WnPn , ln , cn ) ; addWordFeature ( code , fv ) ; } int [ ] [ ] feats = dependencyInstance . getFeatids ( ) ; if ( feats [ i ] != null ) { for ( int u = 0 ; u < feats [ i ] . length ; ++ u ) { int f = feats [ i ] [ u ] ; code = createWordCodeP ( WORDFV_P0 , f ) ; addWordFeature ( code , fv ) ; if ( l0 != 0 ) { code = createWordCodeWP ( WORDFV_W0P0 , l0 , f ) ; addWordFeature ( code , fv ) ; } } } return fv ;
public class DDF { /** * This will be called via the { @ link ISupportPhantomReference } interface if this object was registered under * { @ link PhantomReference } . */ @ Override public void cleanup ( ) { } }
// @ formatter : off this . setMLSupporter ( null ) . setStatisticsSupporter ( null ) . setIndexingHandler ( null ) . setJoinsHandler ( null ) . setMetaDataHandler ( null ) . setMiscellanyHandler ( null ) . setMissingDataHandler ( null ) . setMutabilityHandler ( null ) . setSqlHandler ( null ) . setPersistenceHandler ( null ) . setRepresentationHandler ( null ) . setReshapingHandler ( null ) . setSchemaHandler ( null ) . setStreamingDataHandler ( null ) . setTimeSeriesHandler ( null ) ; // @ formatter : on
public class KeyVaultClientBaseImpl { /** * Encrypts an arbitrary sequence of bytes using an encryption key that is stored in a key vault . * The ENCRYPT operation encrypts an arbitrary sequence of bytes using an encryption key that is stored in Azure Key Vault . Note that the ENCRYPT operation only supports a single block of data , the size of which is dependent on the target key and the encryption algorithm to be used . The ENCRYPT operation is only strictly necessary for symmetric keys stored in Azure Key Vault since protection with an asymmetric key can be performed using public portion of the key . This operation is supported for asymmetric keys as a convenience for callers that have a key - reference but do not have access to the public key material . This operation requires the keys / encypt permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ param keyName The name of the key . * @ param keyVersion The version of the key . * @ param algorithm algorithm identifier . Possible values include : ' RSA - OAEP ' , ' RSA - OAEP - 256 ' , ' RSA1_5' * @ param value the Base64Url value * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the KeyOperationResult object */ public Observable < KeyOperationResult > encryptAsync ( String vaultBaseUrl , String keyName , String keyVersion , JsonWebKeyEncryptionAlgorithm algorithm , byte [ ] value ) { } }
return encryptWithServiceResponseAsync ( vaultBaseUrl , keyName , keyVersion , algorithm , value ) . map ( new Func1 < ServiceResponse < KeyOperationResult > , KeyOperationResult > ( ) { @ Override public KeyOperationResult call ( ServiceResponse < KeyOperationResult > response ) { return response . body ( ) ; } } ) ;
public class Predicates { /** * Returns a Predicate that evaluates to true iff any one of its components * evaluates to true . The components are evaluated in order , and evaluation * will be " short - circuited " as soon as the answer is determined . */ public static < T > Predicate < T > or ( Predicate < ? super T > ... components ) { } }
return or ( Arrays . asList ( components ) ) ;
public class FnNumber { /** * Determines whether the result of executing the specified function * on the target object is less or equal to the specified object parameter * in value , this is , whether < tt > functionResult . compareTo ( object ) & lt ; = 0 < / tt > . Both * the target and the specified object have to implement { @ link Comparable } . * @ param object the object to compare to the target * @ return true if function result is less or equal to the specified object , false if not */ public static final Function < Number , Boolean > lessOrEqToBy ( final IFunction < Number , ? > by , final Number object ) { } }
return FnObject . lessOrEqToBy ( by , object ) ;
public class ArteVideoDetailsDeserializer { /** * Liefert den Beginn der Ausstrahlung , * wenn * - heute im Zeitraum von CatchUpRights liegt * - oder heute vor dem Zeitraum liegt * - oder CatchUpRights nicht gesetzt ist * @ param broadcastObject * @ return der Beginn der Ausstrahlung oder " " */ private String getBroadcastDateConsideringCatchupRights ( JsonObject broadcastObject ) { } }
String broadcastDate = "" ; JsonElement elementBegin = broadcastObject . get ( JSON_ELEMENT_BROADCAST_CATCHUPRIGHTS_BEGIN ) ; JsonElement elementEnd = broadcastObject . get ( JSON_ELEMENT_BROADCAST_CATCHUPRIGHTS_END ) ; if ( ! elementBegin . isJsonNull ( ) && ! elementEnd . isJsonNull ( ) ) { String begin = elementBegin . getAsString ( ) ; String end = elementEnd . getAsString ( ) ; try { Calendar beginDate = Calendar . getInstance ( ) ; beginDate . setTime ( broadcastDateFormat . parse ( begin ) ) ; Calendar endDate = Calendar . getInstance ( ) ; endDate . setTime ( broadcastDateFormat . parse ( end ) ) ; if ( ( DateWithoutTimeComparer . compare ( today , beginDate ) >= 0 && DateWithoutTimeComparer . compare ( today , endDate ) <= 0 ) || ( DateWithoutTimeComparer . compare ( today , beginDate ) < 0 ) ) { // wenn das heutige Datum zwischen begin und end liegt , // dann ist es die aktuelle Ausstrahlung broadcastDate = broadcastObject . get ( JSON_ELEMENT_BROADCAST ) . getAsString ( ) ; } } catch ( ParseException ex ) { LOG . debug ( ex ) ; } } else { String broadcast = broadcastObject . get ( JSON_ELEMENT_BROADCAST ) . getAsString ( ) ; try { Calendar broadcastCal = Calendar . getInstance ( ) ; broadcastCal . setTime ( broadcastDateFormat . parse ( broadcast ) ) ; broadcastDate = broadcast ; } catch ( ParseException ex ) { LOG . debug ( ex ) ; } } return broadcastDate ;
public class DVWCWordsiMain { /** * { @ inheritDoc } */ protected void addExtraOptions ( ArgOptions options ) { } }
super . addExtraOptions ( options ) ; options . addOption ( 'H' , "usePartsOfSpeech" , "If provided, parts of speech will be used as part " + "of the word occurrence features." , false , null , "Optional" ) ; options . addOption ( 'O' , "useWordOrdering" , "If provided, parts of speech will be used as part " + "of the word occurrence features." , false , null , "Optional" ) ;
public class GoogleClientSecrets { /** * Loads the { @ code client _ secrets . json } file from the given reader . * @ since 1.15 */ public static GoogleClientSecrets load ( JsonFactory jsonFactory , Reader reader ) throws IOException { } }
return jsonFactory . fromReader ( reader , GoogleClientSecrets . class ) ;
public class ExpressionsRetinaApiImpl { /** * { @ inheritDoc } */ @ Override public Fingerprint resolve ( Model model ) throws JsonProcessingException , ApiException { } }
return resolve ( null , model ) ;
public class ApiOvhEmailexchange { /** * Get this object properties * REST : GET / email / exchange / { organizationName } / service / { exchangeService } / mailingList / { mailingListAddress } / alias / { alias } * @ param organizationName [ required ] The internal name of your exchange organization * @ param exchangeService [ required ] The internal name of your exchange service * @ param mailingListAddress [ required ] The mailing list address * @ param alias [ required ] Alias */ public OvhExchangeMailingListAlias organizationName_service_exchangeService_mailingList_mailingListAddress_alias_alias_GET ( String organizationName , String exchangeService , String mailingListAddress , String alias ) throws IOException { } }
String qPath = "/email/exchange/{organizationName}/service/{exchangeService}/mailingList/{mailingListAddress}/alias/{alias}" ; StringBuilder sb = path ( qPath , organizationName , exchangeService , mailingListAddress , alias ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhExchangeMailingListAlias . class ) ;
public class DatamodelConverter { /** * Converts a map of language keys to lists of { @ link MonolingualTextValue } * objects to a flat list of such objects , as required for the factory * methods , where the values in the flat lists are new copies of the * original values . * @ param aliasMap * the map to convert * @ return the flattened list with copied values */ private List < MonolingualTextValue > copyAliasMap ( Map < String , List < MonolingualTextValue > > aliasMap ) { } }
List < MonolingualTextValue > aliases = new ArrayList < > ( ) ; for ( Entry < String , List < MonolingualTextValue > > langAliases : aliasMap . entrySet ( ) ) { if ( filter . includeLanguage ( langAliases . getKey ( ) ) ) { for ( MonolingualTextValue mtv : langAliases . getValue ( ) ) { aliases . add ( copy ( mtv ) ) ; } } } return aliases ;
public class vpnvserver_auditnslogpolicy_binding { /** * Use this API to fetch vpnvserver _ auditnslogpolicy _ binding resources of given name . */ public static vpnvserver_auditnslogpolicy_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
vpnvserver_auditnslogpolicy_binding obj = new vpnvserver_auditnslogpolicy_binding ( ) ; obj . set_name ( name ) ; vpnvserver_auditnslogpolicy_binding response [ ] = ( vpnvserver_auditnslogpolicy_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class ConfServlet { /** * Guts of the servlet - extracted for easy testing . */ static void writeResponse ( Configuration conf , Writer out , String format ) throws IOException , BadFormatException { } }
if ( FORMAT_JSON . equals ( format ) ) { Configuration . dumpConfiguration ( conf , out ) ; } else if ( FORMAT_XML . equals ( format ) ) { conf . writeXml ( out ) ; } else { throw new BadFormatException ( "Bad format: " + format ) ; }
public class Time { /** * Creates a string representation of the time object . * The string is returned on the form m : ss , * where m is variable digits minutes and ss is two digits seconds . * @ return a string representation of the time object * @ throws NumberFormatException * @ see TimeFormat # parse ( String ) * @ see # toString ( String ) * @ deprecated */ public static Time parseTime ( String pStr ) { } }
TimeFormat tf = TimeFormat . getInstance ( ) ; return tf . parse ( pStr ) ;
public class CommerceAccountUserRelUtil { /** * Returns the commerce account user rels before and after the current commerce account user rel in the ordered set where commerceAccountUserId = & # 63 ; . * @ param commerceAccountUserRelPK the primary key of the current commerce account user rel * @ param commerceAccountUserId the commerce account user ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the previous , current , and next commerce account user rel * @ throws NoSuchAccountUserRelException if a commerce account user rel with the primary key could not be found */ public static CommerceAccountUserRel [ ] findByCommerceAccountUserId_PrevAndNext ( CommerceAccountUserRelPK commerceAccountUserRelPK , long commerceAccountUserId , OrderByComparator < CommerceAccountUserRel > orderByComparator ) throws com . liferay . commerce . account . exception . NoSuchAccountUserRelException { } }
return getPersistence ( ) . findByCommerceAccountUserId_PrevAndNext ( commerceAccountUserRelPK , commerceAccountUserId , orderByComparator ) ;
public class CliCommandBuilder { /** * Adds an argument to be passed to the CLI command ignore the argument if { @ code null } . * @ param arg the argument to pass * @ return the builder */ public CliCommandBuilder addCliArgument ( final String arg ) { } }
if ( arg != null ) { final Argument argument = Arguments . parse ( arg ) ; final CliArgument cliArgument = CliArgument . find ( argument . getKey ( ) ) ; if ( cliArgument != null ) { // Remove the alternate key if required if ( cliArgument . altKey != null ) { cliArgs . remove ( cliArgument . altKey ) ; } } cliArgs . set ( argument ) ; } return this ;
public class MSPDITimephasedWorkNormaliser { /** * This method merges together assignment data for the same day . * @ param calendar current calendar * @ param list assignment data */ private void mergeSameDay ( ProjectCalendar calendar , LinkedList < TimephasedWork > list ) { } }
LinkedList < TimephasedWork > result = new LinkedList < TimephasedWork > ( ) ; TimephasedWork previousAssignment = null ; for ( TimephasedWork assignment : list ) { if ( previousAssignment == null ) { assignment . setAmountPerDay ( assignment . getTotalAmount ( ) ) ; result . add ( assignment ) ; } else { Date previousAssignmentStart = previousAssignment . getStart ( ) ; Date previousAssignmentStartDay = DateHelper . getDayStartDate ( previousAssignmentStart ) ; Date assignmentStart = assignment . getStart ( ) ; Date assignmentStartDay = DateHelper . getDayStartDate ( assignmentStart ) ; if ( previousAssignmentStartDay . getTime ( ) == assignmentStartDay . getTime ( ) ) { Duration previousAssignmentWork = previousAssignment . getTotalAmount ( ) ; Duration assignmentWork = assignment . getTotalAmount ( ) ; if ( previousAssignmentWork . getDuration ( ) != 0 && assignmentWork . getDuration ( ) == 0 ) { continue ; } result . removeLast ( ) ; if ( previousAssignmentWork . getDuration ( ) != 0 && assignmentWork . getDuration ( ) != 0 ) { double work = previousAssignment . getTotalAmount ( ) . getDuration ( ) ; work += assignment . getTotalAmount ( ) . getDuration ( ) ; Duration totalWork = Duration . getInstance ( work , TimeUnit . MINUTES ) ; TimephasedWork merged = new TimephasedWork ( ) ; merged . setStart ( previousAssignment . getStart ( ) ) ; merged . setFinish ( assignment . getFinish ( ) ) ; merged . setTotalAmount ( totalWork ) ; assignment = merged ; } else { if ( assignmentWork . getDuration ( ) == 0 ) { assignment = previousAssignment ; } } } assignment . setAmountPerDay ( assignment . getTotalAmount ( ) ) ; result . add ( assignment ) ; } Duration calendarWork = calendar . getWork ( assignment . getStart ( ) , assignment . getFinish ( ) , TimeUnit . MINUTES ) ; Duration assignmentWork = assignment . getTotalAmount ( ) ; if ( calendarWork . getDuration ( ) == 0 && assignmentWork . getDuration ( ) == 0 ) { result . 
removeLast ( ) ; } else { previousAssignment = assignment ; } } list . clear ( ) ; list . addAll ( result ) ;
public class NetUtils { /** * Get the default socket factory as specified by the configuration * parameter < tt > hadoop . rpc . socket . factory . default < / tt > * @ param conf the configuration * @ return the default socket factory as specified in the configuration or * the JVM default socket factory if the configuration does not * contain a default socket factory property . */ public static SocketFactory getDefaultSocketFactory ( Configuration conf ) { } }
String propValue = conf . get ( "hadoop.rpc.socket.factory.class.default" ) ; if ( ( propValue == null ) || ( propValue . length ( ) == 0 ) ) return SocketFactory . getDefault ( ) ; return getSocketFactoryFromProperty ( conf , propValue ) ;
public class IfcShapeModelImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) public EList < IfcShapeAspect > getOfShapeAspect ( ) { } }
return ( EList < IfcShapeAspect > ) eGet ( Ifc2x3tc1Package . Literals . IFC_SHAPE_MODEL__OF_SHAPE_ASPECT , true ) ;
public class StratifiedSampling { /** * Calculate the variance from the sample * @ param sampleDataCollection * @ param populationNh * @ return */ public static double variance ( TransposeDataCollection sampleDataCollection , AssociativeArray populationNh ) { } }
double variance = 0.0 ; int populationN = 0 ; double mean = mean ( sampleDataCollection , populationNh ) ; for ( Map . Entry < Object , FlatDataCollection > entry : sampleDataCollection . entrySet ( ) ) { Object strata = entry . getKey ( ) ; Integer strataPopulation = ( ( Number ) populationNh . get ( strata ) ) . intValue ( ) ; if ( strataPopulation == null ) { throw new IllegalArgumentException ( "Invalid strata population size." ) ; } populationN += strataPopulation ; // Analysis of Variance // Within Strata variance += ( strataPopulation - 1 ) * SimpleRandomSampling . variance ( entry . getValue ( ) ) ; // Between Strata variance += strataPopulation * Math . pow ( SimpleRandomSampling . mean ( entry . getValue ( ) ) - mean , 2 ) ; } variance /= ( populationN - 1 ) ; return variance ;
public class PolicyComplianceDetail { /** * Details about problems with dependent services , such as AWS WAF or AWS Config , that are causing a resource to be * non - compliant . The details include the name of the dependent service and the error message received that * indicates the problem with the service . * @ param issueInfoMap * Details about problems with dependent services , such as AWS WAF or AWS Config , that are causing a resource * to be non - compliant . The details include the name of the dependent service and the error message received * that indicates the problem with the service . * @ return Returns a reference to this object so that method calls can be chained together . */ public PolicyComplianceDetail withIssueInfoMap ( java . util . Map < String , String > issueInfoMap ) { } }
setIssueInfoMap ( issueInfoMap ) ; return this ;
public class AbstractErrorWebExceptionHandler { /** * Check whether the trace attribute has been set on the given request . * @ param request the source request * @ return { @ code true } if the error trace has been requested , { @ code false } otherwise */ protected boolean isTraceEnabled ( ServerRequest request ) { } }
String parameter = request . queryParam ( "trace" ) . orElse ( "false" ) ; return ! "false" . equalsIgnoreCase ( parameter ) ;
public class JsonOutput { /** * Writes a JSON object start . * @ throws IOException if an error occurs */ void writeObjectStart ( ) throws IOException { } }
output . append ( '{' ) ; currentIndent = currentIndent + indent ; commaDepth ++ ; commaState . set ( commaDepth , false ) ;
public class GeneratePluginConfigListener { /** * ( non - Javadoc ) * @ see com . ibm . ws . container . service . state . ApplicationStateListener # applicationStopped ( com . ibm . ws . container . service . app . deploy . ApplicationInfo ) */ @ Override public void applicationStopped ( ApplicationInfo appInfo ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "application stopped : " + appInfo . getName ( ) ) ; runFutureGeneratePluginTask ( ) ; this . appsInService -- ;
public class Sso { /** * Build a { @ linkplain Response } with the provided base64 encoded XML string * @ param samlResponse * @ return * @ throws MnoException */ public Response buildResponse ( String samlResponse ) throws MnoException { } }
try { return Response . loadFromBase64XML ( this , samlResponse ) ; } catch ( Exception e ) { throw new MnoException ( "Could not build Response from samlResponse: " + samlResponse , e ) ; }
public class VoltXMLElementHelper { /** * Build VoltXMLElement for expression like " LIMIT 1 " . */ public static List < VoltXMLElement > buildLimitElements ( int limit , String limitValueElementId ) { } }
if ( limitValueElementId == null ) { return null ; } List < VoltXMLElement > retval = new ArrayList < VoltXMLElement > ( ) ; retval . add ( new VoltXMLElement ( "offset" ) ) ; VoltXMLElement limitElement = new VoltXMLElement ( "limit" ) ; String strLimit = String . valueOf ( limit ) ; limitElement . attributes . put ( "limit" , strLimit ) ; limitElement . children . add ( buildValueElement ( limitValueElementId , false , strLimit , "BIGINT" ) ) ; retval . add ( limitElement ) ; return retval ;
public class AbatisService {
    /**
     * Derives a bean accessor method name from a field name.
     *
     * @param fieldName the bean field name (may be null or empty)
     * @param type 0 for a getter ("getX..."), any other value for a setter ("setX...")
     * @return the method name, or the empty string when fieldName is null/empty
     */
    private String getBeanMethodName(String fieldName, int type) {
        // FIX: the original compared the String with == ("fieldName == \"\""),
        // which only matches the interned literal; use isEmpty() instead.
        if (fieldName == null || fieldName.isEmpty()) {
            return "";
        }
        String methodName = (type == 0) ? "get" : "set";
        // Capitalize the first character of the field name.
        methodName += fieldName.substring(0, 1).toUpperCase();
        if (fieldName.length() == 1) {
            return methodName;
        }
        methodName += fieldName.substring(1);
        return methodName;
    }
}
public class BsLabelType {
    /**
     * Serializes this label type into a field-name-to-value map suitable for
     * indexing. Fields that are {@code null} are omitted entirely rather than
     * emitted with a null value.
     *
     * @return the populated source map
     */
    @Override
    public Map<String, Object> toSource() {
        Map<String, Object> sourceMap = new HashMap<>();
        // Emit each field only when present.
        if (createdBy != null) { addFieldToSource(sourceMap, "createdBy", createdBy); }
        if (createdTime != null) { addFieldToSource(sourceMap, "createdTime", createdTime); }
        if (excludedPaths != null) { addFieldToSource(sourceMap, "excludedPaths", excludedPaths); }
        if (includedPaths != null) { addFieldToSource(sourceMap, "includedPaths", includedPaths); }
        if (name != null) { addFieldToSource(sourceMap, "name", name); }
        if (permissions != null) { addFieldToSource(sourceMap, "permissions", permissions); }
        if (sortOrder != null) { addFieldToSource(sourceMap, "sortOrder", sortOrder); }
        if (updatedBy != null) { addFieldToSource(sourceMap, "updatedBy", updatedBy); }
        if (updatedTime != null) { addFieldToSource(sourceMap, "updatedTime", updatedTime); }
        if (value != null) { addFieldToSource(sourceMap, "value", value); }
        if (virtualHost != null) { addFieldToSource(sourceMap, "virtualHost", virtualHost); }
        return sourceMap;
    }
}
public class InitialReportWorker {
    /**
     * Processes a single initial block report by spawning multiple threads to
     * handle insertion into the blocks map. Each worker stores its inserted
     * blocks in a local list; the lists are concatenated into {@code toRetry}
     * for the datanode when absent-block retry is enabled.
     *
     * @param namesystem the namesystem whose block map is populated
     * @param toRetry receives blocks that must be retried (only when retry is enabled)
     * @param newReport the raw block report from the datanode
     * @param node the reporting datanode
     * @param initialBlockReportExecutor executor used for the shard workers
     * @throws IOException if a worker fails or the wait is interrupted
     */
    static void processReport(FSNamesystem namesystem, Collection<Block> toRetry,
            BlockListAsLongs newReport, DatanodeDescriptor node,
            ExecutorService initialBlockReportExecutor) throws IOException {
        // One worker per parallelBRblocksPerShard blocks (rounded up), capped
        // by the configured number of processing threads.
        int numShards = Math.min(namesystem.parallelProcessingThreads,
                ((newReport.getNumberOfBlocks() + namesystem.parallelBRblocksPerShard - 1)
                        / namesystem.parallelBRblocksPerShard));
        List<Future<List<Block>>> workers = new ArrayList<Future<List<Block>>>(numShards);
        // Submit all shard tasks for execution.
        for (int i = 0; i < numShards; i++) {
            workers.add(initialBlockReportExecutor.submit(new InitialReportWorker(newReport, i, numShards, node,
                    namesystem.getNameNode().shouldRetryAbsentBlocks(), namesystem)));
        }
        // Collect results; each worker returns the blocks to retry.
        try {
            for (Future<List<Block>> worker : workers) {
                if (namesystem.getNameNode().shouldRetryAbsentBlocks()) {
                    toRetry.addAll(worker.get());
                } else {
                    worker.get();
                }
            }
        } catch (ExecutionException e) {
            LOG.warn("Parallel report failed", e);
            throw new IOException(e);
        } catch (InterruptedException e) {
            // FIX: restore the thread's interrupt status before translating to
            // IOException so callers and thread pools still observe it.
            Thread.currentThread().interrupt();
            throw new IOException("Interruption", e);
        }
    }
}
public class ThreadPool {
    /**
     * Blocks the calling thread until this pool has been stopped.
     * Uses a bounded wait and re-checks the stop condition under the lock,
     * so a missed notification only delays wake-up by at most 30 seconds.
     */
    public void join() {
        while (isStarted() && _pool != null) {
            synchronized (_join) {
                try {
                    // Re-check under the lock to avoid waiting after a
                    // stop that happened between the outer check and here.
                    if (isStarted() && _pool != null)
                        _join.wait(30000);
                } catch (Exception e) {
                    // Deliberate best-effort: interruptions and spurious
                    // failures are logged-and-ignored; the loop re-tests.
                    LogSupport.ignore(log, e);
                }
            }
        }
    }
}
public class LdapTemplate { /** * { @ inheritDoc } */ @ Override public < T > List < T > find ( LdapQuery query , Class < T > clazz ) { } }
SearchControls searchControls = searchControlsForQuery ( query , RETURN_OBJ_FLAG ) ; return find ( query . base ( ) , query . filter ( ) , searchControls , clazz ) ;
public class GatewayMicroService { /** * The rate limiter component . */ protected void registerRateLimiterComponent ( ) { } }
String componentPropName = GatewayConfigProperties . COMPONENT_PREFIX + IRateLimiterComponent . class . getSimpleName ( ) ; setConfigProperty ( componentPropName , ESRateLimiterComponent . class . getName ( ) ) ; setConfigProperty ( componentPropName + ".client.type" , "jest" ) ; setConfigProperty ( componentPropName + ".client.protocol" , "${apiman.es.protocol}" ) ; setConfigProperty ( componentPropName + ".client.host" , "${apiman.es.host}" ) ; setConfigProperty ( componentPropName + ".client.port" , "${apiman.es.port}" ) ; setConfigProperty ( componentPropName + ".client.username" , "${apiman.es.username}" ) ; setConfigProperty ( componentPropName + ".client.password" , "${apiman.es.password}" ) ;
public class Matrix4d { /** * Set the value of the matrix element at column 2 and row 0. * @ param m20 * the new value * @ return this */ public Matrix4d m20 ( double m20 ) { } }
this . m20 = m20 ; properties &= ~ PROPERTY_ORTHONORMAL ; if ( m20 != 0.0 ) properties &= ~ ( PROPERTY_IDENTITY | PROPERTY_PERSPECTIVE | PROPERTY_TRANSLATION ) ; return this ;
public class Neo4JIndexManager {
    /**
     * Adds relationship index entries for the singular attributes (including
     * the ID) of the given relationship entity. The ID column is indexed
     * unconditionally; other columns are indexed only when listed in the
     * entity's index properties.
     *
     * @param entityMetadata metadata of the relationship entity
     * @param relationship the Neo4j relationship to index
     * @param relationshipIndex the index to populate
     * @param metaModel JPA metamodel used to enumerate singular attributes
     */
    private void addRelationshipIndex(EntityMetadata entityMetadata, Relationship relationship,
            Index<Relationship> relationshipIndex, MetamodelImpl metaModel) {
        // MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(
        // entityMetadata.getPersistenceUnit());
        // ID attribute has to be indexed first
        String idColumnName = ((AbstractAttribute) entityMetadata.getIdAttribute()).getJPAColumnName();
        relationshipIndex.add(relationship, idColumnName, relationship.getProperty(idColumnName));
        // Index all other fields , for whom indexing is enabled
        for (Attribute attribute : metaModel.entity(entityMetadata.getEntityClazz()).getSingularAttributes()) {
            Field field = (Field) attribute.getJavaMember();
            // Skip collections/associations; index only fields explicitly
            // enabled via the entity's index properties.
            if (!attribute.isCollection() && !attribute.isAssociation()
                    && entityMetadata.getIndexProperties().keySet().contains(field.getName())) {
                String columnName = ((AbstractAttribute) attribute).getJPAColumnName();
                relationshipIndex.add(relationship, columnName, relationship.getProperty(columnName));
            }
        }
    }
}
public class Routes { /** * Removes a particular route from the collection of those that have been previously routed . * Search for a previously established routes using the given path and HTTP method , removing * any matches that are found . * @ param path the route path * @ param httpMethod the http method * @ return < tt > true < / tt > if this a matching route has been previously routed * @ throws IllegalArgumentException if < tt > path < / tt > is null or blank or if < tt > httpMethod < / tt > is null , blank * or an invalid HTTP method * @ since 2.2 */ public boolean remove ( String path , String httpMethod ) { } }
if ( StringUtils . isEmpty ( path ) ) { throw new IllegalArgumentException ( "path cannot be null or blank" ) ; } if ( StringUtils . isEmpty ( httpMethod ) ) { throw new IllegalArgumentException ( "httpMethod cannot be null or blank" ) ; } // Catches invalid input and throws IllegalArgumentException HttpMethod method = HttpMethod . valueOf ( httpMethod ) ; return removeRoute ( method , path ) ;
public class DashboardEndpoint { /** * GET application variants * @ param request the request * @ return list of { @ link ApplicationVariant } s */ @ GET @ Path ( "/warnings" ) @ Produces ( MediaType . APPLICATION_JSON ) public Response getVariantsWithWarnings ( @ Context HttpServletRequest request ) { } }
final List < ApplicationVariant > variantsWithWarnings = service . getSearchService ( ) . getVariantsWithWarnings ( ) ; return Response . ok ( variantsWithWarnings ) . build ( ) ;
public class IntervalStatsLogger { /** * Called by the timer . Writes the current status to the logger . */ @ Override public void intervalUpdated ( Interval caller ) { } }
output . out ( "===============================================================================" ) ; output . out ( "=== SNAPSHOT Interval " + interval . getName ( ) + " updated, Entity: " + id ) ; output . out ( "=== Timestamp: " + Date . currentDate ( ) + ", ServiceId: " + target . getProducerId ( ) ) ; output . out ( "===============================================================================" ) ; for ( IStats stat : target . getStats ( ) ) { output . out ( stat . toStatsString ( interval . getName ( ) ) ) ; } output . out ( "===============================================================================" ) ; output . out ( "== END: Interval " + interval . getName ( ) + ", Entity: " + id ) ; output . out ( "===============================================================================" ) ;
public class RethinkDBQuery {
    /**
     * Translates the JPQL query into a RethinkDB table scan (with optional
     * filter and pluck) and materializes each returned document as an entity.
     *
     * @param m entity metadata for the queried type
     * @param client the RethinkDB client
     * @return the populated entity list
     * @see com.impetus.kundera.query.QueryImpl#populateEntities(com.impetus.kundera.metadata.model.EntityMetadata, com.impetus.kundera.client.Client)
     */
    @Override
    protected List populateEntities(EntityMetadata m, Client client) {
        RethinkDB r = ((RethinkDBClient) client).getR();
        Connection conn = ((RethinkDBClient) client).getConnection();
        Filter filter = null;
        Pluck pluck;
        Table table = r.db(m.getSchema()).table(m.getTableName());
        Cursor cursor = null;
        List results = new ArrayList();
        MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata()
                .getMetamodel(m.getPersistenceUnit());
        EntityType entityType = metaModel.entity(m.getEntityClazz());
        JPQLExpression jpqlExp = kunderaQuery.getJpqlExpression();
        List<String> selectColumns = KunderaQueryUtils.getSelectColumns(jpqlExp);
        if (KunderaQueryUtils.hasWhereClause(jpqlExp)) {
            // add filters
            WhereClause whereClause = KunderaQueryUtils.getWhereClause(jpqlExp);
            Expression whereExp = whereClause.getConditionalExpression();
            filter = table.filter(parseAndBuildFilters(entityType, whereExp));
        }
        if (!selectColumns.isEmpty()) {
            // select by specific columns, consider last for plucking
            pluck = filter == null ? table.pluck(selectColumns) : filter.pluck(selectColumns);
            cursor = pluck.run(conn);
        }
        // No projection: run either the filtered query or a plain table scan.
        if (cursor == null) {
            cursor = (Cursor) (filter == null ? table.run(conn) : filter.run(conn));
        }
        // Each cursor row is a map of column -> value; build an entity per row.
        for (Object obj : cursor) {
            Object entity = KunderaCoreUtils.createNewInstance(m.getEntityClazz());
            buildEntityFromCursor(entity, (HashMap) obj, entityType);
            results.add(entity);
        }
        return results;
    }
}
public class SecureUtil { /** * Encrypts a secret key and salt with a public key . */ public static byte [ ] encryptBytes ( PublicKey key , byte [ ] secret , byte [ ] salt ) { } }
byte [ ] encrypt = new byte [ secret . length + salt . length ] ; for ( int ii = 0 ; ii < secret . length ; ii ++ ) { encrypt [ ii ] = secret [ ii ] ; } for ( int ii = 0 ; ii < salt . length ; ii ++ ) { encrypt [ secret . length + ii ] = salt [ ii ] ; } try { return getRSACipher ( key ) . doFinal ( encrypt ) ; } catch ( GeneralSecurityException gse ) { log . warning ( "Failed to encrypt bytes" , gse ) ; } return encrypt ;
public class Options {
    /**
     * Puts a {@link Long} value for the given option name.
     *
     * @param key the option name
     * @param value the {@link Long} value model
     * @return this options instance, for chaining
     */
    public Options putLong(String key, IModel<Long> value) {
        putOption(key, new LongOption(value));
        return this;
    }
}
public class PersistentPageFile { /** * Reads the page with the given id from this file . * @ param pageID the id of the page to be returned * @ return the page with the given pageId */ @ Override public P readPage ( int pageID ) { } }
try { countRead ( ) ; long offset = ( ( long ) ( header . getReservedPages ( ) + pageID ) ) * ( long ) pageSize ; byte [ ] buffer = new byte [ pageSize ] ; file . seek ( offset ) ; file . read ( buffer ) ; return byteArrayToPage ( buffer ) ; } catch ( IOException e ) { throw new RuntimeException ( "IOException occurred during reading of page " + pageID + "\n" , e ) ; }
public class SparseVector { /** * Calculate the cosine value between vectors . */ double cosine ( SparseVector vec1 , SparseVector vec2 ) { } }
double norm1 = vec1 . norm ( ) ; double norm2 = vec2 . norm ( ) ; double result = 0.0f ; if ( norm1 == 0 && norm2 == 0 ) { return result ; } else { double prod = inner_product ( vec1 , vec2 ) ; result = prod / ( norm1 * norm2 ) ; return Double . isNaN ( result ) ? 0.0f : result ; }
public class CPTaxCategoryServiceBaseImpl {
    /**
     * Sets the user remote service.
     *
     * @param userService the user remote service
     */
    public void setUserService(com.liferay.portal.kernel.service.UserService userService) {
        this.userService = userService;
    }
}
public class BytecodeInjectReactive {
    /**
     * Utility method returning the (inferred) classpath root that the given
     * class was loaded from.
     *
     * @param clazz the enclosing Class
     * @return the (inferred) classpath of clazz
     */
    public static String classpathFromClass(Class<?> clazz) {
        // Resource name of the .class file, e.g. "com/foo/Bar.class".
        final String resourceName = clazz.getPackage().getName().replaceAll("\\.", "/")
                + "/" + clazz.getSimpleName() + ".class";
        final String resourcePath = ClassLoader.getSystemClassLoader().getResource(resourceName).getPath();
        if (resourcePath.contains("!")) {
            // Jar URL: keep everything before "!" and strip the "file:" scheme.
            return resourcePath.substring(0, resourcePath.indexOf('!')).replace("file:", "");
        }
        // Directory on the classpath: strip the trailing resource name.
        return resourcePath.substring(0, resourcePath.indexOf(resourceName));
    }
}
public class DatagramStream { /** * Closes the underlying sockets and socket streams . */ public void close ( ) throws IOException { } }
DatagramSocket s = _s ; _s = null ; OutputStream os = _os ; _os = null ; InputStream is = _is ; _is = null ; try { if ( os != null ) os . close ( ) ; if ( is != null ) is . close ( ) ; } finally { if ( s != null ) s . close ( ) ; }
public class CcgParse { /** * Gets all dependency structures populated during parsing . * @ return */ public List < DependencyStructure > getAllDependencies ( ) { } }
List < DependencyStructure > deps = Lists . newArrayList ( ) ; if ( ! isTerminal ( ) ) { deps . addAll ( left . getAllDependencies ( ) ) ; deps . addAll ( right . getAllDependencies ( ) ) ; } deps . addAll ( dependencies ) ; return deps ;
public class ByteBufferWriter {
    /**
     * Overwrites a previously written 4-byte int at the given absolute index,
     * locating the target inside the chain of internal buffers. Handles the
     * case where the int straddles a buffer boundary by writing it byte by
     * byte in the configured byte order.
     *
     * @param index absolute byte offset across all buffers
     * @param value the int value to write
     * @return this writer, for chaining
     * @throws ArrayIndexOutOfBoundsException if index lies beyond the written data
     */
    public ByteBufferWriter putInt(final int index, int value) {
        int start = 0; // absolute offset of buffs[i]'s first byte
        ByteBuffer[] buffs = this.buffers;
        for (int i = 0; i < buffs.length; i++) {
            int pos = buffs[i].position();
            if (pos + start > index) {
                // r = number of bytes of the int that fit in buffs[i]
                int r = pos + start - index;
                if (r >= 4) {
                    // Whole int fits inside this buffer.
                    buffs[i].putInt(index - start, value);
                    return this;
                } else {
                    // Split write: decompose the int into 4 bytes honoring
                    // the configured endianness (b1 is written first).
                    byte b1 = bigEndian ? (byte) ((value >> 24) & 0xFF) : (byte) (value & 0xFF);
                    byte b2 = bigEndian ? (byte) ((value >> 16) & 0xFF) : (byte) ((value >> 8) & 0xFF);
                    byte b3 = bigEndian ? (byte) ((value >> 8) & 0xFF) : (byte) ((value >> 16) & 0xFF);
                    byte b4 = bigEndian ? (byte) (value & 0xFF) : (byte) ((value >> 24) & 0xFF);
                    // NOTE(review): these paths assume buffs[i + 1] exists and
                    // holds the remaining bytes — confirm callers only rewrite
                    // positions that were fully written before.
                    if (r == 3) {
                        buffs[i].put(index - start, b1);
                        buffs[i].put(index - start + 1, b2);
                        buffs[i].put(index - start + 2, b3);
                        buffs[i + 1].put(0, b4);
                    } else if (r == 2) {
                        buffs[i].put(index - start, b1);
                        buffs[i].put(index - start + 1, b2);
                        buffs[i + 1].put(0, b3);
                        buffs[i + 1].put(1, b4);
                    } else if (r == 1) {
                        buffs[i].put(index - start, b1);
                        buffs[i + 1].put(0, b2);
                        buffs[i + 1].put(1, b3);
                        buffs[i + 1].put(2, b4);
                    }
                    return this;
                }
            } else {
                // Target lies in a later buffer; advance the absolute offset.
                start += pos;
            }
        }
        throw new ArrayIndexOutOfBoundsException(index);
    }
}
public class Patterns { /** * Returns a { @ link Pattern } object that matches if the current character in the input satisfies { @ code predicate } , in * which case { @ code 1 } is returned as match length . */ public static Pattern isChar ( final CharPredicate predicate ) { } }
return new Pattern ( ) { @ Override public int match ( CharSequence src , int begin , int end ) { if ( begin >= end ) return MISMATCH ; else if ( predicate . isChar ( src . charAt ( begin ) ) ) return 1 ; else return MISMATCH ; } @ Override public String toString ( ) { return predicate . toString ( ) ; } } ;
public class ColorNames { /** * Replies the color name for the given color value . * < p > See the documentation of the { @ link # ColorNames } type for obtaining a list of the colors . * @ param colorValue the color value . * @ return the color name , or { @ code null } if the value does not correspond to an known color . */ @ Pure public static String getColorNameFromValue ( int colorValue ) { } }
for ( final Entry < String , Integer > entry : COLOR_MATCHES . entrySet ( ) ) { final int knownValue = entry . getValue ( ) . intValue ( ) ; if ( colorValue == knownValue ) { return entry . getKey ( ) ; } } return null ;
public class ApiOvhEmaildomain {
    /**
     * Get this object properties.
     *
     * REST: GET /email/domain/{domain}/responder/{account}
     *
     * @param domain [required] Name of your domain name
     * @param account [required] Name of account
     * @return the responder configuration for the account
     * @throws IOException if the HTTP call fails
     */
    public OvhResponder domain_responder_account_GET(String domain, String account) throws IOException {
        String qPath = "/email/domain/{domain}/responder/{account}";
        // Substitute the path template variables, execute, and unmarshal.
        StringBuilder sb = path(qPath, domain, account);
        String resp = exec(qPath, "GET", sb.toString(), null);
        return convertTo(resp, OvhResponder.class);
    }
}
public class DocumentRootImpl {
    /**
     * Sets the date-time parameter feature on this document root's mixed
     * feature map (EMF-generated accessor; do not hand-modify).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setDateTimeParameter(DateTimeParameterType newDateTimeParameter) {
        ((FeatureMap.Internal) getMixed()).set(BpsimPackage.Literals.DOCUMENT_ROOT__DATE_TIME_PARAMETER, newDateTimeParameter);
    }
}
public class Solver { /** * Find the model such that the equation * < center > * < code > * objectiveFunctions . getValue ( model ) = 0 * < / code > * < / center > * holds . * @ param objectsToCalibrate The set of parameterized objects to calibrate . * @ return A reference to a calibrated clone of the given model . * @ throws net . finmath . optimizer . SolverException Thrown if the underlying optimizer does not find a solution . */ public AnalyticModel getCalibratedModel ( Set < ParameterObject > objectsToCalibrate ) throws SolverException { } }
final ParameterAggregation < ParameterObject > parameterAggregate = new ParameterAggregation < > ( objectsToCalibrate ) ; // Set solver parameters final RandomVariable [ ] initialParameters ; // Apply parameter transformation to solver parameter space if ( parameterTransformation != null ) { initialParameters = parameterTransformation . getSolverParameter ( parameterAggregate . getParameter ( ) ) ; } else { initialParameters = parameterAggregate . getParameter ( ) ; } final RandomVariable [ ] zeros = new RandomVariable [ calibrationProducts . size ( ) ] ; final RandomVariable [ ] ones = new RandomVariable [ calibrationProducts . size ( ) ] ; final RandomVariable [ ] lowerBound = new RandomVariable [ initialParameters . length ] ; final RandomVariable [ ] upperBound = new RandomVariable [ initialParameters . length ] ; java . util . Arrays . fill ( zeros , new RandomVariableFromDoubleArray ( 0.0 ) ) ; java . util . Arrays . fill ( ones , new RandomVariableFromDoubleArray ( 1.0 ) ) ; java . util . Arrays . fill ( lowerBound , new RandomVariableFromDoubleArray ( Double . NEGATIVE_INFINITY ) ) ; java . util . Arrays . fill ( upperBound , new RandomVariableFromDoubleArray ( Double . POSITIVE_INFINITY ) ) ; StochasticOptimizer . ObjectiveFunction objectiveFunction = new StochasticOptimizer . ObjectiveFunction ( ) { @ Override public void setValues ( RandomVariable [ ] parameters , RandomVariable [ ] values ) throws SolverException { RandomVariable [ ] modelParameters = parameters ; try { if ( parameterTransformation != null ) { modelParameters = parameterTransformation . getParameter ( parameters ) ; // Copy back the parameter constrain to inform the optimizer System . arraycopy ( parameterTransformation . getSolverParameter ( modelParameters ) , 0 , parameters , 0 , parameters . length ) ; } Map < ParameterObject , RandomVariable [ ] > curvesParameterPairs = parameterAggregate . getObjectsToModifyForParameter ( modelParameters ) ; AnalyticModel modelClone = model . 
getCloneForParameter ( curvesParameterPairs ) ; for ( int i = 0 ; i < calibrationProducts . size ( ) ; i ++ ) { values [ i ] = calibrationProducts . get ( i ) . getValue ( evaluationTime , modelClone ) ; } if ( calibrationTargetValues != null ) { for ( int i = 0 ; i < calibrationTargetValues . size ( ) ; i ++ ) { values [ i ] . sub ( calibrationTargetValues . get ( i ) ) ; } } } catch ( CloneNotSupportedException e ) { throw new SolverException ( e ) ; } } } ; if ( optimizerFactory == null ) { int maxThreads = Math . min ( 2 * Math . max ( Runtime . getRuntime ( ) . availableProcessors ( ) , 1 ) , initialParameters . length ) ; optimizerFactory = new StochasticPathwiseOptimizerFactoryLevenbergMarquardt ( maxIterations , calibrationAccuracy , maxThreads ) ; } StochasticOptimizer optimizer = optimizerFactory . getOptimizer ( objectiveFunction , initialParameters , lowerBound , upperBound , zeros ) ; optimizer . run ( ) ; iterations = optimizer . getIterations ( ) ; RandomVariable [ ] bestParameters = optimizer . getBestFitParameters ( ) ; if ( parameterTransformation != null ) { bestParameters = parameterTransformation . getParameter ( bestParameters ) ; } AnalyticModel calibratedModel = null ; try { Map < ParameterObject , RandomVariable [ ] > curvesParameterPairs = parameterAggregate . getObjectsToModifyForParameter ( bestParameters ) ; calibratedModel = model . getCloneForParameter ( curvesParameterPairs ) ; } catch ( CloneNotSupportedException e ) { throw new SolverException ( e ) ; } accuracy = 0.0 ; for ( int i = 0 ; i < calibrationProducts . size ( ) ; i ++ ) { double error = calibrationProducts . get ( i ) . getValue ( evaluationTime , calibratedModel ) . getStandardDeviation ( ) ; if ( calibrationTargetValues != null ) { error -= calibrationTargetValues . get ( i ) ; } accuracy += error * error ; } accuracy = Math . sqrt ( accuracy / calibrationProducts . size ( ) ) ; return calibratedModel ;
public class MultiChangeBuilder {
    /**
     * Replaces the characters in the range [start, end) with the given
     * rich-text document. The replacement is snapshotted as a read-only
     * document before being applied.
     *
     * @param start start of the replaced range (inclusive)
     * @param end end of the replaced range (exclusive)
     * @param replacement the document to insert in place of the range
     * @return this builder, for chaining
     */
    public MultiChangeBuilder<PS, SEG, S> replace(int start, int end, StyledDocument<PS, SEG, S> replacement) {
        return relativeReplace(start, end, ReadOnlyStyledDocument.from(replacement));
    }
}
public class CmsDriverManager { /** * Writes a resource to the OpenCms VFS , including it ' s content . < p > * Applies only to resources of type < code > { @ link CmsFile } < / code > * i . e . resources that have a binary content attached . < p > * Certain resource types might apply content validation or transformation rules * before the resource is actually written to the VFS . The returned result * might therefore be a modified version from the provided original . < p > * @ param dbc the current database context * @ param resource the resource to apply this operation to * @ return the written resource ( may have been modified ) * @ throws CmsException if something goes wrong * @ see CmsObject # writeFile ( CmsFile ) * @ see I _ CmsResourceType # writeFile ( CmsObject , CmsSecurityManager , CmsFile ) */ public CmsFile writeFile ( CmsDbContext dbc , CmsFile resource ) throws CmsException { } }
resource . setUserLastModified ( dbc . currentUser ( ) . getId ( ) ) ; resource . setContents ( resource . getContents ( ) ) ; // to be sure the content date is updated getVfsDriver ( dbc ) . writeResource ( dbc , dbc . currentProject ( ) . getUuid ( ) , resource , UPDATE_RESOURCE_STATE ) ; byte [ ] contents = resource . getContents ( ) ; getVfsDriver ( dbc ) . writeContent ( dbc , resource . getResourceId ( ) , contents ) ; // log it log ( dbc , new CmsLogEntry ( dbc , resource . getStructureId ( ) , CmsLogEntryType . RESOURCE_CONTENT_MODIFIED , new String [ ] { resource . getRootPath ( ) } ) , false ) ; // read the file back from db resource = new CmsFile ( readResource ( dbc , resource . getStructureId ( ) , CmsResourceFilter . ALL ) ) ; resource . setContents ( contents ) ; deleteRelationsWithSiblings ( dbc , resource ) ; // update the cache m_monitor . clearResourceCache ( ) ; Map < String , Object > data = new HashMap < String , Object > ( 2 ) ; data . put ( I_CmsEventListener . KEY_RESOURCE , resource ) ; data . put ( I_CmsEventListener . KEY_CHANGE , new Integer ( CHANGED_CONTENT ) ) ; OpenCms . fireCmsEvent ( new CmsEvent ( I_CmsEventListener . EVENT_RESOURCE_MODIFIED , data ) ) ; return resource ;
public class AbstractStaticHistogram { /** * Compute the bin number . Has a special case for rounding max down to the * last bin . * @ param coord Coordinate * @ return bin number */ protected int getBinNr ( double coord ) { } }
if ( Double . isInfinite ( coord ) || Double . isNaN ( coord ) ) { throw new UnsupportedOperationException ( "Encountered non-finite value in Histogram: " + coord ) ; } if ( coord == max ) { // System . err . println ( " Triggered special case : " + ( Math . floor ( ( coord - // base ) / binsize ) + offset ) + " vs . " + ( size - 1 ) ) ; return size - 1 ; } return ( int ) Math . floor ( ( coord - base ) / binsize ) + offset ;
public class DSRemotingClient {
    /**
     * Encodes a remoting method call into an AMF message buffer.
     *
     * Wire layout: version (short), header count + headers, body count (1),
     * method name, response URI, then the length-prefixed serialized
     * parameters.
     *
     * @param method Remote method being called
     * @param params Method parameters (a null array is encoded as AMF null)
     * @return Byte buffer with data to perform remoting call
     */
    private IoBuffer encodeInvoke(String method, Object[] params) {
        log.debug("RemotingClient encodeInvoke - method: {} params: {}", method, params);
        IoBuffer result = IoBuffer.allocate(1024);
        result.setAutoExpand(true);
        // force version 3
        result.putShort((short) 3);
        // Headers: each is name, must-understand flag, then a length-prefixed
        // serialized value.
        Collection<RemotingHeader> hdr = headers.values();
        result.putShort((short) hdr.size());
        for (RemotingHeader header : hdr) {
            Output.putString(result, header.getName());
            result.put(header.getMustUnderstand() ? (byte) 0x01 : (byte) 0x00);
            // Serialize the header value into a scratch buffer to learn its size.
            IoBuffer tmp = IoBuffer.allocate(1024);
            tmp.setAutoExpand(true);
            Output tmpOut = new Output(tmp);
            Serializer.serialize(tmpOut, header.getValue());
            tmp.flip();
            // Size of header data
            result.putInt(tmp.limit());
            // Header data
            result.put(tmp);
            tmp.free();
            tmp = null;
        }
        // One body
        result.putShort((short) 1);
        // Method name
        Output.putString(result, method);
        // Client callback for response
        // Output.putString(result, "");
        // responseURI — a unique per-call URI built from a running counter.
        Output.putString(result, "/" + sequenceCounter++);
        // Serialize parameters
        IoBuffer tmp = IoBuffer.allocate(1024);
        tmp.setAutoExpand(true);
        Output tmpOut = new Output(tmp);
        // if the params are null send the NULL AMF type
        // this should fix APPSERVER-296
        if (params == null) {
            tmpOut.writeNull();
        } else {
            tmpOut.writeArray(params);
        }
        tmp.flip();
        // Store size and parameters
        result.putInt(tmp.limit());
        result.put(tmp);
        tmp.free();
        tmp = null;
        // Prepare the buffer for reading by the caller.
        result.flip();
        return result;
    }
}
public class FilterBasedTriggeringPolicy {
    /**
     * {@inheritDoc}
     */
    public boolean isTriggeringEvent(LoggingEvent event) {
        // With no configured filters, never trigger a rollover — triggering on
        // every event would give each logging event its own file.
        // (The original comment said "always return true" here, contradicting
        // the code; the code's behavior is the sensible one.)
        if (headFilter == null) {
            return false;
        }
        // Walk the filter chain: DENY suppresses the rollover, ACCEPT forces
        // it, NEUTRAL falls through to the next filter.
        for (Filter f = headFilter; f != null; f = f.next) {
            switch (f.decide(event)) {
                case Filter.DENY:
                    return false;
                case Filter.ACCEPT:
                    return true;
            }
        }
        // All filters were neutral: trigger by default.
        return true;
    }
}
public class SqlBuilder {
    /**
     * Resolves references to other SQL statements inside the given SQL object.
     * Each <code>${name}</code> token is recursively replaced with the
     * (already resolved) content of the named sub-SQL, then choose-blocks and
     * variables are substituted.
     *
     * @param sqlObj the SQL object to build
     * @param subSQLs referenced sub-SQL statements / substitution values
     * @return the fully substituted SQL text
     */
    private String replaceSQL(SqlObject sqlObj, Map<String, Object> subSQLs) {
        String query = sqlObj.getValue();
        if (query != null) {
            String regexs = "[$][{](\\S*)[}]"; // pattern matching ${name} sub-clause references
            Pattern regex = Pattern.compile(regexs);
            Matcher regexMatcher = regex.matcher(query);
            // NOTE(review): the matcher iterates over the ORIGINAL query text
            // while replacements are applied to the evolving copy — confirm
            // nested references cannot re-introduce unresolved tokens.
            while (regexMatcher.find()) {
                String subSQLName = regexMatcher.group(1);
                SqlObject subSQL = this.sqlMap.get(subSQLName);
                if (subSQL != null) {
                    // Recursively resolve the referenced statement first.
                    String subSQLContent = replaceSQL(subSQL, subSQLs);
                    query = query.replace(regexMatcher.group(0), subSQLContent);
                    sqlObj.setValue(query);
                }
            }
            query = replaceChoose(sqlObj, subSQLs);
            query = replaceVariables(query, subSQLs);
        }
        return query;
    }
}
public class OidcUtil {
    /**
     * Calculates the cookie value for the OIDC nonce: a digest binding the
     * nonce and state values to this client's secret, so the cookie cannot be
     * forged without knowing the secret.
     *
     * @param nonceValue the nonce value
     * @param state the OIDC state value
     * @param clientConfig client configuration supplying the client secret
     * @return the digest to store in the nonce cookie
     */
    public static String createNonceCookieValue(String nonceValue, String state, ConvergedClientConfig clientConfig) {
        return HashUtils.digest(nonceValue + state + clientConfig.getClientSecret());
    }
}
public class SipSessionImpl { /** * Perform the internal processing required to passivate * this session . */ public void passivate ( ) { } }
// Notify ActivationListeners SipSessionEvent event = null ; if ( this . sipSessionAttributeMap != null ) { Set < String > keySet = getAttributeMap ( ) . keySet ( ) ; for ( String key : keySet ) { Object attribute = getAttributeMap ( ) . get ( key ) ; if ( attribute instanceof SipSessionActivationListener ) { if ( event == null ) event = new SipSessionActivationEvent ( this , SessionActivationNotificationCause . PASSIVATION ) ; try { ( ( SipSessionActivationListener ) attribute ) . sessionWillPassivate ( event ) ; } catch ( Throwable t ) { logger . error ( "SipSessionActivationListener threw exception" , t ) ; } } } }
public class ConstructorWriterImpl { /** * { @ inheritDoc } */ @ Override protected void addNavDetailLink ( boolean link , Content liNav ) { } }
if ( link ) { liNav . addContent ( writer . getHyperLink ( SectionName . CONSTRUCTOR_DETAIL , contents . navConstructor ) ) ; } else { liNav . addContent ( contents . navConstructor ) ; }
public class Rollbar {
    /**
     * Initialize the singleton instance of Rollbar. Delegates to the full
     * initializer with no custom configuration provider.
     *
     * @param context Android context to use
     * @param accessToken a Rollbar access token with at least post_client_item scope
     * @param environment the environment to set for items
     * @param registerExceptionHandler whether or not to handle uncaught exceptions
     * @param includeLogcat whether or not to include logcat output with items
     * @return the managed instance of Rollbar
     */
    public static Rollbar init(Context context, String accessToken, String environment,
            boolean registerExceptionHandler, boolean includeLogcat) {
        return init(context, accessToken, environment, registerExceptionHandler, includeLogcat, null);
    }
}
public class HttpServer {
    /**
     * Add a servlet in the server: registers the servlet and maps the
     * configured filters onto its path.
     *
     * @param name The name of the servlet (can be passed as null)
     * @param pathSpec The path spec for the servlet
     * @param clazz The servlet class
     */
    public void addServlet(String name, String pathSpec, Class<? extends HttpServlet> clazz) {
        addInternalServlet(name, pathSpec, clazz);
        // Ensure existing filters also apply to the new servlet's path.
        addFilterPathMapping(pathSpec, webAppContext);
    }
}
public class PluginProperties { /** * Loads the Graylog plugin properties file from the given JAR file . * The path to the properties file resource inside the JAR file is stored in the " Graylog - Plugin - Properties - Path " * attribute of the JAR manifest . ( Example : { @ code org . graylog . plugins . graylog - plugin - map - widget } ) * If the plugin properties file does not exist or cannot be found ( like in older plugins ) a default * { @ link PluginProperties } object will be returned . * @ param filename path to the JAR file * @ return the plugin properties object */ public static PluginProperties fromJarFile ( final String filename ) { } }
final Properties properties = new Properties ( ) ; try { final JarFile jarFile = new JarFile ( requireNonNull ( filename ) ) ; final Optional < String > propertiesPath = getPropertiesPath ( jarFile ) ; if ( propertiesPath . isPresent ( ) ) { LOG . debug ( "Loading <{}> from <{}>" , propertiesPath . get ( ) , filename ) ; final ZipEntry entry = jarFile . getEntry ( propertiesPath . get ( ) ) ; if ( entry != null ) { properties . load ( jarFile . getInputStream ( entry ) ) ; } else { LOG . debug ( "Plugin properties <{}> are missing in <{}>" , propertiesPath . get ( ) , filename ) ; } } } catch ( Exception e ) { LOG . debug ( "Unable to load properties from plugin <{}>" , filename , e ) ; } return new PluginProperties ( properties ) ;
public class PhaseApplication {
    /**
     * Sets the phase-agnostic runtime configuration.
     * Each command-line flag that is present is copied into the given runtime
     * configuration; absent flags leave the configuration's defaults untouched
     * (the setters are deliberately NOT called with false).
     *
     * @param rc the runtime configuration to populate from the parsed options
     */
    private void setOptions(RuntimeConfiguration rc) {
        if (hasOption(SHORT_OPT_TIME)) {
            rc.setTime(true);
        }
        if (hasOption(LONG_OPT_DEBUG)) {
            rc.setDebug(true);
        }
        if (hasOption(LONG_OPT_VERBOSE)) {
            rc.setVerbose(true);
        }
        // "Pedantic" mode promotes warnings to hard errors.
        if (hasOption(LONG_OPT_PEDANTIC)) {
            rc.setWarningsAsErrors(true);
        }
    }
}
public class ConstraintDetailMarshaller {
    /**
     * Marshall the given parameter object.
     * Emits each ConstraintDetail field to the protocol marshaller using its
     * corresponding binding descriptor; the emission order is part of the
     * wire format and must not be changed.
     *
     * @param constraintDetail the object to marshall; must not be null
     * @param protocolMarshaller the target receiving the field values
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ConstraintDetail constraintDetail, ProtocolMarshaller protocolMarshaller) {
        if (constraintDetail == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(constraintDetail.getConstraintId(), CONSTRAINTID_BINDING);
            protocolMarshaller.marshall(constraintDetail.getType(), TYPE_BINDING);
            protocolMarshaller.marshall(constraintDetail.getDescription(), DESCRIPTION_BINDING);
            protocolMarshaller.marshall(constraintDetail.getOwner(), OWNER_BINDING);
        } catch (Exception e) {
            // Wrap any low-level failure in the SDK's client exception type,
            // preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class XPathBuilder { /** * < p > < b > Used for finding element process ( to generate xpath address ) < / b > < / p > * @ param container parent containing element . * @ param < T > the element which calls this method * @ return this element */ @ SuppressWarnings ( "unchecked" ) public < T extends XPathBuilder > T setContainer ( WebLocator container ) { } }
this . container = container ; return ( T ) this ;
public class vpntrafficpolicy_aaagroup_binding { /** * Use this API to fetch vpntrafficpolicy _ aaagroup _ binding resources of given name . */ public static vpntrafficpolicy_aaagroup_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
vpntrafficpolicy_aaagroup_binding obj = new vpntrafficpolicy_aaagroup_binding ( ) ; obj . set_name ( name ) ; vpntrafficpolicy_aaagroup_binding response [ ] = ( vpntrafficpolicy_aaagroup_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class CFGFactory {
    /**
     * Builds and refines the control-flow graph for the given method.
     * Pipeline: build raw CFG -> prune failed-assertion edges -> prune
     * infeasible exception edges -> prune edges after unconditional throwers
     * -> drop blocks made unreachable by pruning. The CFG is put into the
     * analysis cache eagerly so recursive analyses see the raw graph.
     *
     * @param analysisCache cache used to obtain/record per-method analyses
     * @param descriptor the method to analyze
     * @return the refined CFG
     * @throws CheckedAnalysisException if the method cannot be analyzed
     *         (e.g. no MethodGen is available)
     * @see edu.umd.cs.findbugs.classfile.IAnalysisEngine#analyze(edu.umd.cs.findbugs.classfile.IAnalysisCache, java.lang.Object)
     */
    @Override
    public CFG analyze(IAnalysisCache analysisCache, MethodDescriptor descriptor) throws CheckedAnalysisException {
        // Construct the CFG in its raw form
        MethodGen methodGen = analysisCache.getMethodAnalysis(MethodGen.class, descriptor);
        if (methodGen == null) {
            // No bytecode available (native/abstract/skipped): report and bail out.
            JavaClass jclass = analysisCache.getClassAnalysis(JavaClass.class, descriptor.getClassDescriptor());
            Method method = analysisCache.getMethodAnalysis(Method.class, descriptor);
            JavaClassAndMethod javaClassAndMethod = new JavaClassAndMethod(jclass, method);
            AnalysisContext.currentAnalysisContext().getLookupFailureCallback().reportSkippedAnalysis(descriptor);
            throw new MethodUnprofitableException(javaClassAndMethod);
        }
        CFGBuilder cfgBuilder = CFGBuilderFactory.create(descriptor, methodGen);
        cfgBuilder.build();
        CFG cfg = cfgBuilder.getCFG();

        // Mark as busy while we're pruning the CFG.
        cfg.setFlag(CFG.BUSY);

        // Important: eagerly put the CFG in the analysis cache.
        // Recursively performed analyses required to prune the CFG,
        // such as TypeAnalysis, will operate on the raw CFG.
        analysisCache.eagerlyPutMethodAnalysis(CFG.class, descriptor, cfg);

        // Record method name and signature for informational purposes
        cfg.setMethodName(SignatureConverter.convertMethodSignature(methodGen));
        cfg.setMethodGen(methodGen);

        if (CFGFactory.DEBUG_CFG) {
            String methodId = methodGen.getClassName() + "." + methodGen.getName() + ":" + methodGen.getSignature();
            System.out.println("CC: getting refined CFG for " + methodId);
        }
        if (ClassContext.DEBUG) {
            String methodId = methodGen.getClassName() + "." + methodGen.getName() + ":" + methodGen.getSignature();
            System.out.println("ClassContext: request to prune " + methodId);
        }

        // Remove CFG edges corresponding to failed assertions.
        boolean changed = false;
        boolean ASSUME_ASSERTIONS_ENABLED = true;
        if (ASSUME_ASSERTIONS_ENABLED) {
            LinkedList<Edge> edgesToRemove = new LinkedList<>();
            for (Iterator<Edge> i = cfg.edgeIterator(); i.hasNext();) {
                Edge e = i.next();
                if (e.getType() == EdgeTypes.IFCMP_EDGE) {
                    try {
                        // javac compiles `assert` as: GETSTATIC $assertionsDisabled; IFNE <skip>.
                        // An IFCMP edge whose source ends in that pattern is the
                        // "assertion failed" path and can be pruned.
                        BasicBlock source = e.getSource();
                        InstructionHandle last = source.getLastInstruction();
                        Instruction lastInstruction = last.getInstruction();
                        InstructionHandle prev = last.getPrev();
                        Instruction prevInstruction = prev.getInstruction();
                        if (prevInstruction instanceof GETSTATIC && lastInstruction instanceof IFNE) {
                            GETSTATIC getStatic = (GETSTATIC) prevInstruction;
                            if ("$assertionsDisabled".equals(getStatic.getFieldName(methodGen.getConstantPool()))
                                    && "Z".equals(getStatic.getSignature(methodGen.getConstantPool()))) {
                                edgesToRemove.add(e);
                            }
                        }
                    } catch (RuntimeException exception) {
                        // e.g. null prev handle on a one-instruction block; safely skipped.
                        assert true; // ignore it
                    }
                }
            }
            if (edgesToRemove.size() > 0) {
                changed = true;
                for (Edge e : edgesToRemove) {
                    cfg.removeEdge(e);
                }
            }
        }
        cfg.setFlag(CFG.PRUNED_FAILED_ASSERTION_EDGES);

        final boolean PRUNE_INFEASIBLE_EXCEPTION_EDGES =
                AnalysisContext.currentAnalysisContext().getBoolProperty(AnalysisFeatures.ACCURATE_EXCEPTIONS);
        if (PRUNE_INFEASIBLE_EXCEPTION_EDGES && !cfg.isFlagSet(CFG.PRUNED_INFEASIBLE_EXCEPTIONS)) {
            try {
                TypeDataflow typeDataflow = analysisCache.getMethodAnalysis(TypeDataflow.class, descriptor);
                // Exception edge pruning based on ExceptionSets.
                // Note: this is quite slow.
                PruneInfeasibleExceptionEdges pruner = new PruneInfeasibleExceptionEdges(cfg, methodGen, typeDataflow);
                pruner.execute();
                changed = changed || pruner.wasCFGModified();
            } catch (MissingClassException e) {
                AnalysisContext.currentAnalysisContext().getLookupFailureCallback()
                        .reportMissingClass(e.getClassNotFoundException());
            } catch (DataflowAnalysisException e) {
                AnalysisContext.currentAnalysisContext().getLookupFailureCallback()
                        .logError("unable to extract type analysis", e);
            } catch (ClassNotFoundException e) {
                AnalysisContext.currentAnalysisContext().getLookupFailureCallback().reportMissingClass(e);
            }
        }
        cfg.setFlag(CFG.PRUNED_INFEASIBLE_EXCEPTIONS);

        final boolean PRUNE_UNCONDITIONAL_EXCEPTION_THROWER_EDGES =
                !AnalysisContext.currentAnalysisContext().getBoolProperty(AnalysisFeatures.CONSERVE_SPACE);
        if (PRUNE_UNCONDITIONAL_EXCEPTION_THROWER_EDGES && !cfg.isFlagSet(CFG.PRUNED_UNCONDITIONAL_THROWERS)) {
            try {
                JavaClass jclass = analysisCache.getClassAnalysis(JavaClass.class, descriptor.getClassDescriptor());
                Method method = analysisCache.getMethodAnalysis(Method.class, descriptor);
                ConstantPoolGen cpg = analysisCache.getClassAnalysis(ConstantPoolGen.class, descriptor.getClassDescriptor());
                TypeDataflow typeDataflow = analysisCache.getMethodAnalysis(TypeDataflow.class, descriptor);
                // Remove normal-return edges after calls to methods that always throw.
                PruneUnconditionalExceptionThrowerEdges pruner = new PruneUnconditionalExceptionThrowerEdges(
                        jclass, method, methodGen, cfg, cpg, typeDataflow, AnalysisContext.currentAnalysisContext());
                pruner.execute();
                if (pruner.wasCFGModified()) {
                    changed = true;
                }
            } catch (DataflowAnalysisException e) {
                AnalysisContext.logError(
                        "Error pruning normal return edges for unconditionally throwing methods for " + descriptor, e);
            }
        }
        cfg.setFlag(CFG.PRUNED_UNCONDITIONAL_THROWERS);

        // Now we are done with the CFG refining process
        cfg.setFlag(CFG.REFINED);
        cfg.clearFlag(CFG.BUSY);

        // If the CFG changed as a result of pruning, purge all analysis results
        // for the method: they were computed against the raw graph. Also drop
        // any basic blocks that pruning made unreachable.
        if (changed) {
            DepthFirstSearch dfs = new DepthFirstSearch(cfg);
            dfs.search();
            Collection<BasicBlock> unreachable = dfs.unvisitedVertices();
            if (!unreachable.isEmpty()) {
                if (DEBUG_CFG) {
                    System.out.println("Unreachable blocks");
                }
                for (BasicBlock b : unreachable) {
                    if (DEBUG_CFG) {
                        System.out.println(" removing " + b);
                    }
                    cfg.removeVertex(b);
                }
            }
            Global.getAnalysisCache().purgeMethodAnalyses(descriptor);
        }
        return cfg;
    }
}
public class Functional { /** * Applies the given functor to all elements in the input set . * @ param set * the set to which the functor will be applied . * @ param state * an state variable , that will be used as the return vale for the iteration . * @ param functor * a function to be applied to all elements in the set . * @ return * the state object after the processing . */ public static final < S , E > S forEach ( Set < E > set , S state , Fx < S , E > functor ) { } }
return new FunctionalSet < S , E > ( set ) . forEach ( state , functor ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcColour ( ) { } }
if ( ifcColourEClass == null ) { ifcColourEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 942 ) ; } return ifcColourEClass ;
public class SimpleDistanceConstraint { /** * Inverts the < em > from < / em > and < em > to < / em > reference of this constraints and updates * the { @ link # minimum } and { @ link # maximum } bounds accordingly . * @ return an inverted copy of this constraint . */ public final SimpleDistanceConstraint invert ( ) { } }
SimpleDistanceConstraint newConstraint = new SimpleDistanceConstraint ( ) ; newConstraint . setMinimum ( - this . getMaximum ( ) ) ; newConstraint . setMaximum ( - this . getMinimum ( ) ) ; newConstraint . setFrom ( this . getTo ( ) ) ; newConstraint . setTo ( this . getFrom ( ) ) ; return newConstraint ;
public class HawkbitCommonUtil { /** * Remove the prefix from text . * @ param text * name * @ param prefix * text to be removed * @ return String name */ public static String removePrefix ( final String text , final String prefix ) { } }
if ( text != null ) { return text . replaceFirst ( prefix , "" ) ; } return null ;
public class FilteredPropertyAccessor { /** * FactoryMethod that creates a filtered property accessor by decorating a given accessor with a class filter . * @ param pa to be decorated * @ param filter Class to be filtered , must extend from R . * @ param < D > Domain of the original accessor * @ param < R > Range of the original accessor * @ return A filtered accessor . */ public static < D extends BioPAXElement , R > PropertyAccessor < D , R > create ( PropertyAccessor < D , R > pa , Class filter ) { } }
return new FilteredPropertyAccessor < D , R > ( pa , filter ) ;
public class SolutionListUtils { /** * Removes a number of solutions from a list * @ param solutionList The list of solutions * @ param numberOfSolutionsToRemove */ public static < S > void removeSolutionsFromList ( List < S > solutionList , int numberOfSolutionsToRemove ) { } }
if ( solutionList . size ( ) < numberOfSolutionsToRemove ) { throw new JMetalException ( "The list size (" + solutionList . size ( ) + ") is lower than " + "the number of solutions to remove (" + numberOfSolutionsToRemove + ")" ) ; } for ( int i = 0 ; i < numberOfSolutionsToRemove ; i ++ ) { solutionList . remove ( 0 ) ; }
public class ProtectionMarshaller {
    /**
     * Marshall the given parameter object.
     * Emits each Protection field to the protocol marshaller using its
     * corresponding binding descriptor; the emission order is part of the
     * wire format and must not be changed.
     *
     * @param protection the object to marshall; must not be null
     * @param protocolMarshaller the target receiving the field values
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(Protection protection, ProtocolMarshaller protocolMarshaller) {
        if (protection == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(protection.getId(), ID_BINDING);
            protocolMarshaller.marshall(protection.getName(), NAME_BINDING);
            protocolMarshaller.marshall(protection.getResourceArn(), RESOURCEARN_BINDING);
        } catch (Exception e) {
            // Wrap any low-level failure in the SDK's client exception type,
            // preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class BoundedList { /** * Adds an element at the tail . Removes an object from the head if capacity is exceeded * @ param obj The object to be added */ public boolean add ( T obj ) { } }
if ( obj == null ) return false ; while ( size ( ) >= max_capacity && size ( ) > 0 ) { poll ( ) ; } return super . add ( obj ) ;
public class Sha1 {
    /**
     * Updates the digest using the specified array of bytes,
     * starting at the specified offset.
     * Input length can be any size. May require internal buffering,
     * if input blocks are not multiple of 64 bytes.
     * Overrides the protected abstract method of
     * java.security.MessageDigestSpi.
     *
     * @param input the array of bytes to use for the update.
     * @param offset the offset to start from in the array of bytes.
     * @param len the number of bytes to use, starting at offset.
     * @throws ArrayIndexOutOfBoundsException if offset/len do not describe a
     *         valid range of the input array
     */
    public void engineUpdate(byte[] input, int offset, int len) {
        // NOTE(review): `offset + len` can overflow int for extreme values and
        // slip past this bounds check — confirm callers never pass such sizes.
        if (offset >= 0 && len >= 0 && offset + len <= input.length) {
            bytes += len; // running total of bytes fed into the digest
            /* Terminate the previous block. */
            // 'padding' bytes are buffered from an earlier partial block; top
            // the buffer up to a full 64-byte block and consume it first.
            int padlen = 64 - padding;
            if (padding > 0 && len >= padlen) {
                System.arraycopy(input, offset, pad, padding, padlen);
                computeBlock(pad, 0);
                padding = 0;
                offset += padlen;
                len -= padlen;
            }
            /* Loop on large sets of complete blocks. */
            // Manually unrolled: eight 64-byte blocks (512 bytes) per iteration.
            while (len >= 512) {
                computeBlock(input, offset);
                computeBlock(input, offset + 64);
                computeBlock(input, offset + 128);
                computeBlock(input, offset + 192);
                computeBlock(input, offset + 256);
                computeBlock(input, offset + 320);
                computeBlock(input, offset + 384);
                computeBlock(input, offset + 448);
                offset += 512;
                len -= 512;
            }
            /* Loop on remaining complete blocks. */
            while (len >= 64) {
                computeBlock(input, offset);
                offset += 64;
                len -= 64;
            }
            /* remaining bytes kept for next block. */
            if (len > 0) {
                System.arraycopy(input, offset, pad, padding, len);
                padding += len;
            }
            return;
        }
        throw new ArrayIndexOutOfBoundsException(offset);
    }
}
public class BoneCPConfig { /** * Sets the time to wait when close connection watch threads are enabled . 0 = wait forever . * @ param closeConnectionWatchTimeout the watchTimeout to set * @ param timeUnit Time granularity */ public void setCloseConnectionWatchTimeout ( long closeConnectionWatchTimeout , TimeUnit timeUnit ) { } }
this . closeConnectionWatchTimeoutInMs = TimeUnit . MILLISECONDS . convert ( closeConnectionWatchTimeout , timeUnit ) ;
public class AbstractQueryBuilderFactory { /** * add filter before * @ param filter * @ param beforeFilter */ public void addFilterBefore ( IRuleFilter filter , Class < ? extends IRuleFilter > beforeFilter ) { } }
int index = getIndexOfClass ( filters , beforeFilter ) ; if ( index == - 1 ) { throw new FilterAddException ( "filter " + beforeFilter . getSimpleName ( ) + " has not been added" ) ; } filters . add ( index , filter ) ;
public class BidiOrder { /** * Set resultTypes from start up to ( but not including ) limit to newType . */ private void setTypes ( int start , int limit , byte newType ) { } }
for ( int i = start ; i < limit ; ++ i ) { resultTypes [ i ] = newType ; }
public class ASrvOrm { /** * < p > Load ORM configuration from files . < / p > * @ param pDirName properties base dir name e . g . " beige - orm " * @ throws Exception - any exception */ public final void loadPropertiesBase ( final String pDirName ) throws Exception { } }
this . propertiesBase = new PropertiesBase ( ) ; propertiesBase . setDirectory ( pDirName ) ; this . propertiesBase . setJdbcDriverClass ( this . mngSettings . getAppSettings ( ) . get ( PropertiesBase . KEY_JDBC_DRIVER_CLASS ) ) ; this . propertiesBase . setDatabaseName ( this . mngSettings . getAppSettings ( ) . get ( PropertiesBase . KEY_DATABASE_NAME ) ) ; this . propertiesBase . setDataSourceClassName ( this . mngSettings . getAppSettings ( ) . get ( PropertiesBase . KEY_DATASOURCE_CLASS ) ) ; this . propertiesBase . setUserName ( this . mngSettings . getAppSettings ( ) . get ( PropertiesBase . KEY_USER_NAME ) ) ; this . propertiesBase . setUserPassword ( this . mngSettings . getAppSettings ( ) . get ( PropertiesBase . KEY_USER_PASSWORD ) ) ; this . propertiesBase . setDatabaseUrl ( this . mngSettings . getAppSettings ( ) . get ( PropertiesBase . KEY_DATABASE_URL ) ) ; String currDir = System . getProperty ( "user.dir" ) ; if ( this . propertiesBase . getDatabaseName ( ) != null && this . propertiesBase . getDatabaseName ( ) . contains ( WORD_CURRENT_DIR ) ) { this . propertiesBase . setDatabaseName ( this . propertiesBase . getDatabaseName ( ) . replace ( WORD_CURRENT_DIR , currDir + File . separator ) ) ; } else if ( this . propertiesBase . getDatabaseName ( ) != null && this . propertiesBase . getDatabaseName ( ) . contains ( WORD_CURRENT_PARENT_DIR ) ) { File fcd = new File ( currDir ) ; this . propertiesBase . setDatabaseName ( this . propertiesBase . getDatabaseName ( ) . replace ( WORD_CURRENT_PARENT_DIR , fcd . getParent ( ) + File . separator ) ) ; } if ( this . propertiesBase . getDatabaseUrl ( ) != null && this . propertiesBase . getDatabaseUrl ( ) . contains ( WORD_CURRENT_DIR ) ) { this . propertiesBase . setDatabaseUrl ( this . propertiesBase . getDatabaseUrl ( ) . replace ( WORD_CURRENT_DIR , currDir + File . separator ) ) ; } else if ( this . propertiesBase . getDatabaseUrl ( ) != null && this . propertiesBase . getDatabaseUrl ( ) . 
contains ( WORD_CURRENT_PARENT_DIR ) ) { File fcd = new File ( currDir ) ; this . propertiesBase . setDatabaseUrl ( this . propertiesBase . getDatabaseUrl ( ) . replace ( WORD_CURRENT_PARENT_DIR , fcd . getParent ( ) + File . separator ) ) ; }
public class JmsSpout {
    /**
     * <code>javax.jms.MessageListener</code> implementation.
     * Stores the JMS message in an internal queue for processing
     * by the <code>nextTuple()</code> method.
     *
     * @param msg the incoming JMS message to enqueue
     */
    public void onMessage(Message msg) {
        try {
            LOG.debug("Queuing msg [" + msg.getJMSMessageID() + "]");
        } catch (JMSException e) {
            // Deliberate best-effort: getJMSMessageID() may throw, but the id
            // is only needed for the debug log line, so failures are ignored
            // and the message is still queued below.
        }
        this.queue.offer(msg);
    }
}
public class ProcessCommunicatorImpl { /** * / * ( non - Javadoc ) * @ see tuwien . auto . calimero . process . ProcessCommunicator # readBool * ( tuwien . auto . calimero . GroupAddress ) */ public boolean readBool ( GroupAddress dst ) throws KNXTimeoutException , KNXRemoteException , KNXLinkClosedException , KNXFormatException { } }
final byte [ ] apdu = readFromGroup ( dst , priority , 0 , 0 ) ; final DPTXlatorBoolean t = new DPTXlatorBoolean ( DPTXlatorBoolean . DPT_BOOL ) ; extractGroupASDU ( apdu , t ) ; return t . getValueBoolean ( ) ;
public class ResolverUtils {
    /**
     * Randomize server list using local IPv4 address hash as a seed.
     * Seeding with the local address hash makes the permutation stable per
     * host: repeated calls on the same machine yield the same order, while
     * different hosts spread their load across different endpoint orderings.
     *
     * @param list the endpoint list; not modified
     * @param <T> the endpoint type
     * @return a copy of the original list with elements in the random order
     */
    public static <T extends EurekaEndpoint> List<T> randomize(List<T> list) {
        List<T> randomList = new ArrayList<>(list);
        // Nothing to shuffle for 0 or 1 elements.
        if (randomList.size() < 2) {
            return randomList;
        }
        Random random = new Random(LOCAL_IPV4_ADDRESS.hashCode());
        int last = randomList.size() - 1;
        for (int i = 0; i < last; i++) {
            // NOTE(review): pos is drawn from [0, size - i), so an element may
            // be swapped with a position BEFORE i — this is not textbook
            // Fisher-Yates and the permutation distribution is slightly
            // non-uniform; presumably acceptable since only deterministic
            // per-host spreading matters here. Confirm before "fixing".
            int pos = random.nextInt(randomList.size() - i);
            if (pos != i) {
                Collections.swap(randomList, i, pos);
            }
        }
        return randomList;
    }
}
public class Stack { /** * SNS topic ARNs to which stack related events are published . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setNotificationARNs ( java . util . Collection ) } or { @ link # withNotificationARNs ( java . util . Collection ) } if you * want to override the existing values . * @ param notificationARNs * SNS topic ARNs to which stack related events are published . * @ return Returns a reference to this object so that method calls can be chained together . */ public Stack withNotificationARNs ( String ... notificationARNs ) { } }
if ( this . notificationARNs == null ) { setNotificationARNs ( new com . amazonaws . internal . SdkInternalList < String > ( notificationARNs . length ) ) ; } for ( String ele : notificationARNs ) { this . notificationARNs . add ( ele ) ; } return this ;
public class AbstractSegmentation {
    /**
     * Segments a sentence into words.
     *
     * @param sentence the sentence to segment
     * @return the list of words; empty when the sentence is empty, is a single
     *         whitespace character (with whitespace keeping disabled), or the
     *         underlying segmenter produced no result
     */
    private List<Word> segSentence(final String sentence) {
        // Single-character sentences bypass the segmenter entirely.
        if (sentence.length() == 1) {
            if (KEEP_WHITESPACE) {
                // Whitespace is kept as a word when configured to do so.
                List<Word> result = new ArrayList<>(1);
                result.add(new Word(KEEP_CASE ? sentence : sentence.toLowerCase()));
                return result;
            } else {
                // Otherwise only non-whitespace single characters become words.
                if (!Character.isWhitespace(sentence.charAt(0))) {
                    List<Word> result = new ArrayList<>(1);
                    result.add(new Word(KEEP_CASE ? sentence : sentence.toLowerCase()));
                    return result;
                }
            }
        }
        if (sentence.length() > 1) {
            // Delegate to the concrete segmentation algorithm.
            List<Word> list = segImpl(sentence);
            if (list != null) {
                // Optionally post-process the tokens with person-name recognition.
                if (PERSON_NAME_RECOGNIZE) {
                    list = PersonName.recognize(list);
                }
                return list;
            } else {
                // The segmenter returned no result for this text ("text ...
                // produced no segmentation result") — log and fall through.
                LOGGER.error("文本 " + sentence + " 没有获得分词结果");
            }
        }
        return Collections.emptyList();
    }
}
public class ImplicitIckleServiceInjector { /** * { @ inheritDoc } */ @ Override public void inject ( Configuration config ) { } }
Set < Field > fields = config . getInjectionTargets ( InjectionCategory . ICKLE_SERVICE ) ; Class < ? extends Object > implementationClass = null ; for ( Field field : fields ) { try { if ( ! field . isAccessible ( ) ) field . setAccessible ( true ) ; Class < ? extends Object > contractClass = field . getType ( ) ; IckleService ickleService = contractClass . getAnnotation ( IckleService . class ) ; implementationClass = ickleService . value ( ) ; try { field . set ( config . getContext ( ) , implementationClass . newInstance ( ) ) ; } catch ( InstantiationException ie ) { Constructor < ? extends Object > constructor = implementationClass . getConstructor ( Context . class ) ; if ( constructor == null ) { StringBuilder errorContext = new StringBuilder ( ) . append ( "The Ickle Service implementation " ) . append ( implementationClass . getSimpleName ( ) ) . append ( " must expose a public no-argument constructor " ) . append ( "or a constructor which takes only a single " ) . append ( Context . class . getName ( ) ) . append ( ". " ) ; throw new InjectionException ( new InstantiationException ( errorContext . toString ( ) ) ) ; } else { Context baseContext = ContextUtils . discover ( config . getContext ( ) ) ; field . set ( config . getContext ( ) , constructor . newInstance ( baseContext ) ) ; } } } catch ( Exception e ) { StringBuilder errorContext = new StringBuilder ( ) . append ( "Ickle Service injection failed" ) ; if ( implementationClass != null ) { errorContext . append ( " for " ) . append ( implementationClass . getName ( ) ) . append ( ". " ) ; } else { errorContext . append ( ". " ) ; } Log . e ( getClass ( ) . getName ( ) , errorContext . toString ( ) , e ) ; } }