signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CxxPreprocessor { /** * Parse defines spited into key - value format * ( sonar . cxx . jsonCompilationDatabase ) */ private Map < String , Macro > parseMacroDefinitions ( Map < String , String > defines ) { } }
final List < String > margedDefines = defines . entrySet ( ) . stream ( ) . map ( e -> e . getKey ( ) + " " + e . getValue ( ) ) . collect ( Collectors . toList ( ) ) ; return parseMacroDefinitions ( margedDefines ) ;
public class PhotosetsApi {

    /**
     * Create a new photoset for the calling user.
     *
     * <p>Authentication: this method requires authentication with 'write' permission.</p>
     *
     * <p>New photosets are automatically put first in the photoset ordering for the
     * user. Use orderSets if you don't want the new set to appear first on the
     * user's photoset list.</p>
     *
     * @param title          title for the photoset. Required.
     * @param description    description of the photoset. Optional.
     * @param primaryPhotoId id of the photo to represent this set. The photo must
     *                       belong to the calling user. Required.
     * @return instance of {@link net.jeremybrooks.jinx.response.photosets.PhotosetInfo}
     *         with only the photosetId and url fields set.
     * @throws JinxException if required parameters are null, or if there are any errors.
     * @see <a href="https://www.flickr.com/services/api/flickr.photosets.create.html">flickr.photosets.create</a>
     */
    public PhotosetInfo create(String title, String description, String primaryPhotoId) throws JinxException {
        // Validate the two required parameters up front.
        JinxUtils.validateParams(title, primaryPhotoId);
        Map<String, String> params = new TreeMap<>();
        params.put("method", "flickr.photosets.create");
        params.put("title", title);
        params.put("primary_photo_id", primaryPhotoId);
        // description is optional; omit the key entirely when not supplied.
        if (description != null) {
            params.put("description", description);
        }
        return jinx.flickrPost(params, PhotosetInfo.class);
    }
}
public class CmsRelation {

    /**
     * Returns the target resource when possible to read with the given filter.<p>
     *
     * @param cms the current user context
     * @param filter the filter to use
     * @return the target resource
     * @throws CmsException if something goes wrong
     */
    public CmsResource getTarget(CmsObject cms, CmsResourceFilter filter) throws CmsException {
        try {
            // First attempt: look the resource up by its structure id.
            return cms.readResource(getTargetId(), filter);
        } catch (CmsVfsResourceNotFoundException e) {
            // Fallback: look up by path, but from the root site so that the
            // target path is resolved independently of the current site.
            String storedSiteRoot = cms.getRequestContext().getSiteRoot();
            try {
                cms.getRequestContext().setSiteRoot("");
                return cms.readResource(getTargetPath(), filter);
            } finally {
                // Always restore the caller's site root, even on failure.
                cms.getRequestContext().setSiteRoot(storedSiteRoot);
            }
        }
    }
}
public class Normalization {

    /**
     * Applies L2 normalization to the given array of doubles. The passed
     * vector is modified in place.
     *
     * <p>If the vector has zero norm (all components zero, including the
     * empty vector), every component is set to 1 instead of dividing by
     * zero — this preserves the original method's fallback behavior.</p>
     *
     * @param vector the original vector; modified in place
     * @return the same array, L2-normalized (or all ones for a zero vector)
     */
    public static double[] normalizeL2(double[] vector) {
        // Compute the vector's 2-norm.
        double norm2 = 0;
        for (int i = 0; i < vector.length; i++) {
            norm2 += vector[i] * vector[i];
        }
        // Math.sqrt already returns double; the original's explicit cast was redundant.
        norm2 = Math.sqrt(norm2);
        if (norm2 == 0) {
            // Zero vector: fall back to an all-ones vector (original behavior).
            Arrays.fill(vector, 1);
        } else {
            for (int i = 0; i < vector.length; i++) {
                vector[i] = vector[i] / norm2;
            }
        }
        return vector;
    }
}
public class JSONObject {

    /**
     * Get the BigInteger value associated with a key.
     *
     * @param key A key string.
     * @return The numeric value.
     * @throws JSONException if the key is not found or if the value cannot be
     *         converted to BigInteger.
     */
    public BigInteger getBigInteger(String key) throws JSONException {
        // get() throws JSONException itself when the key is absent.
        Object object = this.get(key);
        try {
            // Convert via the value's string form; NumberFormatException (and any
            // other failure) is translated into a JSONException below.
            return new BigInteger(object.toString());
        } catch (Exception e) {
            // NOTE(review): the original cause is dropped here — TODO confirm
            // whether JSONException offers a (String, Throwable) constructor.
            throw new JSONException("JSONObject[" + quote(key) + "] could not be converted to BigInteger.");
        }
    }
}
public class DeLiCluTreeIndex {

    /**
     * Marks the specified object as handled and returns the path of node ids from
     * the root to the object's parent.
     *
     * @param id the object's id to be marked as handled
     * @param obj the object to be marked as handled
     * @return the path of node ids from the root to the object's parent
     */
    public synchronized IndexTreePath<DeLiCluEntry> setHandled(DBID id, O obj) {
        if (LOG.isDebugging()) {
            LOG.debugFine("setHandled " + id + ", " + obj + "\n");
        }
        // Find the leaf node containing the object.
        IndexTreePath<DeLiCluEntry> pathToObject = findPathToObject(getRootPath(), obj, id);
        if (pathToObject == null) {
            throw new AbortException("Object not found in setHandled.");
        }
        // Flip the leaf entry to handled.
        DeLiCluEntry entry = pathToObject.getEntry();
        entry.setHasHandled(true);
        entry.setHasUnhandled(false);
        // Walk up the tree: each parent's handled/unhandled flags are recomputed
        // as the OR over all of its children's flags.
        for (IndexTreePath<DeLiCluEntry> path = pathToObject; path.getParentPath() != null; path = path.getParentPath()) {
            DeLiCluEntry parentEntry = path.getParentPath().getEntry();
            DeLiCluNode node = getNode(parentEntry);
            boolean hasHandled = false;
            boolean hasUnhandled = false;
            for (int i = 0; i < node.getNumEntries(); i++) {
                final DeLiCluEntry nodeEntry = node.getEntry(i);
                hasHandled = hasHandled || nodeEntry.hasHandled();
                hasUnhandled = hasUnhandled || nodeEntry.hasUnhandled();
            }
            parentEntry.setHasUnhandled(hasUnhandled);
            parentEntry.setHasHandled(hasHandled);
        }
        return pathToObject;
    }
}
public class AppModuleContextService {

    /**
     * Capture the current security context and create the JeeMetadata and
     * Classloader context for the given app component (j2eeName).
     *
     * @param execProps execution properties passed through to the context providers
     * @param j2eeName the app component whose metadata/classloader identity is used
     * @return a ThreadContextDescriptor containing the current security context and
     *         the jeemetadata and classloader context for the given app component.
     * @throws IllegalStateException if the app component is not installed
     */
    protected ThreadContextDescriptor createThreadContext(Map<String, String> execProps, J2EEName j2eeName) {
        // Build the classloader context keyed by the component's classloader identity.
        ThreadContext classloaderContext = classLoaderContextProvider.createThreadContext(execProps, getContextClassLoaderIdentifier(j2eeName));
        // Build the JEE metadata context keyed by the component's metadata identifier.
        ThreadContext jeeContext = jeeMetaDataContextProvider.createThreadContext(execProps, getMetaDataIdentifier(j2eeName));
        // Capture the current (e.g. security) thread context.
        // NOTE(review): `CapturedContexts` is used here like a variable but is
        // capitalized like a class name — verify it resolves to a field/constant.
        ThreadContextDescriptor retMe = contextService.captureThreadContext(execProps, CapturedContexts);
        // Overlay the per-component contexts on the captured descriptor.
        retMe.set("com.ibm.ws.javaee.metadata.context.provider", jeeContext);
        retMe.set("com.ibm.ws.classloader.context.provider", classloaderContext);
        return retMe;
    }
}
public class ADEntries {

    /**
     * Converts an {@link Instant} to an Active Directory / Windows epoch value:
     * the offset {@code AD_EPOCH_OFFSET_MS} is subtracted from the Unix-epoch
     * milliseconds and the result is scaled by 10,000 (i.e. expressed in
     * 100-nanosecond units), then returned as a decimal string.
     *
     * <p>NOTE(review): the original javadoc described a Zulu time-string
     * conversion ("199412161032Z"), which does not match this implementation;
     * it has been rewritten to document the actual arithmetic.</p>
     *
     * @param instant the instant to be converted; must be non-null
     * @return the AD epoch value as a decimal string
     * @throws NullPointerException if {@code instant} is null
     */
    public static String convertDateToWinEpoch(final Instant instant) {
        if (instant == null) {
            throw new NullPointerException("date must be non-null");
        }
        final long inputAsMs = instant.toEpochMilli();
        // Shift from the Unix epoch to the AD epoch.
        final long inputAsADMs = inputAsMs - AD_EPOCH_OFFSET_MS;
        // 1 ms = 10,000 x 100ns; despite the "Ns" suffix, this variable holds
        // 100-nanosecond units, not nanoseconds.
        final long inputAsADNs = inputAsADMs * 10000;
        return String.valueOf(inputAsADNs);
    }
}
public class Arr { /** * / * containsAny */ public static < T > boolean containsAny ( T [ ] arr , Iterable < ? extends T > values ) { } }
for ( T value : values ) { if ( contains ( arr , value ) ) return true ; } return false ;
public class BundleMonitorExtension {

    /**
     * Installs a new bundle.
     *
     * @param bundle the uploaded bundle file; a missing upload yields an error flash
     * @param startIfNeeded whether or not the bundle needs to be started
     * @return the bundle page, with a flash message describing the outcome.
     */
    @Route(method = HttpMethod.POST, uri = "")
    public Result installBundle(@FormParameter("bundle") final FileItem bundle,
            @FormParameter("start") @DefaultValue("false") final boolean startIfNeeded) {
        if (bundle != null) {
            // Installation may block, so run it asynchronously.
            return async(new Callable<Result>() {
                @Override
                public Result call() throws Exception {
                    Bundle b;
                    try {
                        // NOTE(review): "file/temp/" is used as the bundle location
                        // prefix — confirm this matches the framework's expectation.
                        b = context.installBundle("file/temp/" + bundle.name(), bundle.stream());
                        logger().info("Bundle {} installed", b.getSymbolicName());
                    } catch (BundleException e) {
                        flash("error", "Cannot install bundle '" + bundle.name() + "' : " + e.getMessage());
                        return bundle();
                    }
                    // Fragments cannot be started, so skip starting them.
                    if (startIfNeeded && !isFragment(b)) {
                        try {
                            b.start();
                            flash("success", "Bundle '" + b.getSymbolicName() + "' installed and started");
                            return bundle();
                        } catch (BundleException e) {
                            // Install succeeded but start failed — report both facts.
                            flash("error", "Bundle '" + b.getSymbolicName() + "' installed but " + "failed to start: " + e.getMessage());
                            return bundle();
                        }
                    } else {
                        flash("success", "Bundle '" + b.getSymbolicName() + "' installed.");
                        return bundle();
                    }
                }
            });
        } else {
            logger().error("No bundle to install");
            flash("error", "Unable to install the bundle - no uploaded file");
            return bundle();
        }
    }
}
public class FeatureResolverImpl {

    /**
     * (non-Javadoc)
     *
     * @see com.ibm.ws.kernel.feature.resolver.FeatureResolver#resolveFeatures
     *
     * Resolution proceeds in these steps:
     * 1) Prime the selected features with the pre-resolved and the root features
     *    (conflicts are reported, but no permutations for backtracking)
     * 2) Resolve the root features
     * 3) Check if there are any auto features to resolve; if so return to step 2
     *    and resolve the auto-features as root features
     */
    @Override
    public Result resolveFeatures(Repository repository, Collection<ProvisioningFeatureDefinition> kernelFeatures, Collection<String> rootFeatures, Set<String> preResolved,
                                  boolean allowMultipleVersions, EnumSet<ProcessType> supportedProcessTypes) {
        SelectionContext selectionContext = new SelectionContext(repository, allowMultipleVersions, supportedProcessTypes);

        // This checks if the pre-resolved exist in the repo;
        // if one does not exist then we start over with an empty set of pre-resolved.
        preResolved = checkPreResolvedExistAndSetFullName(preResolved, selectionContext);

        // Check that the root features exist and are public; remove them if not;
        // also get the full name.
        rootFeatures = checkRootsAreAccessibleAndSetFullName(new ArrayList<String>(rootFeatures), selectionContext, preResolved);

        // Always prime the selected with the pre-resolved and the root features.
        // This will ensure that the root and pre-resolved features do not conflict.
        selectionContext.primeSelected(preResolved);
        selectionContext.primeSelected(rootFeatures);

        // Even if the feature set hasn't changed, we still need to process the auto
        // features and add any features that need to be installed/uninstalled to the
        // list. This recursively iterates over the auto features, as previously
        // installed features may satisfy other auto features.
        Set<String> autoFeaturesToInstall = Collections.<String> emptySet();
        Set<String> seenAutoFeatures = new HashSet<String>();
        Set<String> resolved = Collections.emptySet();
        do {
            if (!!!autoFeaturesToInstall.isEmpty()) {
                // This is after the first pass; use the autoFeaturesToInstall as the roots.
                rootFeatures = autoFeaturesToInstall;
                // Need to prime the auto features as selected.
                selectionContext.primeSelected(autoFeaturesToInstall);
                // And use the resolved as the preResolved.
                preResolved = resolved;
                // A new resolution process will happen now along with the auto-features
                // that match; need to save off the current conflicts to be the pre
                // resolved conflicts, otherwise they would get lost.
                selectionContext.saveCurrentPreResolvedConflicts();
            }
            resolved = doResolveFeatures(rootFeatures, preResolved, selectionContext);
            // Loop while processing the auto features still yields new features to install.
        } while (!!!(autoFeaturesToInstall = processAutoFeatures(kernelFeatures, resolved, seenAutoFeatures, selectionContext)).isEmpty());

        // Finally return the selected result.
        return selectionContext.getResult();
    }
}
public class Watch {

    /**
     * Initializes a new stream to the backend with backoff.
     *
     * Schedules the (re)connect on the Firestore executor after the current
     * backoff delay; the attempt is skipped if the watch became inactive in
     * the meantime.
     */
    private void initStream() {
        firestoreExecutor.schedule(new Runnable() {
            @Override
            public void run() {
                // Cheap pre-check before taking the lock.
                if (!isActive.get()) {
                    return;
                }
                synchronized (Watch.this) {
                    // Re-check under the lock: the watch may have been stopped
                    // between the first check and lock acquisition.
                    if (!isActive.get()) {
                        return;
                    }
                    // A previous stream must have been torn down before reconnecting.
                    Preconditions.checkState(stream == null);
                    current = false;
                    // Advance the backoff state for the next retry.
                    nextAttempt = backoff.createNextAttempt(nextAttempt);
                    stream = firestore.streamRequest(Watch.this, firestore.getClient().listenCallable());
                    ListenRequest.Builder request = ListenRequest.newBuilder();
                    request.setDatabase(firestore.getDatabaseName());
                    request.setAddTarget(target);
                    // Resume from the last known position when we have a token.
                    if (resumeToken != null) {
                        request.getAddTargetBuilder().setResumeToken(resumeToken);
                    }
                    stream.onNext(request.build());
                }
            }
        }, nextAttempt.getRandomizedRetryDelay().toMillis(), TimeUnit.MILLISECONDS);
    }
}
public class LocalDirAllocator { /** * This method must be used to obtain the dir allocation context for a * particular value of the context name . The context name must be an item * defined in the Configuration object for which we want to control the * dir allocations ( e . g . , < code > mapred . local . dir < / code > ) . The method will * create a context for that name if it doesn ' t already exist . */ private AllocatorPerContext obtainContext ( String contextCfgItemName ) { } }
synchronized ( contexts ) { AllocatorPerContext l = contexts . get ( contextCfgItemName ) ; if ( l == null ) { contexts . put ( contextCfgItemName , ( l = new AllocatorPerContext ( contextCfgItemName ) ) ) ; } return l ; }
public class DITypeInfo {

    /**
     * Retrieves the SQL CLI datetime subcode for the type.<p>
     *
     * @return the SQL CLI datetime subcode (1 for DATE, 2 for TIME,
     *         3 for TIMESTAMP), or {@code null} for non-datetime types
     */
    Integer getSqlDateTimeSub() {
        switch (type) {
            case Types.SQL_DATE :
                // NOTE(review): uses the INTEGER_1 constant while the other
                // branches use getInt(n) — presumably equivalent; confirm.
                return ValuePool.INTEGER_1;
            case Types.SQL_TIME :
                return ValuePool.getInt(2);
            case Types.SQL_TIMESTAMP :
                return ValuePool.getInt(3);
            default :
                return null;
        }
    }
}
public class NameQuery {

    /**
     * {@inheritDoc}
     *
     * Rewrites this name query into Lucene term queries. For index format V3
     * and later, the name is matched as a conjunction over the namespace URI
     * and local name fields; for older indexes the prefixed label field is used.
     */
    public Query rewrite(IndexReader reader) throws IOException {
        if (version.getVersion() >= IndexFormatVersion.V3.getVersion()) {
            // Use LOCAL_NAME and NAMESPACE_URI fields: both must match.
            BooleanQuery name = new BooleanQuery();
            name.add(new JcrTermQuery(new Term(FieldNames.NAMESPACE_URI, nodeName.getNamespace())), BooleanClause.Occur.MUST);
            name.add(new JcrTermQuery(new Term(FieldNames.LOCAL_NAME, nodeName.getName())), BooleanClause.Occur.MUST);
            return name;
        } else {
            // Use LABEL field, which stores the namespace-prefixed name.
            try {
                return new JcrTermQuery(new Term(FieldNames.LABEL, nsMappings.translateName(nodeName)));
            } catch (IllegalNameException e) {
                // Surface the translation failure as an IOException, per the signature.
                throw Util.createIOException(e);
            }
        }
    }
}
public class TrieMap { /** * { @ inheritDoc } */ @ Override public void clear ( ) { } }
final TrieNode < V > rootNode = root ; rootNode . children . clear ( ) ; rootNode . unset ( ) ; ++ modCount ; size = 0 ;
public class CmsContentTypeVisitor { /** * Returns the schema type message key . < p > * @ param value the schema type * @ return the schema type message key */ private String getTypeKey ( I_CmsXmlSchemaType value ) { } }
StringBuffer result = new StringBuffer ( 64 ) ; result . append ( value . getContentDefinition ( ) . getInnerName ( ) ) ; result . append ( '.' ) ; result . append ( value . getName ( ) ) ; return result . toString ( ) ;
public class ApiOvhTelephony {

    /**
     * Create a new fax campaign.
     *
     * REST: POST /telephony/{billingAccount}/fax/{serviceName}/campaigns
     *
     * @param recipientsDocId [required] If recipientsType is set to document, the id of the document containing the recipients phone numbers
     * @param name            [required] The name of the fax campaign
     * @param recipientsList  [required] If recipientsType is set to list, the list of recipients phone numbers
     * @param recipientsType  [required] Method to set the campaign recipient
     * @param faxQuality      [required] The quality of the fax you want to send
     * @param documentId      [required] The id of the /me/document pdf you want to send
     * @param sendDate        [required] Sending date of the campaign (when sendType is scheduled)
     * @param sendType        [required] Sending type of the campaign
     * @param billingAccount  [required] The name of your billingAccount
     * @param serviceName     [required]
     */
    public OvhFaxCampaign billingAccount_fax_serviceName_campaigns_POST(String billingAccount, String serviceName, String documentId, OvhFaxQualityEnum faxQuality, String name, String recipientsDocId, String[] recipientsList, OvhFaxCampaignRecipientsTypeEnum recipientsType, Date sendDate, OvhFaxCampaignSendTypeEnum sendType) throws IOException {
        String qPath = "/telephony/{billingAccount}/fax/{serviceName}/campaigns";
        // Substitute the path placeholders with the actual account/service.
        StringBuilder sb = path(qPath, billingAccount, serviceName);
        // Assemble the POST body from the remaining parameters.
        HashMap<String, Object> o = new HashMap<String, Object>();
        addBody(o, "documentId", documentId);
        addBody(o, "faxQuality", faxQuality);
        addBody(o, "name", name);
        addBody(o, "recipientsDocId", recipientsDocId);
        addBody(o, "recipientsList", recipientsList);
        addBody(o, "recipientsType", recipientsType);
        addBody(o, "sendDate", sendDate);
        addBody(o, "sendType", sendType);
        String resp = exec(qPath, "POST", sb.toString(), o);
        return convertTo(resp, OvhFaxCampaign.class);
    }
}
public class AnnotationActionImportFactory { /** * Adds an action import to factory . * @ param cls The action import class . */ public void addActionImport ( Class < ? > cls ) { } }
EdmActionImport actionImportAnnotation = cls . getAnnotation ( EdmActionImport . class ) ; ActionImportImpl . Builder actionImportBuilder = new ActionImportImpl . Builder ( ) . setEntitySetName ( actionImportAnnotation . entitySet ( ) ) . setActionName ( actionImportAnnotation . namespace ( ) + "." + actionImportAnnotation . action ( ) ) . setName ( actionImportAnnotation . name ( ) ) . setJavaClass ( cls ) ; actionImportBuilders . add ( actionImportBuilder ) ;
public class SignatureUtilImpl {

    /**
     * Verifies a signature using the public key.
     *
     * @param content the original content that was signed
     * @param data the signature data (BASE64 encoded)
     * @return {@code true} if the signature verifies successfully
     * @throws SecureException if verification fails with an exception
     */
    @Override
    public boolean checkSign(byte[] content, byte[] data) {
        // Borrow a pooled Signature holder; try-with-resources returns it to the pool.
        try (PooledObject<SignatureHolder> holder = CACHE.get(id).get()) {
            Signature signature = holder.get().getVerify();
            signature.update(content);
            // The signature bytes are Base64-encoded; decode before verifying.
            return signature.verify(BASE_64.decrypt(data));
        } catch (Exception e) {
            // Message literally reads "encryption failed" (加密失败); kept as-is
            // since it is a runtime string.
            throw new SecureException("加密失败", e);
        }
    }
}
public class CmsEncoder {

    /**
     * Encodes a string used as parameter in an uri in a way independent of other
     * encodings/decodings applied later.<p>
     *
     * Used to ensure that GET parameters are not wrecked by wrong or incompatible
     * configuration settings. In order to ensure this, the String is first encoded
     * with html entities for any character that cannot be encoded in US-ASCII;
     * additionally, the plus sign is also encoded to avoid problems with the
     * white-space replacer. Finally, the entity prefix is replaced with characters
     * not used as delimiters in urls.<p>
     *
     * @param input the parameter string
     * @return the encoded parameter string
     */
    public static String encodeParameter(String input) {
        // Step 1: entity-encode everything outside US-ASCII.
        String result = CmsEncoder.encodeHtmlEntities(input, CmsEncoder.ENCODING_US_ASCII);
        // Step 2: encode '+' so the white-space replacer cannot corrupt it.
        result = CmsStringUtil.substitute(result, "+", PLUS_ENTITY);
        // Step 3: replace the entity prefix with URL-safe characters.
        return CmsStringUtil.substitute(result, ENTITY_PREFIX, ENTITY_REPLACEMENT);
    }
}
public class ClipboardUtils { /** * Reads the String content of the { @ link Clipboard } . * @ return */ public static String getContnet ( ) { } }
String result = "" ; Clipboard clipboard = Toolkit . getDefaultToolkit ( ) . getSystemClipboard ( ) ; try { result = ( String ) clipboard . getData ( DataFlavor . stringFlavor ) ; } catch ( Exception ex ) { // noop } return result ;
public class PrcSeSellerDel {

    /**
     * <p>Process entity request: deletes the given S-E seller.</p>
     *
     * @param pAddParam additional param, e.g. return this line's
     *        document in "nextEntity" for farther process
     * @param pRequestData Request Data
     * @param pEntity Entity to process
     * @return Entity processed for farther process or null (always null here:
     *         a deleted seller has no follow-up entity)
     * @throws Exception - an exception
     */
    @Override
    public final SeSeller process(final Map<String, Object> pAddParam, final SeSeller pEntity, final IRequestData pRequestData) throws Exception {
        // Notify listeners/caches that this seller (keyed by its auth user id) changed.
        findSeSeller.handleSeSellerChanged(pAddParam, pEntity.getUserAuth().getItsId());
        // Remove the entity from persistent storage.
        this.srvOrm.deleteEntity(pAddParam, pEntity);
        return null;
    }
}
public class MySQLFieldPacket {

    /**
     * Parses a MySQL field packet into column information. The wire layout is:
     *
     * Bytes                    Name
     * -----                    ----
     * n (Length Coded String)  catalog
     * n (Length Coded String)  db
     * n (Length Coded String)  table
     * n (Length Coded String)  org_table
     * n (Length Coded String)  name
     * n (Length Coded String)  org_name
     * 1                        (filler)
     * 2                        charsetnr
     * 4                        length
     * 1                        type
     * 2                        flags
     * 1                        decimals
     * 2                        (filler), always 0x00
     * n (Length Coded Binary)  default
     *
     * @param rawPacket the raw field packet to decode
     * @return the decoded column information
     * @throws IOException if reading from the packet fails
     */
    public static ColumnInformation columnInformationFactory(final RawPacket rawPacket) throws IOException {
        final Reader reader = new Reader(rawPacket);
        // The builder calls MUST stay in wire order: each reader call consumes
        // the next field of the packet.
        return new MySQLColumnInformation.Builder()
            .catalog(reader.getLengthEncodedString())
            .db(reader.getLengthEncodedString())
            .table(reader.getLengthEncodedString())
            .originalTable(reader.getLengthEncodedString())
            .name(reader.getLengthEncodedString())
            .originalName(reader.getLengthEncodedString())
            .skipMe(reader.skipBytes(1))
            .charsetNumber(reader.readShort())
            .length(reader.readInt())
            .type(MySQLType.fromServer(reader.readByte()))
            .flags(parseFlags(reader.readShort()))
            .decimals(reader.readByte())
            .skipMe(reader.skipBytes(2))
            .build();
    }
}
public class Client {

    /**
     * Called by the {@link ClientDObjectMgr} when our bootstrap notification arrives.
     * If the client and server are being run in "merged" mode in a single JVM, this
     * is how the client is configured with the server's distributed object manager
     * and provided with bootstrap data.
     */
    protected void gotBootstrap(BootstrapData data, DObjectManager omgr) {
        if (debugLogMessages()) {
            log.info("Got bootstrap " + data + ".");
        }
        // Keep these around for interested parties.
        _bstrap = data;
        _omgr = omgr;
        // Extract bootstrap information.
        _connectionId = data.connectionId;
        _cloid = data.clientOid;
        // Notify the communicator that we got our bootstrap data (if we have one).
        if (_comm != null) {
            _comm.gotBootstrap();
        }
        // Initialize our invocation director.
        _invdir.init(omgr, _cloid, this);
        // Send a few pings to the server to establish the clock offset between this
        // client and server standard time.
        establishClockDelta(System.currentTimeMillis());
        // We can't quite call initialization completed at this point because we need
        // for the invocation director to fully initialize (which requires a round
        // trip to the server) before turning the client loose to do things like
        // request invocation services.
    }
}
public class FctBnTradeEntitiesProcessors {

    /**
     * <p>Get PrCuOrSv (create and put into map on first use).</p>
     *
     * @param pAddParam additional param
     * @return requested PrCuOrSv
     * @throws Exception - an exception
     */
    protected final PrCuOrSv<RS> lazyGetPrCuOrSv(final Map<String, Object> pAddParam) throws Exception {
        String beanName = PrCuOrSv.class.getSimpleName();
        @SuppressWarnings("unchecked")
        PrCuOrSv<RS> proc = (PrCuOrSv<RS>) this.processorsMap.get(beanName);
        if (proc == null) {
            // Lazily build and wire the processor on first request.
            proc = new PrCuOrSv<RS>();
            proc.setSrvOrm(getSrvOrm());
            proc.setCncOrd(getCncOrd());
            // Assigning fully initialized object:
            this.processorsMap.put(beanName, proc);
            this.logger.info(null, FctBnTradeEntitiesProcessors.class, beanName + " has been created.");
        }
        return proc;
    }
}
public class FileBasedCollection { /** * Update an entry in the collection . * @ param entry Updated entry to be stored * @ param fsid Internal ID of entry * @ throws java . lang . Exception On error */ public void updateEntry ( final Entry entry , String fsid ) throws Exception { } }
synchronized ( FileStore . getFileStore ( ) ) { final Feed f = getFeedDocument ( ) ; if ( fsid . endsWith ( ".media-link" ) ) { fsid = fsid . substring ( 0 , fsid . length ( ) - ".media-link" . length ( ) ) ; } updateTimestamps ( entry ) ; updateEntryAppLinks ( entry , fsid , false ) ; updateFeedDocumentWithExistingEntry ( f , entry ) ; final String entryPath = getEntryPath ( fsid ) ; final OutputStream os = FileStore . getFileStore ( ) . getFileOutputStream ( entryPath ) ; updateEntryAppLinks ( entry , fsid , true ) ; Atom10Generator . serializeEntry ( entry , new OutputStreamWriter ( os , "UTF-8" ) ) ; os . flush ( ) ; os . close ( ) ; }
public class SimpleExpression {

    /**
     * Create a {@code this in right} expression.
     *
     * @param right rhs of the comparison (a subquery)
     * @return this in right
     */
    public BooleanExpression in(SubQueryExpression<? extends T> right) {
        // Delegate to the generic boolean-operation factory with the IN operator.
        return Expressions.booleanOperation(Ops.IN, mixin, right);
    }
}
public class GenericClientFactory {

    /**
     * Load: initializes the client for the given persistence unit by loading
     * its metadata, initializing the client, and constructing the connection
     * pool (or single connection).
     *
     * @param persistenceUnit the persistence unit
     * @param puProperties persistence-unit-specific override properties
     */
    @Override
    public void load(String persistenceUnit, Map<String, Object> puProperties) {
        setPersistenceUnit(persistenceUnit);

        // Load client-specific metadata.
        logger.info("Loading client metadata for persistence unit : " + persistenceUnit);
        loadClientMetadata(puProperties);

        // Initialize the client.
        logger.info("Initializing client for persistence unit : " + persistenceUnit);
        initialize(puProperties);

        // Construct the pool (or a single connection, depending on the client).
        logger.info("Constructing pool for persistence unit : " + persistenceUnit);
        connectionPoolOrConnection = createPoolOrConnection();
    }
}
public class GreenPepperServerServiceImpl {

    /**
     * {@inheritDoc}
     *
     * Loads the registered repository matching the given repository's UID and
     * copies the caller-supplied maxUsers onto it. The session is always closed.
     */
    public Repository getRegisteredRepository(Repository repository) throws GreenPepperServerException {
        try {
            sessionService.startSession();
            Repository registeredRepository = loadRepository(repository.getUid());
            // Carry over the caller's maxUsers setting onto the loaded entity.
            registeredRepository.setMaxUsers(repository.getMaxUsers());
            return registeredRepository;
        } finally {
            // Close the session even when loading throws.
            sessionService.closeSession();
        }
    }
}
public class Condition {

    /**
     * Negate given condition.
     *
     * Used for methods like $.shouldNot(exist), $.shouldNotBe(visible).
     * Typically you don't need to use it directly.
     *
     * The returned condition inverts {@code apply} and the null-allowed flag,
     * but reports the same actual value as the wrapped condition.
     */
    public static Condition not(final Condition condition) {
        return new Condition("not " + condition.name, !condition.nullIsAllowed) {
            @Override
            public boolean apply(Driver driver, WebElement element) {
                // Invert the wrapped condition's verdict.
                return !condition.apply(driver, element);
            }

            @Override
            public String actualValue(Driver driver, WebElement element) {
                // The observed value is unchanged; only the verdict is negated.
                return condition.actualValue(driver, element);
            }
        };
    }
}
public class DefaultBlitz4jConfig {

    /**
     * (non-Javadoc)
     *
     * @see com.netflix.blitz4j.BlitzConfig#getBatcherMaxDelay(java.lang.String)
     *
     * Reads "<batcherName>.batch.maxDelay"; when absent, falls back to the
     * value of "<batcherName>.waitTimeinMillis" (default "0.5") parsed as a double.
     */
    @Override
    public double getBatcherMaxDelay(String batcherName) {
        return CONFIGURATION.getDoubleProperty(
                batcherName + "." + "batch.maxDelay",
                // Fallback default comes from a second, string-valued property.
                Double.valueOf(this.getPropertyValue(batcherName + ".waitTimeinMillis", "0.5"))).get();
    }
}
public class CaptureActivityIntents {

    /**
     * Set desired duration for which to pause after a successful scan to {@code Intent}.
     *
     * @param intent   Target intent.
     * @param duration Desired duration in milliseconds.
     */
    public static void setResultDisplayDurationInMs(Intent intent, long duration) {
        intent.putExtra(Intents.Scan.RESULT_DISPLAY_DURATION_MS, duration);
    }
}
public class AWSAppMeshClient {

    /**
     * Returns a list of existing virtual services in a service mesh.
     *
     * @param request the ListVirtualServices request
     * @return Result of the ListVirtualServices operation returned by the service.
     * @throws BadRequestException The request syntax was malformed.
     * @throws ForbiddenException You don't have permissions to perform this action.
     * @throws InternalServerErrorException The request processing has failed because
     *         of an unknown error, exception, or failure.
     * @throws NotFoundException The specified resource doesn't exist.
     * @throws ServiceUnavailableException The request has failed due to a temporary
     *         failure of the service.
     * @throws TooManyRequestsException The maximum request rate permitted by the
     *         App Mesh APIs has been exceeded for your account.
     * @sample AWSAppMesh.ListVirtualServices
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/appmesh-2019-01-25/ListVirtualServices"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public ListVirtualServicesResult listVirtualServices(ListVirtualServicesRequest request) {
        // Standard SDK pattern: pre-process the request, then delegate to the
        // generated execute method.
        request = beforeClientExecution(request);
        return executeListVirtualServices(request);
    }
}
public class GetTechReportPunchCardStatsMethod { /** * Returns all ApplicationProjectModels . */ private static Set < ProjectModel > getAllApplications ( GraphContext graphContext ) { } }
Set < ProjectModel > apps = new HashSet < > ( ) ; Iterable < ProjectModel > appProjects = graphContext . findAll ( ProjectModel . class ) ; for ( ProjectModel appProject : appProjects ) apps . add ( appProject ) ; return apps ;
public class SDPLogarithmicBarrier {

    /**
     * Calculates the initial value for the s parameter in Phase I.
     * Returns s so that F(x) - s.I is negative definite.
     *
     * @see "S.Boyd and L.Vandenberghe, Convex Optimization, 11.6.2"
     * @see "S.Boyd and L.Vandenberghe, Semidefinite programming, 6.1"
     */
    @Override
    public double calculatePhase1InitialFeasiblePoint(double[] originalNotFeasiblePoint, double tolerance) {
        RealMatrix F = this.buildS(originalNotFeasiblePoint).scalarMultiply(-1);
        RealMatrix S = F.scalarMultiply(-1);
        try {
            // Cholesky succeeds only for positive definite S, i.e. F is
            // already negative definite: the point is feasible.
            new CholeskyDecomposition(S);
            // already feasible
            return -1;
        } catch (NonPositiveDefiniteMatrixException ee) {
            // It does NOT mean that F is negative definite; it can be indefinite.
            // Use the smallest eigenvalue of F to pick an s that shifts F - s.I
            // into negative definiteness.
            EigenDecomposition eFact = new EigenDecomposition(F);
            double[] eValues = eFact.getRealEigenvalues();
            double minEigenValue = eValues[Utils.getMinIndex(DoubleFactory1D.dense.make(eValues))];
            // Scale by tolerance^-0.5 and clamp at 0 (non-positive result).
            return -Math.min(minEigenValue * Math.pow(tolerance, -0.5), 0.);
        }
    }
}
public class SimpleDirectoryPersistentCache {

    /**
     * Writes the DiscoverInfo stanza to a file as a single UTF string.
     *
     * @param file the destination file (overwritten)
     * @param info the stanza whose XML form is written
     * @throws IOException if writing fails; note writeUTF is limited to
     *         payloads under 64KB of modified-UTF-8
     */
    private static void writeInfoToFile(File file, DiscoverInfo info) throws IOException {
        // try-with-resources closes the stream even if writeUTF throws.
        try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(file))) {
            dos.writeUTF(info.toXML().toString());
        }
    }
}
public class SimonConsolePlugin {

    /**
     * Serialize plugin data into a JSON object.
     *
     * @param jsonStringifierFactory Stringifier factory
     * @return JSON object representing the plugin (id, label, resources)
     */
    public final ObjectJS toJson(StringifierFactory jsonStringifierFactory) {
        final ObjectJS pluginJS = new ObjectJS();
        final Stringifier<String> stringStringifier = jsonStringifierFactory.getStringifier(String.class);
        pluginJS.setSimpleAttribute("id", getId(), stringStringifier);
        pluginJS.setSimpleAttribute("label", getLabel(), stringStringifier);
        // Resources serialize as a nested JSON structure, not a simple string.
        pluginJS.setAttribute("resources", HtmlResource.toJson(getResources(), jsonStringifierFactory));
        return pluginJS;
    }
}
public class MetaUtil { /** * 创建带有字段限制的Entity对象 < br > * 此方法读取数据库中对应表的字段列表 , 加入到Entity中 , 当Entity被设置内容时 , 会忽略对应表字段外的所有KEY * @ param ds 数据源 * @ param tableName 表名 * @ return Entity对象 */ public static Entity createLimitedEntity ( DataSource ds , String tableName ) { } }
final String [ ] columnNames = getColumnNames ( ds , tableName ) ; return Entity . create ( tableName ) . setFieldNames ( columnNames ) ;
public class ConnectionOptions { /** * Returns the addresses to attempt connections to , in round - robin order . * @ see # withAddresses ( Address . . . ) * @ see # withAddresses ( String ) * @ see # withHost ( String ) * @ see # withHosts ( String . . . ) */ public Address [ ] getAddresses ( ) { } }
if ( addresses != null ) return addresses ; if ( hosts != null ) { addresses = new Address [ hosts . length ] ; for ( int i = 0 ; i < hosts . length ; i ++ ) addresses [ i ] = new Address ( hosts [ i ] , factory . getPort ( ) ) ; return addresses ; } Address address = factory == null ? new Address ( "localhost" , - 1 ) : new Address ( factory . getHost ( ) , factory . getPort ( ) ) ; return new Address [ ] { address } ;
public class PooledBufferedOutputStream { /** * This really release buffers back to the pool . MUST be called to gain the * benefit of pooling . * @ throws IOException */ public void release ( ) throws IOException { } }
for ( BufferPool . Buffer b : buffers ) { PooledBuffers . release ( b ) ; } buffers . clear ( ) ; count = 0 ;
public class RecoverableUnitImpl {
    /**
     * Restores this recoverable unit's state from a single log record.
     * Called repeatedly by the RLS during recovery; each call ADDs the
     * sections/data items found in {@code logRecord} to those already in memory.
     * A {@link LogCorruptedException} signals that parsing failed, which is
     * treated as the result of a crash mid-write: the caller assumes all valid
     * data has now been read.
     *
     * @param logRecord direct access to the underlying recovery log record
     * @throws LogCorruptedException corrupt log data was detected while parsing
     * @throws InternalLogException  an unexpected error occurred
     */
    private void recover(ReadableLogRecord logRecord) throws LogCorruptedException, InternalLogException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "recover", new Object[] { this, logRecord });
        // Refuse to operate if the parent recovery log has already failed.
        if (_recLog.failed()) {
            if (tc.isEntryEnabled())
                Tr.exit(tc, "recover", this);
            throw new InternalLogException(null);
        }
        try {
            // Record type: either a deletion marker or a normal data record.
            short recordType = logRecord.getShort();
            if (recordType == RecoverableUnitImpl.RECORDTYPEDELETED) {
                // Deletion marker: the unit was deleted at this point in its lifecycle.
                // Remove the existing unit so a later re-use of the same identity
                // (before a keypoint purges old data) is not confused with it.
                if (tc.isDebugEnabled())
                    Tr.debug(tc, "This is a DELETION record. Deleting RecoverableUnit from map");
                _recLog.removeRecoverableUnitMapEntries(_identity);
                // PI68664: recovered sections already added payload to the log's
                // _totalDataSize; deleting the unit must subtract it again.
                this.removeDuringLogRead();
            } else {
                // Normal record: decode recoverable unit sections until the
                // END_OF_SECTIONS marker. The identity and 'singleData' flag are
                // decoded here (rather than in RecoverableUnitSectionImpl) because
                // we must decide whether to create the section or reuse an existing one.
                if (tc.isDebugEnabled())
                    Tr.debug(tc, "This is a NORMAL record. Decoding contents");
                int recoverableUnitSectionIdentity = logRecord.getInt();
                while (recoverableUnitSectionIdentity != END_OF_SECTIONS) {
                    if (tc.isDebugEnabled())
                        Tr.debug(tc, "Recovering RecoverableUnitSection '" + recoverableUnitSectionIdentity + "'");
                    // Section 'record type' field: reserved for future use
                    // (e.g. distinguishing writes from deletes); ignored for now.
                    logRecord.getShort();
                    // Whether this section may hold only a single data item.
                    final boolean singleData = logRecord.getBoolean();
                    if (tc.isDebugEnabled()) {
                        if (singleData) {
                            Tr.debug(tc, "RecoverableUnitSection can hold only a single data item");
                        } else {
                            Tr.debug(tc, "RecoverableUnitSection can hold multiple data items");
                        }
                    }
                    // Look up an already-recovered section with this identity, or create one.
                    RecoverableUnitSectionImpl recoverableUnitSection = (RecoverableUnitSectionImpl) _recoverableUnitSections.get(new Integer(recoverableUnitSectionIdentity));
                    if (recoverableUnitSection == null) {
                        if (tc.isDebugEnabled())
                            Tr.debug(tc, "RecoverableUnitSection " + recoverableUnitSectionIdentity + " has not been encountered before. Creating.");
                        try {
                            recoverableUnitSection = (RecoverableUnitSectionImpl) createSection(recoverableUnitSectionIdentity, singleData);
                        } catch (RecoverableUnitSectionExistsException exc) {
                            // Should not happen: recovery is single-threaded and we just
                            // checked the map. If it does, the section exists — which was
                            // the goal — so the exception is deliberately ignored.
                            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "713", this);
                        } catch (InternalLogException exc) {
                            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "717", this);
                            if (tc.isDebugEnabled())
                                Tr.debug(tc, "An unexpected exception occured when attempting to create a new RecoverableUnitSection");
                            throw exc; // Caught in this method further down.
                        }
                    } else {
                        if (tc.isDebugEnabled())
                            Tr.debug(tc, "RecoverableUnitSection " + recoverableUnitSectionIdentity + " has been encountered before.");
                    }
                    // Let the section decode its own data items from the record.
                    recoverableUnitSection.recover(logRecord);
                    // Recovered-from-disk data counts as "written to the log": a later
                    // deletion of this unit must therefore write a deletion record.
                    _storedOnDisk = true;
                    // Next section identity, or END_OF_SECTIONS to terminate the loop.
                    recoverableUnitSectionIdentity = logRecord.getInt();
                }
            }
        } catch (LogCorruptedException exc) {
            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "740", this);
            if (tc.isDebugEnabled())
                Tr.debug(tc, "A LogCorruptedException exception occured reconstructng a RecoverableUnitImpl");
            _recLog.markFailed(exc); /* @MD19484C */
            if (tc.isEntryEnabled())
                Tr.exit(tc, "recover", exc);
            throw exc;
        } catch (InternalLogException exc) {
            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "747", this);
            if (tc.isDebugEnabled())
                Tr.debug(tc, "An InternalLogException exception occured reconstructng a RecoverableUnitImpl");
            _recLog.markFailed(exc); /* @MD19484C */
            if (tc.isEntryEnabled())
                Tr.exit(tc, "recover", exc);
            throw exc;
        } catch (Throwable exc) {
            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "753", this);
            if (tc.isDebugEnabled())
                Tr.debug(tc, "An exception occured reconstructng a RecoverableUnitImpl");
            _recLog.markFailed(exc); /* @MD19484C */
            if (tc.isEntryEnabled())
                Tr.exit(tc, "recover", "InternalLogException");
            throw new InternalLogException(exc);
        }
        if (tc.isEntryEnabled())
            Tr.exit(tc, "recover");
    }
}
public class Collections { /** * Concatenate two Iterable sources * @ param a a non - null Iterable value * @ param b a non - null Iterable value * @ return an Iterable that will iterate through all the values from ' a ' and then all the values from ' b ' * @ param < T > the value type */ public static < T > Iterable < T > concat ( final Iterable < T > a , final Iterable < T > b ) { } }
assert ( a != null ) ; assert ( b != null ) ; return ( ) -> Collections . concat ( a . iterator ( ) , b . iterator ( ) ) ;
public class TextParameterDataSegment {
    /**
     * Appends the contents of {@code src} to this segment's data buffer and
     * marks the segment dirty.
     *
     * NOTE(review): the buffer is resized by {@code len}, but {@code put(src)}
     * copies ALL remaining bytes of {@code src} — this assumes callers set
     * src's limit so that exactly {@code len} bytes remain; confirm at call sites.
     * NOTE(review): the return value is the buffer's limit after the append,
     * not the number of bytes appended — verify that is what callers expect.
     *
     * @param src source buffer to copy from
     * @param len number of bytes the buffer is grown by (0 is a no-op)
     * @return 0 when len is 0, otherwise the data buffer's limit
     */
    public final int append(final ByteBuffer src, final int len) {
        if (len == 0) {
            return 0;
        }
        // Grow the backing buffer (second arg presumably preserves existing content — TODO confirm).
        resizeBuffer(len, true);
        dataBuffer.put(src);
        isDirty = true;
        return dataBuffer.limit();
    }
}
public class CmsVfsConfiguration { /** * Returns the XSD translator that has been initialized * with the configured XSD translation rules . < p > * @ return the XSD translator */ public CmsResourceTranslator getXsdTranslator ( ) { } }
String [ ] array = m_xsdTranslationEnabled ? new String [ m_xsdTranslations . size ( ) ] : new String [ 0 ] ; for ( int i = 0 ; i < m_xsdTranslations . size ( ) ; i ++ ) { array [ i ] = m_xsdTranslations . get ( i ) ; } return new CmsResourceTranslator ( array , true ) ;
public class InsertStmt { /** * Execute . * @ return the insert stmt * @ throws EFapsException the e faps exception */ public Instance execute ( ) throws EFapsException { } }
this . update = new Insert ( ( IInsertStatement ) getEQLStmt ( ) ) ; StmtRunner . get ( ) . execute ( this . update ) ; return this . update . getInstance ( ) ;
public class ValidHelper { /** * 验证日期格式 不能为空和必须正值最小值 * @ param pContent 文本 * @ param pParamName 参数名称 * @ throws APPErrorException */ public static void validDate ( Long pContent , String pParamName ) throws APPErrorException { } }
if ( null == pContent || pContent . longValue ( ) < minDate ) { throw new APPErrorException ( pParamName + " 错误的时间值" ) ; }
public class CodeQualityEvaluator { /** * Sets audit status when code quality gate details are found . Sonar 6.7 style data . * @ param condition * @ param codeQualityAuditResponse */ private void auditStatusWhenQualityGateDetailsFound ( Map condition , CodeQualityAuditResponse codeQualityAuditResponse ) { } }
if ( StringUtils . equalsIgnoreCase ( condition . get ( "metric" ) . toString ( ) , CodeQualityMetricType . BLOCKER_VIOLATIONS . getType ( ) ) ) { codeQualityAuditResponse . addAuditStatus ( CodeQualityAuditStatus . CODE_QUALITY_THRESHOLD_BLOCKER_FOUND ) ; if ( ! StringUtils . equalsIgnoreCase ( condition . get ( "level" ) . toString ( ) , "ERROR" ) ) { codeQualityAuditResponse . addAuditStatus ( CodeQualityAuditStatus . CODE_QUALITY_THRESHOLD_BLOCKER_MET ) ; } } else if ( StringUtils . equalsIgnoreCase ( condition . get ( "metric" ) . toString ( ) , CodeQualityMetricType . CRITICAL_VIOLATIONS . getType ( ) ) ) { codeQualityAuditResponse . addAuditStatus ( CodeQualityAuditStatus . CODE_QUALITY_THRESHOLD_CRITICAL_FOUND ) ; if ( ! StringUtils . equalsIgnoreCase ( condition . get ( "level" ) . toString ( ) , "ERROR" ) ) { codeQualityAuditResponse . addAuditStatus ( CodeQualityAuditStatus . CODE_QUALITY_THRESHOLD_CRITICAL_MET ) ; } } else if ( StringUtils . equalsIgnoreCase ( condition . get ( "metric" ) . toString ( ) , CodeQualityMetricType . UNIT_TEST . getType ( ) ) ) { codeQualityAuditResponse . addAuditStatus ( CodeQualityAuditStatus . CODE_QUALITY_THRESHOLD_UNIT_TEST_FOUND ) ; if ( ! StringUtils . equalsIgnoreCase ( condition . get ( "level" ) . toString ( ) , "ERROR" ) ) { codeQualityAuditResponse . addAuditStatus ( CodeQualityAuditStatus . CODE_QUALITY_THRESHOLD_UNIT_TEST_MET ) ; } } else if ( StringUtils . equalsIgnoreCase ( condition . get ( "metric" ) . toString ( ) , CodeQualityMetricType . NEW_COVERAGE . getType ( ) ) || StringUtils . equalsIgnoreCase ( condition . get ( "metric" ) . toString ( ) , CodeQualityMetricType . COVERAGE . getType ( ) ) ) { codeQualityAuditResponse . addAuditStatus ( CodeQualityAuditStatus . CODE_QUALITY_THRESHOLD_CODE_COVERAGE_FOUND ) ; if ( ! StringUtils . equalsIgnoreCase ( condition . get ( "level" ) . toString ( ) , "ERROR" ) ) { codeQualityAuditResponse . addAuditStatus ( CodeQualityAuditStatus . 
CODE_QUALITY_THRESHOLD_CODE_COVERAGE_MET ) ; } }
public class EthiopianCalendar { /** * / * [ deutsch ] * < p > Erzeugt ein neues & auml ; thiopisches Kalenderdatum . < / p > * @ param era Ethiopian era * @ param yearOfEra Ethiopian year of era in the range 1-9999 ( 1-15499 if amete alem ) * @ param month Ethiopian month in the range 1-13 * @ param dayOfMonth Ethiopian day of month in the range 1-30 * @ return new instance of { @ code EthiopianCalendar } * @ throws IllegalArgumentException in case of any inconsistencies * @ since 3.11/4.8 */ public static EthiopianCalendar of ( EthiopianEra era , int yearOfEra , int month , int dayOfMonth ) { } }
if ( ! CALSYS . isValid ( era , yearOfEra , month , dayOfMonth ) ) { throw new IllegalArgumentException ( "Invalid Ethiopian date: era=" + era + ", year=" + yearOfEra + ", month=" + month + ", day=" + dayOfMonth ) ; } return new EthiopianCalendar ( mihret ( era , yearOfEra ) , month , dayOfMonth ) ;
public class FieldUtils {
    /**
     * Writes a {@code public static} {@link Field}.
     *
     * @param field to write
     * @param value to set
     * @throws IllegalArgumentException if the field is {@code null} or not {@code static},
     *         or {@code value} is not assignable
     * @throws IllegalAccessException if the field is not {@code public} or is {@code final}
     */
    public static void writeStaticField(final Field field, final Object value) throws IllegalAccessException {
        // Delegates with forceAccess=false: the field must already be accessible.
        writeStaticField(field, value, false);
    }
}
public class FileIoUtil { /** * Read properties from given filename * ( returns empty { @ link Properties } object on failure ) . * @ param _ fileName The properties file to read * @ param _ props optional properties object , if null a new object created in the method * @ return { @ link Properties } object */ public static Properties readPropertiesFromFile ( String _fileName , Properties _props ) { } }
Properties props = _props == null ? new Properties ( ) : _props ; LOGGER . debug ( "Trying to read properties from file: " + _fileName ) ; Properties newProperties = readProperties ( new File ( _fileName ) ) ; if ( newProperties != null ) { LOGGER . debug ( "Successfully read properties from file: " + _fileName ) ; props . putAll ( newProperties ) ; } return props ;
public class MessagePacker { /** * Writes header of a Binary value . * You MUST call { @ link # writePayload ( byte [ ] ) } or { @ link # addPayload ( byte [ ] ) } method to write body binary . * @ param len number of bytes of a binary to be written * @ return this * @ throws IOException when underlying output throws IOException */ public MessagePacker packBinaryHeader ( int len ) throws IOException { } }
if ( len < ( 1 << 8 ) ) { writeByteAndByte ( BIN8 , ( byte ) len ) ; } else if ( len < ( 1 << 16 ) ) { writeByteAndShort ( BIN16 , ( short ) len ) ; } else { writeByteAndInt ( BIN32 , len ) ; } return this ;
public class SessionSchedulable { /** * Adjust the locality requirement based on the current * locality and the locality wait times . * If the current required locality is node and enough time * has passed - update it to rack . * If the current is rack and enough time has passed - update to any * @ param now current time * @ param nodeWait node locality wait * @ param rackWait rack locality wait */ public void adjustLocalityRequirement ( long now , long nodeWait , long rackWait ) { } }
if ( ! localityWaitStarted ) { return ; } if ( localityRequired == LocalityLevel . ANY ) { return ; } if ( localityRequired == LocalityLevel . NODE ) { if ( now - localityWaitStartTime > nodeWait ) { setLocalityLevel ( LocalityLevel . RACK ) ; } } if ( localityRequired == LocalityLevel . RACK ) { if ( now - localityWaitStartTime > rackWait ) { setLocalityLevel ( LocalityLevel . ANY ) ; } }
public class TileSparklineSkin {
    /**
     * Resizes and repositions the dynamic text nodes (value, unit, average,
     * high, low) relative to the current tile {@code size}.
     *
     * All factors are fractions of the tile size; they come from the skin's
     * fixed layout design.
     */
    private void resizeDynamicText() {
        // Value text: leave room for the unit text when it participates in layout.
        double maxWidth = unitText.isManaged() ? size * 0.725 : size * 0.9;
        double fontSize = size * 0.24;
        valueText.setFont(Fonts.latoRegular(fontSize));
        if (valueText.getLayoutBounds().getWidth() > maxWidth) {
            Helper.adjustTextSize(valueText, maxWidth, fontSize);
        }
        // Right-align the value, shifted left by the unit's width when visible.
        if (unitText.isVisible()) {
            valueText.relocate(size * 0.925 - valueText.getLayoutBounds().getWidth() - unitText.getLayoutBounds().getWidth(), size * 0.15);
        } else {
            valueText.relocate(size * 0.95 - valueText.getLayoutBounds().getWidth(), size * 0.15);
        }
        // Smaller font for the average/high/low annotations.
        maxWidth = size * 0.3;
        fontSize = size * 0.05;
        averageText.setFont(Fonts.latoRegular(fontSize));
        if (averageText.getLayoutBounds().getWidth() > maxWidth) {
            Helper.adjustTextSize(averageText, maxWidth, fontSize);
        }
        // Put the average label below the line in the upper graph half,
        // above the line in the lower half, so it stays inside the graph.
        if (averageLine.getStartY() < graphBounds.getY() + graphBounds.getHeight() * 0.5) {
            averageText.setY(averageLine.getStartY() + (size * 0.0425));
        } else {
            averageText.setY(averageLine.getStartY() - (size * 0.0075));
        }
        highText.setFont(Fonts.latoRegular(fontSize));
        if (highText.getLayoutBounds().getWidth() > maxWidth) {
            Helper.adjustTextSize(highText, maxWidth, fontSize);
        }
        // High label sits just above the graph, low label near the tile bottom.
        highText.setY(graphBounds.getY() - size * 0.0125);
        lowText.setFont(Fonts.latoRegular(fontSize));
        if (lowText.getLayoutBounds().getWidth() > maxWidth) {
            Helper.adjustTextSize(lowText, maxWidth, fontSize);
        }
        lowText.setY(size * 0.9);
    }
}
public class Cluster {
    /**
     * Chooses {@code ndocs} initial centroids using distance-weighted sampling
     * (k-means++-style seeding with 1 - inner_product as the distance).
     *
     * @param ndocs number of centroids to pick (capped at the cluster size)
     * @param docs  output list the chosen documents are appended to
     */
    void choose_smartly(int ndocs, List<Document> docs) {
        int siz = size();
        // closest[i] = distance from document i to its nearest chosen center so far.
        double[] closest = new double[siz];
        if (siz < ndocs)
            ndocs = siz;
        int index, count = 0;
        // First center: uniformly random.
        index = random.nextInt(siz);
        docs.add(documents_.get(index));
        ++count;
        // potential = sum of distances to the (single) chosen center.
        double potential = 0.0;
        for (int i = 0; i < documents_.size(); i++) {
            double dist = 1.0 - SparseVector.inner_product(documents_.get(i).feature(), documents_.get(index).feature());
            potential += dist;
            closest[i] = dist;
        }
        // Remaining centers: sample proportional to distance from nearest center.
        while (count < ndocs) {
            double randval = random.nextDouble() * potential;
            // Walk the cumulative distance distribution until randval is exhausted.
            for (index = 0; index < documents_.size(); index++) {
                double dist = closest[index];
                if (randval <= dist)
                    break;
                randval -= dist;
            }
            // Guard against falling off the end from floating-point round-off.
            if (index == documents_.size())
                index--;
            docs.add(documents_.get(index));
            ++count;
            // Refresh closest[] against the newly chosen center and recompute potential.
            double new_potential = 0.0;
            for (int i = 0; i < documents_.size(); i++) {
                double dist = 1.0 - SparseVector.inner_product(documents_.get(i).feature(), documents_.get(index).feature());
                double min = closest[i];
                if (dist < min) {
                    closest[i] = dist;
                    min = dist;
                }
                new_potential += min;
            }
            potential = new_potential;
        }
    }
}
public class XCodeCopySourcesMojo {
    /**
     * Returns the part of the path between the project base directory and the
     * project build directory. Assumes the build directory is located below
     * the base directory.
     *
     * @return relative path from base dir to build dir
     */
    private String getProjectBuildDirectory() {
        // Delegates the relative-path computation to the project's FileUtils helper.
        return com.sap.prd.mobile.ios.mios.FileUtils.getDelta(project.getBasedir(), new File(project.getBuild().getDirectory()));
    }
}
public class AtlasClient { /** * Supports Partial updates * Updates properties set in the definition for the entity corresponding to guid * @ param guid guid * @ param entity entity definition */ public EntityResult updateEntity ( String guid , Referenceable entity ) throws AtlasServiceException { } }
String entityJson = InstanceSerialization . toJson ( entity , true ) ; LOG . debug ( "Updating entity id {} with {}" , guid , entityJson ) ; JSONObject response = callAPIWithBodyAndParams ( API . UPDATE_ENTITY_PARTIAL , entityJson , guid ) ; return extractEntityResult ( response ) ;
public class BatchGetDeploymentInstancesResult { /** * Information about the instance . * @ param instancesSummary * Information about the instance . */ public void setInstancesSummary ( java . util . Collection < InstanceSummary > instancesSummary ) { } }
if ( instancesSummary == null ) { this . instancesSummary = null ; return ; } this . instancesSummary = new com . amazonaws . internal . SdkInternalList < InstanceSummary > ( instancesSummary ) ;
public class Icon {
    /**
     * Offsets this {@link Icon} by a specified amount. <b>offsetX</b> and
     * <b>offsetY</b> are specified in pixels.
     *
     * @param offsetX the x offset
     * @param offsetY the y offset
     * @return this {@link Icon}
     */
    public Icon offset(int offsetX, int offsetY) {
        // Re-initialize the sprite at the shifted origin, keeping the sheet
        // dimensions and rotation unchanged (sheetWidth/sheetHeight are fields
        // set elsewhere — presumably by the constructor; confirm).
        initSprite(sheetWidth, sheetHeight, getOriginX() + offsetX, getOriginY() + offsetY, isRotated());
        return this;
    }
}
public class MetricValues {
    /**
     * Updates {@code h} with the contents of {@code value}.
     *
     * NOTE(review): only the value's labels map is folded into the hash —
     * the metric payload itself is not; confirm this is the intended signing
     * granularity.
     *
     * @param h     a {@link Hasher}
     * @param value a {@code MetricValue} to be added to the hash
     * @return the {@code Hasher}, to allow fluent-style usage
     */
    public static Hasher putMetricValue(Hasher h, MetricValue value) {
        Signing.putLabels(h, value.getLabelsMap());
        return h;
    }
}
public class WorkspacesInner {
    /**
     * Creates a Workspace.
     *
     * @param resourceGroupName name of the resource group to which the resource belongs
     * @param workspaceName     the name of the workspace; 1-64 alphanumeric, dash (-) or underscore (_) characters
     * @param parameters        workspace creation parameters
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by the server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the WorkspaceInner object if successful
     */
    public WorkspaceInner create(String resourceGroupName, String workspaceName, WorkspaceCreateParameters parameters) {
        // Blocks on the async long-running operation and returns its final body.
        return createWithServiceResponseAsync(resourceGroupName, workspaceName, parameters).toBlocking().last().body();
    }
}
public class MiniSatBackbone {
    /**
     * Builds the backbone object from the computed backbone literals.
     *
     * Only the set(s) matching the configured backbone type are populated; the
     * others are passed to the Backbone constructor as null.
     *
     * @param variables relevant variables
     * @return backbone
     */
    private Backbone buildBackbone(final Collection<Variable> variables) {
        final SortedSet<Variable> posBackboneVars = isBothOrPositiveType() ? new TreeSet<Variable>() : null;
        final SortedSet<Variable> negBackboneVars = isBothOrNegativeType() ? new TreeSet<Variable>() : null;
        final SortedSet<Variable> optionalVars = isBothType() ? new TreeSet<Variable>() : null;
        for (final Variable var : variables) {
            final Integer idx = this.name2idx.get(var.name());
            if (idx == null) {
                // Variable unknown to the solver: it is unconstrained, hence optional.
                if (isBothType()) {
                    optionalVars.add(var);
                }
            } else {
                // Classify by the solver's tri-state verdict for this variable.
                switch (this.backboneMap.get(idx)) {
                    case TRUE:
                        if (isBothOrPositiveType()) {
                            posBackboneVars.add(var);
                        }
                        break;
                    case FALSE:
                        if (isBothOrNegativeType()) {
                            negBackboneVars.add(var);
                        }
                        break;
                    case UNDEF:
                        if (isBothType()) {
                            optionalVars.add(var);
                        }
                        break;
                    default:
                        throw new IllegalStateException("Unknown tristate: " + this.backboneMap.get(idx));
                }
            }
        }
        return new Backbone(posBackboneVars, negBackboneVars, optionalVars);
    }
}
public class InstanceHealthSummaryMarshaller {
    /**
     * Marshalls the given {@code InstanceHealthSummary} into the protocol
     * representation. Field order follows the binding definitions.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(InstanceHealthSummary instanceHealthSummary, ProtocolMarshaller protocolMarshaller) {
        if (instanceHealthSummary == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(instanceHealthSummary.getInstanceName(), INSTANCENAME_BINDING);
            protocolMarshaller.marshall(instanceHealthSummary.getInstanceHealth(), INSTANCEHEALTH_BINDING);
            protocolMarshaller.marshall(instanceHealthSummary.getInstanceHealthReason(), INSTANCEHEALTHREASON_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class TiffDocument { /** * Removes the tag . * @ param tagName the tag name * @ return true , if successful */ public boolean removeTag ( String tagName ) { } }
boolean result = false ; IFD ifd = firstIFD ; while ( ifd != null ) { if ( ifd . containsTagId ( TiffTags . getTagId ( tagName ) ) ) { ifd . removeTag ( tagName ) ; } ifd = ifd . getNextIFD ( ) ; } createMetadataDictionary ( ) ; return result ;
public class TernaryVector { /** * { @ inheritDoc } . */ public int [ ] toArray ( ) { } }
int [ ] array = new int [ length ] ; for ( int p : positiveDimensions ) array [ p ] = 1 ; for ( int n : negativeDimensions ) array [ n ] = - 1 ; return array ;
public class ThymeLeafTemplateImplementation { /** * Renders the template . * @ param controller the controller having requested the rendering . * @ param variables the parameters * @ return the rendered object . */ @ Override public synchronized Renderable < ? > render ( Controller controller , Map < String , Object > variables ) { } }
HashMap < String , Object > map = new HashMap < > ( variables ) ; if ( ! map . containsKey ( ExtendedOGNLExpressionEvaluator . BUNDLE_VAR_KEY ) ) { map . put ( ExtendedOGNLExpressionEvaluator . BUNDLE_VAR_KEY , bundle ) ; } return templateEngine . process ( this , controller , router , assets , map ) ;
public class ThreadExtensions {
    /**
     * Runs the given supplier asynchronously on a custom pool sized to the
     * given number of CPU cores and waits for the result.
     *
     * @param supplier the {@link Supplier} task to execute
     * @param cpuCores the parallelism of the pool to run with
     * @param <T>      the generic type of the result
     * @return the result of the given task
     * @throws ExecutionException   if the computation threw an exception
     * @throws InterruptedException if the current thread was interrupted while waiting
     */
    public static <T> T runAsyncSupplierWithCpuCores(Supplier<T> supplier, int cpuCores)
            throws ExecutionException, InterruptedException {
        // BUG FIX: the original never shut the pool down, leaking its worker
        // threads on every call. shutdown() in finally lets in-flight work
        // finish while releasing the pool afterwards.
        final ForkJoinPool forkJoinPool = new ForkJoinPool(cpuCores);
        try {
            CompletableFuture<T> future = CompletableFuture.supplyAsync(supplier, forkJoinPool);
            return future.get();
        } finally {
            forkJoinPool.shutdown();
        }
    }
}
public class CircularImageView { /** * Default implementation of check mark { @ code Paint } creation . * @ return */ protected Paint getCheckMarkPaint ( ) { } }
Paint paint = new Paint ( Paint . ANTI_ALIAS_FLAG ) ; paint . setColor ( DEFAULT_CHECK_STROKE_COLOR ) ; paint . setStyle ( Paint . Style . STROKE ) ; paint . setStrokeWidth ( getCheckMarkStrokeWidthInPixels ( ) ) ; return paint ;
public class CreateApplicationVersionRequestMarshaller {
    /**
     * Marshalls the given {@code CreateApplicationVersionRequest} into the
     * protocol representation. Field order follows the binding definitions.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(CreateApplicationVersionRequest createApplicationVersionRequest, ProtocolMarshaller protocolMarshaller) {
        if (createApplicationVersionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createApplicationVersionRequest.getApplicationId(), APPLICATIONID_BINDING);
            protocolMarshaller.marshall(createApplicationVersionRequest.getSemanticVersion(), SEMANTICVERSION_BINDING);
            protocolMarshaller.marshall(createApplicationVersionRequest.getSourceCodeArchiveUrl(), SOURCECODEARCHIVEURL_BINDING);
            protocolMarshaller.marshall(createApplicationVersionRequest.getSourceCodeUrl(), SOURCECODEURL_BINDING);
            protocolMarshaller.marshall(createApplicationVersionRequest.getTemplateBody(), TEMPLATEBODY_BINDING);
            protocolMarshaller.marshall(createApplicationVersionRequest.getTemplateUrl(), TEMPLATEURL_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class TaskMemoryManager {
    /**
     * Frees a block of memory allocated via {@link TaskMemoryManager#allocatePage}.
     *
     * The assertions guard against double-free and freeing memory that was
     * never page-allocated; execution memory is released back to the consumer
     * after the page itself is returned to the allocator.
     */
    public void freePage(MemoryBlock page, MemoryConsumer consumer) {
        assert (page.pageNumber != MemoryBlock.NO_PAGE_NUMBER) : "Called freePage() on memory that wasn't allocated with allocatePage()";
        assert (page.pageNumber != MemoryBlock.FREED_IN_ALLOCATOR_PAGE_NUMBER) : "Called freePage() on a memory block that has already been freed";
        assert (page.pageNumber != MemoryBlock.FREED_IN_TMM_PAGE_NUMBER) : "Called freePage() on a memory block that has already been freed";
        assert (allocatedPages.get(page.pageNumber));
        pageTable[page.pageNumber] = null;
        // Only the bitset update is synchronized; the pageTable slot write above
        // follows the original's ordering.
        synchronized (this) {
            allocatedPages.clear(page.pageNumber);
        }
        if (logger.isTraceEnabled()) {
            logger.trace("Freed page number {} ({} bytes)", page.pageNumber, page.size());
        }
        // Capture the size before the block is handed back to the allocator.
        long pageSize = page.size();
        // Clear the page number before passing the block to the MemoryAllocator's free().
        // Doing this allows the MemoryAllocator to detect when a TaskMemoryManager-managed
        // page has been inappropriately directly freed without calling TMM.freePage().
        page.pageNumber = MemoryBlock.FREED_IN_TMM_PAGE_NUMBER;
        memoryManager.tungstenMemoryAllocator().free(page);
        releaseExecutionMemory(pageSize, consumer);
    }
}
public class GetActiveNamesResult { /** * The list of active names returned by the get active names request . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setActiveNames ( java . util . Collection ) } or { @ link # withActiveNames ( java . util . Collection ) } if you want to * override the existing values . * @ param activeNames * The list of active names returned by the get active names request . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetActiveNamesResult withActiveNames ( String ... activeNames ) { } }
if ( this . activeNames == null ) { setActiveNames ( new java . util . ArrayList < String > ( activeNames . length ) ) ; } for ( String ele : activeNames ) { this . activeNames . add ( ele ) ; } return this ;
public class WebDriverBackedEmbeddedBrowser { /** * alert , prompt , and confirm behave as if the OK button is always clicked . */ @ Override public void handlePopups ( ) { } }
/* * try { executeJavaScript ( " window . alert = function ( msg ) { return true ; } ; " + * " window . confirm = function ( msg ) { return true ; } ; " + * " window . prompt = function ( msg ) { return true ; } ; " ) ; } catch ( CrawljaxException e ) { * LOGGER . error ( " Handling of PopUp windows failed " , e ) ; } */ /* Workaround : Popups handling currently not supported in PhantomJS . */ if ( browser instanceof PhantomJSDriver ) { return ; } if ( ExpectedConditions . alertIsPresent ( ) . apply ( browser ) != null ) { try { browser . switchTo ( ) . alert ( ) . accept ( ) ; LOGGER . info ( "Alert accepted" ) ; } catch ( Exception e ) { LOGGER . error ( "Handling of PopUp windows failed" ) ; } }
public class SmbResourceLocatorImpl { /** * { @ inheritDoc } * @ see jcifs . SmbResourceLocator # getName ( ) */ @ Override public String getName ( ) { } }
String urlpath = getURLPath ( ) ; String shr = getShare ( ) ; if ( urlpath . length ( ) > 1 ) { int i = urlpath . length ( ) - 2 ; while ( urlpath . charAt ( i ) != '/' ) { i -- ; } return urlpath . substring ( i + 1 ) ; } else if ( shr != null ) { return shr + '/' ; } else if ( this . url . getHost ( ) . length ( ) > 0 ) { return this . url . getHost ( ) + '/' ; } else { return "smb://" ; }
public class CPFriendlyURLEntryUtil {
    /**
     * Returns the first cp friendly url entry in the ordered set where uuid = &#63;.
     *
     * @param uuid the uuid
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching cp friendly url entry
     * @throws NoSuchCPFriendlyURLEntryException if a matching cp friendly url entry could not be found
     */
    public static CPFriendlyURLEntry findByUuid_First(String uuid,
            OrderByComparator<CPFriendlyURLEntry> orderByComparator)
        throws com.liferay.commerce.product.exception.NoSuchCPFriendlyURLEntryException {
        // Thin static facade: delegate to the configured persistence bean.
        return getPersistence().findByUuid_First(uuid, orderByComparator);
    }
}
public class GenericInfoUtils {
    /**
     * Type analysis in context of an analyzed type. For example, resolution of a field type
     * class in context of the analyzed class (so its generics resolve correctly). The only
     * difference with usual type resolution is the known root generics.
     * The result is not intended to be cached as it's context-sensitive.
     *
     * @param context generics context of containing class
     * @param type type to analyze (important: this must be a generified type and not a raw
     *             class in order to properly resolve generics)
     * @param ignoreClasses classes to exclude from hierarchy analysis
     * @return analyzed type generics info
     */
    public static GenericsInfo create(final GenericsContext context,
                                     final Type type,
                                     final Class<?>... ignoreClasses) {
        // Root generics are required only to properly solve the type.
        final Map<String, Type> knownGenerics = context.visibleGenericsMap();
        // First step: replace transitive generics with their direct values.
        final Type resolvedType = GenericsUtils.resolveTypeVariables(type, knownGenerics);
        final Class<?> targetClass = context.resolveClass(resolvedType);
        LinkedHashMap<String, Type> targetGenerics =
                GenericsResolutionUtils.resolveGenerics(resolvedType, knownGenerics);
        targetGenerics = GenericsResolutionUtils.fillOuterGenerics(
                resolvedType, targetGenerics, context.getGenericsInfo().getTypesMap());
        // Possible owner types are carried over from the parent context.
        return create(targetClass,
                targetGenerics,
                usePossiblyOwnerGenerics(targetClass, context.getGenericsInfo()),
                ignoreClasses);
    }
}
public class ReviewsImpl {
    /**
     * Publish video review to make it available for review.
     *
     * @param teamName Your team name.
     * @param reviewId Id of the review.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> publishVideoReviewWithServiceResponseAsync(String teamName, String reviewId) {
        // Fail fast on missing client configuration and parameters; the check
        // order determines which error the caller sees first.
        if (this.client.baseUrl() == null) {
            throw new IllegalArgumentException("Parameter this.client.baseUrl() is required and cannot be null.");
        }
        if (teamName == null) {
            throw new IllegalArgumentException("Parameter teamName is required and cannot be null.");
        }
        if (reviewId == null) {
            throw new IllegalArgumentException("Parameter reviewId is required and cannot be null.");
        }
        // Resolve the "{baseUrl}" host placeholder for the REST proxy.
        String parameterizedHost = Joiner.on(", ").join("{baseUrl}", this.client.baseUrl());
        return service.publishVideoReview(teamName, reviewId, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize and validate the raw HTTP response.
                        ServiceResponse<Void> clientResponse = publishVideoReviewDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface deserialization/service errors through the stream.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class SSLUtils {
    /**
     * Records the current limit of each buffer in the parallel limits array.
     * Null buffers are recorded with a limit of 0; null arrays are ignored.
     *
     * @param buffers buffers whose limits should be captured (may be null)
     * @param limits output array receiving the limits (may be null)
     */
    public static void getBufferLimits(WsByteBuffer[] buffers, int[] limits) {
        // Nothing to do for errant null parameters.
        if (buffers == null || limits == null) {
            return;
        }
        // Bound by the shorter array to protect against mismatched lengths.
        final int count = (buffers.length < limits.length) ? buffers.length : limits.length;
        for (int i = 0; i < count; i++) {
            final WsByteBuffer buffer = buffers[i];
            final boolean debug = TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled();
            if (buffer != null) {
                limits[i] = buffer.limit();
                if (debug) {
                    Tr.debug(tc, "getBufferLimits: buffer[" + i + "] limit of " + limits[i]);
                }
            } else {
                // A null buffer contributes a limit of 0.
                limits[i] = 0;
                if (debug) {
                    Tr.debug(tc, "getBufferLimits: null buffer[" + i + "] limit of " + limits[i]);
                }
            }
        }
    }
}
public class XMLName {
    /**
     * Builds a property XMLName from a namespace URI and a local name.
     * TODO: marked deprecated by original author.
     */
    static XMLName formProperty(String uri, String localName) {
        // Wrap the raw URI in a Namespace and delegate to the main factory.
        return formProperty(XmlNode.Namespace.create(uri), localName);
    }
}
public class MPSelectorEvaluatorImpl { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . MPSelectorEvaluator # evaluateDiscriminator ( java . lang . String , java . lang . String ) */ public boolean evaluateDiscriminator ( String fullTopic , String wildcardTopic ) throws SIDiscriminatorSyntaxException { } }
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "evaluateDiscriminator" , new Object [ ] { fullTopic , wildcardTopic } ) ; boolean ret = mpm . evaluateDiscriminator ( fullTopic , wildcardTopic ) ; if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "evaluateDiscriminator" , new Boolean ( ret ) ) ; return ret ;
public class DirectoryPoller { /** * Blocks until the last poll - cycle has finished and all { @ link AfterStopEvent } has been * processed . */ public void awaitTermination ( ) { } }
try { latch . await ( ) ; } catch ( InterruptedException e ) { String message = "awaitTermination() method was interrupted!" ; throw new UnsupportedOperationException ( message , e ) ; }
public class FileBasedTemporalSemanticSpace { /** * Loads the { @ link TemporalSemanticSpace } from the text formatted file , * @ param sspaceFile a file in { @ link TSSpaceFormat # TEXT text } format */ private Map < String , SemanticVector > loadText ( File sspaceFile ) throws IOException { } }
LOGGER . info ( "loading text TSS from " + sspaceFile ) ; BufferedReader br = new BufferedReader ( new FileReader ( sspaceFile ) ) ; String [ ] header = br . readLine ( ) . split ( "\\s+" ) ; int words = Integer . parseInt ( header [ 0 ] ) ; dimensions = Integer . parseInt ( header [ 1 ] ) ; Map < String , SemanticVector > wordToSemantics = new HashMap < String , SemanticVector > ( words , 2f ) ; // read in each word for ( String line = null ; ( line = br . readLine ( ) ) != null ; ) { String [ ] wordAndSemantics = line . split ( "\\|" ) ; String word = wordAndSemantics [ 0 ] ; SemanticVector semantics = new SemanticVector ( dimensions ) ; LOGGER . info ( "loading " + wordAndSemantics . length + " timesteps for word " + word ) ; for ( int i = 1 ; i < wordAndSemantics . length ; ++ i ) { String [ ] timeStepAndValues = wordAndSemantics [ i ] . split ( " " ) ; long timeStep = Long . parseLong ( timeStepAndValues [ 0 ] ) ; updateTimeRange ( timeStep ) ; // Load that time step ' s vector . Note that we make the // assumption here that even though the T - Space is serialized in // a dense format , that the vector data is actually sparse , and // so it will be more efficient to store it as such . Map < Integer , Double > sparseArray = new IntegerMap < Double > ( ) ; for ( int j = 1 ; j < timeStepAndValues . length ; ++ j ) { sparseArray . put ( Integer . valueOf ( j - 1 ) , Double . valueOf ( timeStepAndValues [ j ] ) ) ; } semantics . setSemantics ( timeStep , sparseArray ) ; } wordToSemantics . put ( word , semantics ) ; } return wordToSemantics ;
public class RemoteRegionRegistry { /** * Close HTTP response object and its respective resources . * @ param response * the HttpResponse object . */ private void closeResponse ( ClientResponse response ) { } }
if ( response != null ) { try { response . close ( ) ; } catch ( Throwable th ) { logger . error ( "Cannot release response resource :" , th ) ; } }
public class OptionalBoolean { /** * Invokes the given mapping function on inner value if present . * @ param < U > the type of result value * @ param mapper mapping function * @ return an { @ code Optional } with transformed value if present , * otherwise an empty { @ code Optional } * @ throws NullPointerException if value is present and * { @ code mapper } is { @ code null } */ @ NotNull public < U > Optional < U > mapToObj ( @ NotNull BooleanFunction < U > mapper ) { } }
if ( ! isPresent ( ) ) { return Optional . empty ( ) ; } Objects . requireNonNull ( mapper ) ; return Optional . ofNullable ( mapper . apply ( value ) ) ;
public class TimeFilter { /** * Sets the start date . * @ param startDate */ public final void setStartDate ( Date startDate ) { } }
if ( this . startDate == null || ! this . startDate . equals ( startDate ) ) { if ( startDate != null && endDate != null && endDate . before ( startDate ) ) { throw new ParameterException ( "The start date must be before the end date of the filter." ) ; } this . startDate = startDate ; setChanged ( ) ; notifyObservers ( ) ; }
public class JDescTextField { /** * Change the font depending of whether you are displaying the description or text . * @ param bDescription If true , set the description text . */ public void changeFont ( boolean bDescription ) { } }
if ( m_fontNormal == null ) { m_fontNormal = this . getFont ( ) ; m_fontDesc = m_fontNormal . deriveFont ( Font . ITALIC ) ; m_colorNormal = this . getForeground ( ) ; m_colorDesc = Color . gray ; } if ( bDescription ) { this . setFont ( m_fontDesc ) ; this . setForeground ( m_colorDesc ) ; } else { this . setFont ( m_fontNormal ) ; this . setForeground ( m_colorNormal ) ; }
public class NonNsStreamWriter { /** * Package methods : */ @ Override public void writeStartElement ( StartElement elem ) throws XMLStreamException { } }
QName name = elem . getName ( ) ; writeStartElement ( name . getLocalPart ( ) ) ; @ SuppressWarnings ( "unchecked" ) Iterator < Attribute > it = elem . getAttributes ( ) ; while ( it . hasNext ( ) ) { Attribute attr = it . next ( ) ; name = attr . getName ( ) ; writeAttribute ( name . getLocalPart ( ) , attr . getValue ( ) ) ; }
public class MGOImpl {
    /**
     * Sets the value of the given structural feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.MGO__RG:
                // Replace the RG containment list with the supplied collection.
                getRG().clear();
                getRG().addAll((Collection<? extends MGORG>) newValue);
                return;
        }
        // Unknown feature: defer to the generated superclass handling.
        super.eSet(featureID, newValue);
    }
}
public class MTreeDirectoryEntry { /** * Sets the id of the routing object of this entry . * @ param objectID the id to be set */ @ Override public final boolean setRoutingObjectID ( DBID objectID ) { } }
if ( objectID == routingObjectID || DBIDUtil . equal ( objectID , routingObjectID ) ) { return false ; } this . routingObjectID = objectID ; return true ;
public class AmazonDaxClient { /** * Creates a new parameter group . A parameter group is a collection of parameters that you apply to all of the nodes * in a DAX cluster . * @ param createParameterGroupRequest * @ return Result of the CreateParameterGroup operation returned by the service . * @ throws ParameterGroupQuotaExceededException * You have attempted to exceed the maximum number of parameter groups . * @ throws ParameterGroupAlreadyExistsException * The specified parameter group already exists . * @ throws InvalidParameterGroupStateException * One or more parameters in a parameter group are in an invalid state . * @ throws ServiceLinkedRoleNotFoundException * @ throws InvalidParameterValueException * The value for a parameter is invalid . * @ throws InvalidParameterCombinationException * Two or more incompatible parameters were specified . * @ sample AmazonDax . CreateParameterGroup * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / dax - 2017-04-19 / CreateParameterGroup " target = " _ top " > AWS API * Documentation < / a > */ @ Override public CreateParameterGroupResult createParameterGroup ( CreateParameterGroupRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateParameterGroup ( request ) ;
public class WebSocketStreamHandler { /** * Get a specific input stream using its identifier . Caller is responsible for closing these * streams . * @ param stream The stream to return * @ return The specified stream . */ public synchronized InputStream getInputStream ( int stream ) { } }
if ( state == State . CLOSED ) throw new IllegalStateException ( ) ; if ( ! input . containsKey ( stream ) ) { try { PipedInputStream pipeIn = new PipedInputStream ( ) ; PipedOutputStream pipeOut = new PipedOutputStream ( pipeIn ) ; pipedOutput . put ( stream , pipeOut ) ; input . put ( stream , pipeIn ) ; } catch ( IOException ex ) { // This is _ very _ unlikely , as it requires the above constructor to fail . // don ' t force callers to catch , but still throw throw new IllegalStateException ( ex ) ; } } return input . get ( stream ) ;
public class FnDouble {
    /**
     * Returns a function that determines whether its target Double is null.
     *
     * @return a function yielding true if the target object is null, false if not
     */
    public static final Function<Double, Boolean> isNull() {
        // The generic FnObject.isNull() works for any input type; the raw
        // double-cast only narrows the declared type parameter to Double.
        return (Function<Double, Boolean>) ((Function) FnObject.isNull());
    }
}
public class Collections {
    /**
     * Sorts the specified list according to the order induced by the specified
     * comparator. All elements must be <i>mutually comparable</i> using the
     * comparator. This sort is guaranteed to be <i>stable</i>.
     * <p>The specified list must be modifiable, but need not be resizable.
     *
     * @implNote Apps targeting APIs {@code <= 25} observe backwards compatibility
     * behavior where this method was implemented on top of {@link List#toArray()},
     * {@link ListIterator#next()} and {@link ListIterator#set(Object)}.
     * Do not call this method from {@code List.sort()} since that can lead to
     * infinite recursion.
     *
     * @param <T> the class of the objects in the list
     * @param list the list to be sorted
     * @param c the comparator to determine the order of the list; {@code null}
     *          indicates natural ordering
     * @throws ClassCastException if the list contains elements that are not
     *         mutually comparable using the comparator
     * @throws UnsupportedOperationException if the list's list-iterator does
     *         not support the {@code set} operation
     * @throws IllegalArgumentException (optional) if the comparator violates
     *         the {@link Comparator} contract
     * @see List#sort(Comparator)
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public static <T> void sort(List<T> list, Comparator<? super T> c) {
        // Fast path: sort an exact ArrayList's backing array in place
        // (accesses the package-private elementData field; no copy needed).
        if (list.getClass() == ArrayList.class) {
            Arrays.sort(((ArrayList) list).elementData, 0, list.size(), (Comparator) c);
            return;
        }
        // General path: snapshot into an array, sort, then write the sorted
        // elements back through the list iterator (works for any modifiable list).
        Object[] a = list.toArray();
        Arrays.sort(a, (Comparator) c);
        ListIterator<T> i = list.listIterator();
        for (int j = 0; j < a.length; j++) {
            i.next();
            i.set((T) a[j]);
        }
    }
}
public class RegexValidator { /** * Validate a value against the set of regular expressions * returning the array of matched groups . * @ param value The value to validate . * @ return String array of the < i > groups < / i > matched if valid or < code > null < / code > if invalid */ @ SuppressFBWarnings ( value = "PZLA" , justification = "Null is checked, not empty array. API is clear as well." ) public String [ ] match ( String value ) { } }
if ( value == null ) { return null ; } for ( int i = 0 ; i < patterns . length ; i ++ ) { Matcher matcher = patterns [ i ] . matcher ( value ) ; if ( matcher . matches ( ) ) { int count = matcher . groupCount ( ) ; String [ ] groups = new String [ count ] ; for ( int j = 0 ; j < count ; j ++ ) { groups [ j ] = matcher . group ( j + 1 ) ; } return groups ; } } return null ;
public class ManagementLocksInner {
    /**
     * Creates or updates a management lock at the resource group level.
     * When you apply a lock at a parent scope, all child resources inherit the same lock.
     * To create management locks, you must have access to Microsoft.Authorization/* or
     * Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and
     * User Access Administrator are granted those actions.
     *
     * @param resourceGroupName The name of the resource group to lock.
     * @param lockName The lock name. The lock name can be a maximum of 260 characters.
     *                 It cannot contain &lt;, &gt; %, &amp;, :, \, ?, /, or any control characters.
     * @param parameters The management lock parameters.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ManagementLockObjectInner object
     */
    public Observable<ServiceResponse<ManagementLockObjectInner>> createOrUpdateAtResourceGroupLevelWithServiceResponseAsync(String resourceGroupName, String lockName, ManagementLockObjectInner parameters) {
        // Fail fast on missing parameters and client configuration; the check
        // order determines which error the caller sees first.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (lockName == null) {
            throw new IllegalArgumentException("Parameter lockName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Deep-validate the request body against its model constraints.
        Validator.validate(parameters);
        return service.createOrUpdateAtResourceGroupLevel(resourceGroupName, lockName, this.client.subscriptionId(), parameters, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ManagementLockObjectInner>>>() {
                @Override
                public Observable<ServiceResponse<ManagementLockObjectInner>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize and validate the raw HTTP response.
                        ServiceResponse<ManagementLockObjectInner> clientResponse = createOrUpdateAtResourceGroupLevelDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface deserialization/service errors through the stream.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class WarningPropertySet { /** * Check whether or not the given WarningProperty has the given attribute * value . * @ param prop * the WarningProperty * @ param value * the attribute value * @ return true if the set contains the WarningProperty and has an attribute * equal to the one given , false otherwise */ public boolean checkProperty ( T prop , Object value ) { } }
Object attribute = getProperty ( prop ) ; return ( attribute != null && attribute . equals ( value ) ) ;
public class EncoderStream { /** * tell the EncoderStream how large buffer it prefers . . . */ public void write ( final byte [ ] pBytes , final int pOffset , final int pLength ) throws IOException { } }
if ( ! flushOnWrite && pLength < buffer . remaining ( ) ) { // Buffer data buffer . put ( pBytes , pOffset , pLength ) ; } else { // Encode data already in the buffer encodeBuffer ( ) ; // Encode rest without buffering encoder . encode ( out , ByteBuffer . wrap ( pBytes , pOffset , pLength ) ) ; }
public class PoolOperations {
    /**
     * Enables automatic scaling on the specified pool.
     *
     * @param poolId
     *            The ID of the pool.
     * @throws BatchErrorException
     *             Exception thrown when an error response is received from the
     *             Batch service.
     * @throws IOException
     *             Exception thrown when there is an error in
     *             serialization/deserialization of data sent to/received from the
     *             Batch service.
     */
    public void enableAutoScale(String poolId) throws BatchErrorException, IOException {
        // Delegate to the full overload with all optional arguments defaulted
        // to null (presumably formula, evaluation interval and behaviors —
        // TODO confirm against the overload's signature).
        enableAutoScale(poolId, null, null, null);
    }
}
public class AWSIotClient { /** * Associates a Device Defender security profile with a thing group or with this account . Each thing group or * account can have up to five security profiles associated with it . * @ param attachSecurityProfileRequest * @ return Result of the AttachSecurityProfile operation returned by the service . * @ throws InvalidRequestException * The request is not valid . * @ throws ResourceNotFoundException * The specified resource does not exist . * @ throws LimitExceededException * A limit has been exceeded . * @ throws VersionConflictException * An exception thrown when the version of an entity specified with the < code > expectedVersion < / code > * parameter does not match the latest version in the system . * @ throws ThrottlingException * The rate exceeds the limit . * @ throws InternalFailureException * An unexpected error has occurred . * @ sample AWSIot . AttachSecurityProfile */ @ Override public AttachSecurityProfileResult attachSecurityProfile ( AttachSecurityProfileRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAttachSecurityProfile ( request ) ;
public class ObjectParameter { /** * Parse a generic list . * @ param serializedObject the concatenated list * @ return the list object */ private Object parseListParameter ( final String serializedObject ) { } }
final List < Object > res = new ArrayList < > ( ) ; final Class < ? > objectType = ( ( List < ? > ) this . object ) . get ( 0 ) . getClass ( ) ; for ( final String item : serializedObject . split ( ";" ) ) { res . add ( parseObjectString ( objectType , item ) ) ; } return res ;
public class HAConfUtil { /** * Is value of b bigger than value of a . * @ return True , if b is bigger than a . Always true , if value of a is null . */ static boolean isBigger ( Long a , Long b ) { } }
return a == null || ( b != null && b > a ) ;