signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Bitlist { /** * Returns false if bit is clear ( zero ) , true if bit is set ( one ) . If bit * is beyond the file buffer size , false is returned . */ public boolean get ( long index ) throws IOException { } }
try { lock ( ) . acquireReadLock ( ) ; int value = mFile . read ( index >> 3 ) ; return value > 0 && ( ( value << ( index & 7 ) ) & 0x80 ) != 0 ; } catch ( InterruptedException e ) { throw new InterruptedIOException ( ) ; } finally { lock ( ) . releaseLock ( ) ; }
public class DebugRingSet {

    /**
     * {@inheritDoc}
     */
    @Override
    public IRingSet getRings(IAtom atom) {
        // Log the queried atom, then delegate to the non-debug parent implementation.
        logger.debug("Getting rings for atom: ", atom);
        return super.getRings(atom);
    }
}
public class AmazonEC2Client {

    /**
     * Describes the specified bundle tasks or all of your bundle tasks.
     * <note>
     * Completed bundle tasks are listed for only a limited time. If your bundle task is no longer in the list, you can
     * still register an AMI from it. Just use <code>RegisterImage</code> with the Amazon S3 bucket name and image
     * manifest name you provided to the bundle task.
     * </note>
     *
     * @param request the describe-bundle-tasks request
     * @return Result of the DescribeBundleTasks operation returned by the service.
     * @sample AmazonEC2.DescribeBundleTasks
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeBundleTasks" target="_top">AWS API
     *      Documentation</a>
     */
    @Override
    public DescribeBundleTasksResult describeBundleTasks(DescribeBundleTasksRequest request) {
        // Run the pre-execution hooks (request handlers/mutation) before dispatching.
        request = beforeClientExecution(request);
        return executeDescribeBundleTasks(request);
    }
}
public class VietnameseContextGenerator {

    /**
     * Generates context predicate strings for the word at {@code pos} of the
     * sentence, one per configured context-predicate template.
     *
     * @see jvntextpro.data.ContextGenerator#getContext(jvntextpro.data.Sentence, int)
     */
    @Override
    public String[] getContext(Sentence sent, int pos) {
        List<String> cps = new ArrayList<String>();
        // One iteration per context-predicate template (parallel lists cpnames/paras).
        for (int it = 0; it < cpnames.size(); ++it) {
            String cp = cpnames.get(it);
            Vector<Integer> paras = this.paras.get(it);
            String cpvalue = "";
            String word = "";
            // Concatenate the words at each relative offset in this template.
            for (int i = 0; i < paras.size(); ++i) {
                if (pos + paras.get(i) < 0 || pos + paras.get(i) >= sent.size()) {
                    // Offset falls outside the sentence: clear any value and skip it.
                    cpvalue = "";
                    continue;
                }
                word += sent.getWordAt(pos + paras.get(i)) + " ";
            }
            word = word.trim().toLowerCase();
            // Only the "not_valid_vnsyll" template emits a value, and only when the
            // assembled word is NOT a valid Vietnamese syllable.
            VnSyllParser parser = new VnSyllParser(word);
            if (!parser.isValidVnSyllable() && cp.equals("not_valid_vnsyll"))
                cpvalue = "nvs:" + word;
            if (!cpvalue.equals(""))
                cps.add(cpvalue);
        }
        String[] ret = new String[cps.size()];
        return cps.toArray(ret);
    }
}
public class NDArrayMessage {

    /**
     * Get the current time in UTC, in milliseconds since the epoch.
     *
     * @return the current time in UTC in milliseconds
     */
    public static long getCurrentTimeUtc() {
        // Epoch milliseconds are time-zone independent, so the original round trip
        // through ZonedDateTime (Instant -> atZone(UTC) -> toInstant) added nothing.
        return Instant.now().toEpochMilli();
    }
}
public class AbstractProcessor { /** * If the processor class is annotated with { @ link * SupportedOptions } , return an unmodifiable set with the same set * of strings as the annotation . If the class is not so * annotated , an empty set is returned . * @ return the options recognized by this processor , or an empty * set if none */ public Set < String > getSupportedOptions ( ) { } }
SupportedOptions so = this . getClass ( ) . getAnnotation ( SupportedOptions . class ) ; if ( so == null ) return Collections . emptySet ( ) ; else return arrayToSet ( so . value ( ) , false ) ;
public class Coref {

    /**
     * Setter for the {@code ref} feature (link to another Coref annotation).
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setRef(Coref v) {
        // Generated UIMA accessor: verify the feature exists in the type system
        // before writing the FS reference through the low-level CAS API.
        if (Coref_Type.featOkTst && ((Coref_Type) jcasType).casFeat_ref == null)
            jcasType.jcas.throwFeatMissing("ref", "de.julielab.jules.types.muc7.Coref");
        jcasType.ll_cas.ll_setRefValue(addr, ((Coref_Type) jcasType).casFeatCode_ref,
                jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class SystemObserver { /** * Method to prefetch the GAID and LAT values . * @ param context Context . * @ param callback { @ link GAdsParamsFetchEvents } instance to notify process completion * @ return { @ link Boolean } with true if GAID fetch process started . */ boolean prefetchGAdsParams ( Context context , GAdsParamsFetchEvents callback ) { } }
boolean isPrefetchStarted = false ; if ( TextUtils . isEmpty ( GAIDString_ ) ) { isPrefetchStarted = true ; new GAdsPrefetchTask ( context , callback ) . executeTask ( ) ; } return isPrefetchStarted ;
public class GuiceBindingsModule {

    /**
     * Important moment: request scoped jersey objects must be bound to guice request scope (if guice web used)
     * because otherwise scope delegation to other thread will not work
     * (see {@link com.google.inject.servlet.ServletScopes#transferRequest(java.util.concurrent.Callable)}).
     * WARNING: bean instance must be obtained in current (request) thread in order to be used later
     * inside transferred thread (simply call {@code provider.get()} (for jersey-managed bean like {@link UriInfo})
     * before {@code ServletScopes.transferRequest()}.
     *
     * @param type   jersey type to bind
     * @param global true for global type binding
     */
    private void jerseyToGuiceBinding(final Class<?> type, final boolean global) {
        final ScopedBindingBuilder binding = bindJerseyComponent(binder(), provider, type);
        // Only non-global bindings under servlet support get the request scope;
        // global bindings keep the default (unscoped) behavior.
        if (!global && guiceServletSupport) {
            binding.in(RequestScoped.class);
        }
    }
}
public class FluentBy {

    /**
     * Finds elements via a composite of other By strategies.
     *
     * @param b0 tag-name locator applied first
     * @param b1 class-name locator applied second
     * @return a composite locator combining both strategies
     */
    public static ByComposite composite(By.ByTagName b0, By.ByClassName b1) {
        return new ByComposite(b0, b1);
    }
}
public class Check { /** * Ensures that a given position index is valid within the size of an array , list or string . . . * @ param index * index of an array , list or string * @ param size * size of an array list or string * @ return the index * @ throws IllegalPositionIndexException * if the index is not a valid position index within an array , list or string of size < em > size < / em > */ @ Throws ( IllegalPositionIndexException . class ) public static int positionIndex ( final int index , final int size ) { } }
final boolean isIndexValid = ( size >= 0 ) && ( index >= 0 ) && ( index < size ) ; if ( ! isIndexValid ) { throw new IllegalPositionIndexException ( index , size ) ; } return index ;
public class TableBodyElement { /** * Overrideable method for setting the class of a cell ( the < td > element ) in * the table * @ param context * @ param row * @ param col * @ return */ protected String getCellClass ( HtmlRenderingContext context , int row , int col ) { } }
if ( ArrayUtils . indexOf ( _highlightedColumns , col ) == - 1 ) { return null ; } return "highlighted" ;
public class MamManager {

    /**
     * Update the archiving preferences on the server.
     *
     * @param mamPrefs the preferences to install
     * @return the currently active preferences after the operation.
     * @throws NoResponseException
     * @throws XMPPErrorException
     * @throws NotConnectedException
     * @throws InterruptedException
     * @throws NotLoggedInException
     * @since 4.3.0
     */
    public MamPrefsResult updateArchivingPreferences(MamPrefs mamPrefs) throws NoResponseException,
            XMPPErrorException, NotConnectedException, InterruptedException, NotLoggedInException {
        // Build the MAM preferences IQ stanza and send it; the server replies
        // with the preferences that are now in effect.
        MamPrefsIQ mamPrefIQ = mamPrefs.constructMamPrefsIq();
        return queryMamPrefs(mamPrefIQ);
    }
}
public class SanityChecks {

    /**
     * Check whether vector addition works. This is pure Java code and should work.
     */
    public static void checkVectorAddition() {
        // (1,2,3) + (4,5,6) must equal (5,7,9); all are 3x1 column vectors.
        DoubleMatrix x = new DoubleMatrix(3, 1, 1.0, 2.0, 3.0);
        DoubleMatrix y = new DoubleMatrix(3, 1, 4.0, 5.0, 6.0);
        DoubleMatrix z = new DoubleMatrix(3, 1, 5.0, 7.0, 9.0);
        check("checking vector addition", x.add(y).equals(z));
    }
}
public class Bond {

    /**
     * Returns the accrued interest of the bond for a given date.
     *
     * @param date  The date of interest.
     * @param model The model under which the product is valued.
     * @return The accrued interest.
     */
    public double getAccruedInterest(LocalDate date, AnalyticModel model) {
        // Locate the coupon period containing the date.
        int periodIndex = schedule.getPeriodIndex(date);
        Period period = schedule.getPeriod(periodIndex);
        DayCountConvention dcc = schedule.getDaycountconvention();
        // Accrued interest = coupon * (fraction of the period elapsed up to 'date'),
        // where the elapsed fraction is the day-count fraction from period start
        // divided by the full period length.
        double accruedInterest = getCouponPayment(periodIndex, model)
                * (dcc.getDaycountFraction(period.getPeriodStart(), date))
                / schedule.getPeriodLength(periodIndex);
        return accruedInterest;
    }
}
public class MetadataService {

    /**
     * Finds {@link Permission}s which belong to the specified {@code appId} from the specified
     * {@code repoName} in the specified {@code projectName}.
     */
    public CompletableFuture<Collection<Permission>> findPermissions(String projectName, String repoName,
                                                                     String appId) {
        requireNonNull(projectName, "projectName");
        requireNonNull(repoName, "repoName");
        requireNonNull(appId, "appId");
        return getProject(projectName).thenApply(metadata -> {
            final RepositoryMetadata repositoryMetadata = metadata.repo(repoName);
            final TokenRegistration registration = metadata.tokens().getOrDefault(appId, null);
            // If the token is guest.
            if (registration == null) {
                return repositoryMetadata.perRolePermissions().guest();
            }
            // Token-specific permissions take precedence over role-based ones.
            final Collection<Permission> p = repositoryMetadata.perTokenPermissions().get(registration.id());
            if (p != null) {
                return p;
            }
            // Fall back to the permissions granted to the token's role.
            return findPerRolePermissions(repositoryMetadata, registration.role());
        });
    }
}
public class SARLQuickfixProvider {

    /**
     * Quick fix for "Invalid member name": offers both a rename and a removal
     * resolution for the offending member.
     *
     * @param issue    the issue.
     * @param acceptor the quick fix acceptor.
     */
    @Fix(IssueCodes.INVALID_MEMBER_NAME)
    public void fixMemberName(final Issue issue, IssueResolutionAcceptor acceptor) {
        MemberRenameModification.accept(this, issue, acceptor);
        MemberRemoveModification.accept(this, issue, acceptor);
    }
}
public class GuidedDTDRLOtherwiseHelper { /** * Retrieve the correct OtherwiseBuilder for the given column * @ param c * @ return */ public static OtherwiseBuilder getBuilder ( ConditionCol52 c ) { } }
if ( c . getOperator ( ) . equals ( "==" ) ) { return new EqualsOtherwiseBuilder ( ) ; } else if ( c . getOperator ( ) . equals ( "!=" ) ) { return new NotEqualsOtherwiseBuilder ( ) ; } throw new IllegalArgumentException ( "ConditionCol operator does not support Otherwise values" ) ;
public class ThriftClient {

    /**
     * Deletes the entity with the given primary key from its column family,
     * including any secondary tables, secondary indexes and the inverted index.
     *
     * @see com.impetus.client.cassandra.CassandraClientBase#delete(java.lang.Object, java.lang.Object)
     */
    @Override
    public void delete(Object entity, Object pKey) {
        if (!isOpen()) {
            throw new PersistenceException("ThriftClient is closed.");
        }
        EntityMetadata metadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entity.getClass());
        Connection conn = null;
        try {
            conn = getConnection();
            MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata()
                    .getMetamodel(metadata.getPersistenceUnit());
            AbstractManagedType managedType = (AbstractManagedType) metaModel.entity(metadata.getEntityClazz());
            // For secondary tables: the delete must run against every table the
            // entity is mapped to, so append the primary table to the list.
            List<String> secondaryTables = ((DefaultEntityAnnotationProcessor) managedType.getEntityAnnotation())
                    .getSecondaryTablesName();
            secondaryTables.add(metadata.getTableName());
            for (String tableName : secondaryTables) {
                if (isCql3Enabled(metadata)) {
                    // CQL3 path: issue a DELETE statement.
                    String deleteQuery = onDeleteQuery(metadata, tableName, metaModel, pKey);
                    executeCQLQuery(deleteQuery, isCql3Enabled(metadata));
                } else {
                    if (metadata.isCounterColumnType()) {
                        // Counter columns need a dedicated removal path in Thrift.
                        deleteRecordFromCounterColumnFamily(pKey, tableName, metadata, getConsistencyLevel());
                    } else {
                        // Plain Thrift remove by row key.
                        ColumnPath path = new ColumnPath(tableName);
                        conn.getClient().remove(
                                CassandraUtilities.toBytes(pKey, metadata.getIdAttribute().getJavaType()), path,
                                generator.getTimestamp(), getConsistencyLevel());
                    }
                }
            }
            // Keep the secondary index in sync with the data removal.
            getIndexManager().remove(metadata, entity, pKey);
            // Delete from Inverted Index if applicable
            invertedIndexHandler.delete(entity, metadata, getConsistencyLevel(), kunderaMetadata);
        } catch (InvalidRequestException e) {
            log.error("Error while deleting of column family {} for row key {}, Caused by: .",
                    metadata.getTableName(), pKey, e);
            throw new KunderaException(e);
        } catch (TException e) {
            log.error("Error while deleting of column family {} for row key {}, Caused by: .",
                    metadata.getTableName(), pKey, e);
            throw new KunderaException(e);
        } finally {
            // Always return the pooled connection.
            releaseConnection(conn);
        }
    }
}
public class CmsSecurityManager {

    /**
     * Deletes a property definition.<p>
     *
     * @param context the current request context
     * @param name the name of the property definition to delete
     * @throws CmsException if something goes wrong
     * @throws CmsSecurityException if the project to delete is the "Online" project
     * @throws CmsRoleViolationException if the current user does not own the role {@link CmsRole#WORKPLACE_MANAGER}
     */
    public void deletePropertyDefinition(CmsRequestContext context, String name)
            throws CmsException, CmsSecurityException, CmsRoleViolationException {
        CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
        try {
            // Property definitions may only be deleted in an offline project and
            // by a workplace manager.
            checkOfflineProject(dbc);
            checkRole(dbc, CmsRole.WORKPLACE_MANAGER.forOrgUnit(null));
            m_driverManager.deletePropertyDefinition(dbc, name);
        } catch (Exception e) {
            // report() converts/rethrows the failure with a localized message.
            dbc.report(null, Messages.get().container(Messages.ERR_DELETE_PROPERTY_1, name), e);
        } finally {
            dbc.clear();
        }
    }
}
public class PrivateTaskScheduler { /** * Pull the next job off the queue and remove it . * @ return The next job . */ public synchronized Object getNextJob ( ) { } }
Object strJobDef = null ; synchronized ( this ) { if ( m_vPrivateJobs . size ( ) > 0 ) strJobDef = m_vPrivateJobs . remove ( 0 ) ; } return strJobDef ;
public class ApiRequestExecutorImpl {

    /**
     * Creates the chain used to apply policies in reverse order to the api response.
     *
     * @param responseHandler handler invoked with the response once all policies have run
     * @return the configured response chain
     */
    private Chain<ApiResponse> createResponseChain(IAsyncHandler<ApiResponse> responseHandler) {
        ResponseChain chain = new ResponseChain(policyImpls, context);
        chain.headHandler(responseHandler);
        // On policy failure or error, abort the backend connection (if still open)
        // before delegating to the corresponding outer handler.
        chain.policyFailureHandler(result -> {
            if (apiConnectionResponse != null) {
                apiConnectionResponse.abort();
            }
            policyFailureHandler.handle(result);
        });
        chain.policyErrorHandler(result -> {
            if (apiConnectionResponse != null) {
                apiConnectionResponse.abort();
            }
            policyErrorHandler.handle(result);
        });
        return chain;
    }
}
public class AmazonS3Client {

    /**
     * SigV2 presigned url has "AWSAccessKeyId" in the params. Also doing "X-Amz-Algorithm" check to ensure
     * "AWSAccessKeyId=" is not present in the bucket or key name.
     *
     * @param presignedUrl the URL to inspect
     * @return true when the URL looks like a SigV2 (not SigV4) presigned URL
     */
    private boolean isSigV2PresignedUrl(URL presignedUrl) {
        // Convert to string once; the original called toString() a second time
        // for the SigV4 marker check.
        String url = presignedUrl.toString();
        return url.contains("AWSAccessKeyId=") && !url.contains("X-Amz-Algorithm=AWS4-HMAC-SHA256");
    }
}
public class FastArchivalUrlReplayParseEventHandler { /** * return body - insert text . * < p > Run { @ code jspInsertPath } and return its output as String . * if { @ code jspInsertPath } is { @ code null } , or body - insert should not be * inserted into the resource being processed , returns { @ code null } . < / p > * @ param context context for the resource being processed * @ return insert text as String , or { @ code null } if no insertion shall be * made . */ protected String bodyInsertContent ( ReplayParseContext context ) { } }
if ( jspInsertPath == null ) return null ; JSPExecutor jspExec = context . getJspExec ( ) ; // FIXME bad chain of references . add method to ReplayParseContext ? WaybackRequest wbRequest = jspExec . getUiResults ( ) . getWbRequest ( ) ; // isAnyEmbeddedContext ( ) used as shorthand for ( isFrameWrapperContext ( ) // & & isIFrameWrapperContext ( ) ) . if ( wbRequest . isAnyEmbeddedContext ( ) ) return null ; try { return jspExec . jspToString ( jspInsertPath ) ; } catch ( ServletException ex ) { LOGGER . log ( Level . WARNING , "execution of " + jspInsertPath + " failed" , ex ) ; return null ; } catch ( IOException ex ) { LOGGER . log ( Level . WARNING , "erorr executing " + jspInsertPath , ex ) ; return null ; }
public class AmazonKinesisAnalyticsV2Client {

    /**
     * Adds an external destination to your SQL-based Amazon Kinesis Data Analytics application.
     * If you want Kinesis Data Analytics to deliver data from an in-application stream within your application to an
     * external destination (such as a Kinesis data stream, a Kinesis Data Firehose delivery stream, or an AWS Lambda
     * function), you add the relevant configuration to your application using this operation. You can configure one or
     * more outputs for your application. Each output configuration maps an in-application stream and an external
     * destination.
     * You can use one of the output configurations to deliver data from your in-application error stream to an
     * external destination so that you can analyze the errors.
     * Any configuration update, including adding a streaming source using this operation, results in a new version of
     * the application. You can use the <a>DescribeApplication</a> operation to find the current application version.
     *
     * @param request the add-application-output request
     * @return Result of the AddApplicationOutput operation returned by the service.
     * @throws ResourceNotFoundException Specified application can't be found.
     * @throws ResourceInUseException The application is not available for this operation.
     * @throws InvalidArgumentException The specified input parameter value is not valid.
     * @throws ConcurrentModificationException Exception thrown as a result of concurrent modifications to an
     *         application. This error can be the result of attempting to modify an application without using the
     *         current application ID.
     * @throws InvalidRequestException The request JSON is not valid for the operation.
     * @sample AmazonKinesisAnalyticsV2.AddApplicationOutput
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kinesisanalyticsv2-2018-05-23/AddApplicationOutput"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public AddApplicationOutputResult addApplicationOutput(AddApplicationOutputRequest request) {
        // Run the pre-execution hooks (request handlers/mutation) before dispatching.
        request = beforeClientExecution(request);
        return executeAddApplicationOutput(request);
    }
}
public class EpsReader { /** * Reads all bytes until the given sentinel is observed . * The sentinel will be included in the returned bytes . */ private static byte [ ] readUntil ( @ NotNull SequentialReader reader , @ NotNull byte [ ] sentinel ) throws IOException { } }
ByteArrayOutputStream bytes = new ByteArrayOutputStream ( ) ; final int length = sentinel . length ; int depth = 0 ; while ( depth != length ) { byte b = reader . getByte ( ) ; if ( b == sentinel [ depth ] ) depth ++ ; else depth = 0 ; bytes . write ( b ) ; } return bytes . toByteArray ( ) ;
public class RobotStatusProxy {

    /**
     * Returns the bank count of the neighbouring robot, paying the remote
     * access cycle penalty first; returns 0 when there is no neighbour.
     *
     * @see com.github.thehilikus.jrobocom.RobotInfo#getBanksCount(boolean)
     */
    @Override
    public int getRemoteBanksCount() {
        // Remote reads cost cycles: block for the configured penalty first.
        int penalty = Timing.getInstance().REMOTE_ACCESS_PENALTY;
        log.trace("[getRemoteBanksCount] Waiting {} cycles", penalty);
        turnsControl.waitTurns(penalty, "Get Neighbour's Bank count");
        Robot neighbour = world.getNeighbour(this.robot);
        if (neighbour != null) {
            return neighbour.getBanksCount();
        } else {
            // No neighbour present: report zero banks.
            return 0;
        }
    }
}
public class ExceptionPrinter {

    /**
     * Print Exception messages without stack trace in non debug mode and call system exit afterwards. Method prints
     * recursively all messages of the given exception stack to get a history overview of the causes. In verbose mode
     * (app -v) the stacktrace is printed at the end of the history. The logging level is fixed to level "error".
     * After printing, the system exit routine with error code 255 is triggered.
     *
     * @param <T> Exception type
     * @param th exception stack to print.
     * @param logger logger used for the output
     */
    public static <T extends Throwable> void printHistoryAndExit(final T th, final Logger logger) {
        printHistory(th, logger, LogLevel.ERROR);
        // In test mode, never kill the JVM: fail the assertion (when -ea is on)
        // and return instead of exiting.
        if (JPService.testMode()) {
            assert false;
            return;
        }
        exit(255);
    }
}
public class OWLDataComplementOfImpl_CustomFieldSerializer {

    /**
     * Deserializes the content of the object from the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamReader}.
     *
     * @param streamReader the {@link com.google.gwt.user.client.rpc.SerializationStreamReader} to read the
     *                     object's content from
     * @param instance     the object instance to deserialize
     * @throws com.google.gwt.user.client.rpc.SerializationException if the deserialization operation is not successful
     */
    @Override
    public void deserializeInstance(SerializationStreamReader streamReader, OWLDataComplementOfImpl instance)
            throws SerializationException {
        // Delegate to the static helper that performs the actual field reads.
        deserialize(streamReader, instance);
    }
}
public class NewJFrame { /** * GEN - LAST : event _ dateTimeFocusLost */ private void timeFocusLost ( java . awt . event . FocusEvent evt ) { } }
// GEN - FIRST : event _ timeFocusLost String time = jTextField3 . getText ( ) ; setTime ( time ) ;
public class BlockMetadataManager {

    /**
     * Gets all the temporary blocks associated with a session, empty list is returned if the session
     * has no temporary blocks.
     *
     * @param sessionId the id of the session
     * @return A list of temp blocks associated with the session
     */
    public List<TempBlockMeta> getSessionTempBlocks(long sessionId) {
        List<TempBlockMeta> sessionTempBlocks = new ArrayList<>();
        // Collect the session's temp blocks across every directory of every tier.
        for (StorageTier tier : mTiers) {
            for (StorageDir dir : tier.getStorageDirs()) {
                sessionTempBlocks.addAll(dir.getSessionTempBlocks(sessionId));
            }
        }
        return sessionTempBlocks;
    }
}
public class RequestFactory {

    /**
     * Create a request builder for a create operation for multiple items.
     *
     * @param items The items to create
     * @return The request builder
     */
    public CreateCollectionRequestBuilder<T, ID> create(List<T> items) {
        return new CreateCollectionRequestBuilder<T, ID>(version, type, entityName, items);
    }
}
public class PID {

    /**
     * Return a URIReference of some PID string, assuming it is well-formed.
     *
     * @param pidString the PID to convert
     * @return a URI reference wrapping the PID's URI form
     */
    public static SimpleURIReference toURIReference(String pidString) {
        SimpleURIReference ref = null;
        try {
            ref = new SimpleURIReference(new URI(toURI(pidString)));
        } catch (URISyntaxException e) {
            // assumes pid is well-formed: a syntax failure here indicates a
            // programming error, so escalate rather than return null.
            throw new Error(e);
        }
        return ref;
    }
}
public class DynamicCDXIndex {

    /**
     * Returns a reverse iterator over captures with the given URL prefix,
     * refusing to serve queries until the index has synchronized.
     *
     * @see org.archive.wayback.resourceindex.SearchResultSource#getPrefixReverseIterator(java.lang.String)
     */
    public CloseableIterator<CaptureSearchResult> getPrefixReverseIterator(String prefix)
            throws ResourceIndexNotAvailableException {
        // The dynamic index must be fully synchronized before queries are valid.
        if (getState() != STATE_SYNCHED) {
            throw new ResourceIndexNotAvailableException("Not synchronized");
        }
        return super.getPrefixReverseIterator(prefix);
    }
}
public class InventoryItemAttributeMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param inventoryItemAttribute the object to marshall; must not be null
     * @param protocolMarshaller     target marshaller
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(InventoryItemAttribute inventoryItemAttribute, ProtocolMarshaller protocolMarshaller) {
        if (inventoryItemAttribute == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field with its protocol binding.
            protocolMarshaller.marshall(inventoryItemAttribute.getName(), NAME_BINDING);
            protocolMarshaller.marshall(inventoryItemAttribute.getDataType(), DATATYPE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Matrix4f {

    /**
     * Set this matrix to a rotation transformation about the Y axis.
     * When used with a right-handed coordinate system, the produced rotation will rotate a vector
     * counter-clockwise around the rotation axis, when viewing along the negative axis direction towards the origin.
     * When used with a left-handed coordinate system, the rotation is clockwise.
     * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations">http://en.wikipedia.org</a>
     *
     * @param ang the angle in radians
     * @return this
     */
    public Matrix4f rotationY(float ang) {
        float sin, cos;
        sin = (float) Math.sin(ang);
        // cosFromSin derives cos from the already-computed sin (cheaper than a second trig call).
        cos = (float) Math.cosFromSin(sin, ang);
        // Only reset to identity if the matrix is not already known to be identity.
        if ((properties & PROPERTY_IDENTITY) == 0)
            MemUtil.INSTANCE.identity(this);
        // Write the four rotation entries of the Y-axis basic rotation.
        this._m00(cos);
        this._m02(-sin);
        this._m20(sin);
        this._m22(cos);
        _properties(PROPERTY_AFFINE | PROPERTY_ORTHONORMAL);
        return this;
    }
}
public class IslamicChronology { int getDaysInYearMonth ( int year , int month ) { } }
if ( month == 12 && isLeapYear ( year ) ) { return LONG_MONTH_LENGTH ; } return ( -- month % 2 == 0 ? LONG_MONTH_LENGTH : SHORT_MONTH_LENGTH ) ;
public class DefaultGroovyMethods {

    /**
     * Adds GroovyCollections#combinations(Iterable, Closure) as a method on collections.
     * Example usage:
     * <pre class="groovyTestCase">assert [[2, 3], [4, 5, 6]].combinations { x, y {@code ->} x * y } == [8, 12, 10, 15, 12, 18]</pre>
     *
     * @param self     a Collection of lists
     * @param function a closure to be called on each combination
     * @return a List of the results of applying the closure to each combination found
     * @see groovy.util.GroovyCollections#combinations(Iterable)
     * @since 2.2.0
     */
    public static List combinations(Iterable self, Closure<?> function) {
        // Enumerate all combinations, then map the closure over each one.
        return collect((Iterable) GroovyCollections.combinations(self), function);
    }
}
public class ParameterUtil {

    /**
     * Internal method to parse long values.
     *
     * @param value              the string to parse
     * @param invalidDataMessage message used when the value is not a valid long
     * @return the parsed long
     * @throws DataValidationException if the value cannot be parsed
     */
    protected static long parseLongParameter(String value, String invalidDataMessage) throws DataValidationException {
        try {
            return Long.parseLong(value);
        } catch (NumberFormatException nfe) {
            // NOTE(review): the NumberFormatException cause is dropped here —
            // confirm whether DataValidationException supports a cause argument.
            throw new DataValidationException(invalidDataMessage);
        }
    }
}
public class CommerceNotificationTemplateServiceBaseImpl {

    /**
     * Sets the commerce notification queue entry remote service.
     *
     * @param commerceNotificationQueueEntryService the commerce notification queue entry remote service
     */
    public void setCommerceNotificationQueueEntryService(
            com.liferay.commerce.notification.service.CommerceNotificationQueueEntryService commerceNotificationQueueEntryService) {
        this.commerceNotificationQueueEntryService = commerceNotificationQueueEntryService;
    }
}
public class Ifc4PackageImpl {

    /**
     * Returns the EClass for IfcExtrudedAreaSolid, resolving it lazily from the
     * registered Ifc4 package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public EClass getIfcExtrudedAreaSolid() {
        // Lazy lookup: classifier index 251 in the registered Ifc4 package.
        if (ifcExtrudedAreaSolidEClass == null) {
            ifcExtrudedAreaSolidEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
                    .getEClassifiers().get(251);
        }
        return ifcExtrudedAreaSolidEClass;
    }
}
public class StorageReader {

    /**
     * Return the data buffer for the given position.
     *
     * @param index absolute byte offset into the segmented storage
     * @return the segment buffer positioned at the offset within its segment
     */
    private ByteBuffer getDataBuffer(long index) {
        // index / segmentSize selects the segment; index % segmentSize is the offset within it.
        ByteBuffer buf = dataBuffers[(int) (index / segmentSize)];
        // NOTE(review): position() mutates the shared segment buffer — this
        // presumably assumes single-threaded access or external duplication; confirm.
        buf.position((int) (index % segmentSize));
        return buf;
    }
}
public class GenericDatabaseMetaData {

    /**
     * Returns this object cast to the requested interface, or throws
     * SQLException if this object does not implement it (JDBC Wrapper contract).
     *
     * @param iface the interface to unwrap to
     * @return this object cast to {@code iface}
     * @throws SQLException if this object does not implement {@code iface}
     */
    public <T> T unwrap(Class<T> iface) throws SQLException {
        try {
            return iface.cast(this);
        } catch (Throwable ex) {
            // Intentionally blank - SQLException thrown below
        }
        throw new SQLException(getClass().getName() + " does not implement " + iface);
    }
}
public class SimpleQueryCache {

    /**
     * Record a specific answer to a given query with a known cache unifier.
     *
     * @param query   query to which an answer is to be recorded
     * @param answer  specific answer to the query
     * @param entry   pre-resolved cache entry for the query, or null to look it up
     * @param unifier unifier between the cached and input query, or null to compute it
     * @return recorded answer entry
     */
    public CacheEntry<Q, Set<ConceptMap>> record(Q query, ConceptMap answer,
                                                 @Nullable CacheEntry<Q, Set<ConceptMap>> entry,
                                                 @Nullable MultiUnifier unifier) {
        // Prefer the caller-supplied entry; otherwise look the query up in the cache.
        CacheEntry<Q, Set<ConceptMap>> match = entry != null ? entry : this.getEntry(query);
        if (match != null) {
            Q equivalentQuery = match.query();
            Set<ConceptMap> answers = match.cachedElement();
            // Translate the answer into the cached query's variable space.
            MultiUnifier multiUnifier = unifier == null
                    ? query.getMultiUnifier(equivalentQuery, unifierType())
                    : unifier;
            // Existing cached answers fix the variable set new answers must cover.
            Set<Variable> cacheVars = answers.isEmpty() ? new HashSet<>() : answers.iterator().next().vars();
            multiUnifier.apply(answer).peek(ans -> {
                if (!ans.vars().containsAll(cacheVars)) {
                    throw GraqlQueryException.invalidQueryCacheEntry(equivalentQuery, ans);
                }
            }).forEach(answers::add);
            return match;
        } else {
            // First answer for this query: it must bind every query variable.
            if (!answer.vars().containsAll(query.getVarNames())) {
                throw GraqlQueryException.invalidQueryCacheEntry(query, answer);
            }
            return putEntry(query, Sets.newHashSet(answer));
        }
    }
}
public class ValueMap { /** * Returns the value of the mapping with the specified key , or the given default value . */ public < T > T get ( String key , T defaultValue ) { } }
if ( map . containsKey ( key ) ) { Object value = map . get ( key ) ; return ( T ) ( value instanceof byte [ ] ? unmarshall ( ( byte [ ] ) value ) : value ) ; } return defaultValue ;
public class ParserDDL {

    /**
     * Reads the inline column constraints (PRIMARY KEY, UNIQUE, FOREIGN KEY /
     * REFERENCES, CHECK, NOT NULL and the VoltDB ASSUMEUNIQUE extension) that
     * follow a column definition, appending each to {@code constraintList}.
     *
     * @param table          the table being defined
     * @param column         the column the constraints apply to
     * @param constraintList accumulator; slot 0 holds the (possibly placeholder) primary key
     */
    void readColumnConstraints(Table table, ColumnSchema column, HsqlArrayList constraintList) {
        boolean end = false;
        boolean isAutogeneratedName = true;
        // Loop until a token that is not a constraint keyword is seen.
        while (true) {
            HsqlName constName = null;
            // Optional explicit "CONSTRAINT <name>" prefix.
            if (token.tokenType == Tokens.CONSTRAINT) {
                read();
                constName = readNewDependentSchemaObjectName(table.getName(), SchemaObject.CONSTRAINT);
                isAutogeneratedName = false;
            }

            // A VoltDB extension to support indexed expressions and the assume unique attribute
            boolean assumeUnique = false; // For VoltDB
            // End of VoltDB extension
            switch (token.tokenType) {
                case Tokens.PRIMARY : {
                    read();
                    readThis(Tokens.KEY);
                    // Slot 0 of the list is reserved for the primary key; a
                    // second PRIMARY KEY declaration is an error.
                    Constraint existingConst = (Constraint) constraintList.get(0);
                    if (existingConst.constType == Constraint.PRIMARY_KEY) {
                        throw Error.error(ErrorCode.X_42532);
                    }
                    OrderedHashSet set = new OrderedHashSet();
                    set.add(column.getName().name);
                    if (constName == null) {
                        constName = database.nameManager.newAutoName("PK", table.getSchemaName(),
                                table.getName(), SchemaObject.CONSTRAINT);
                    }
                    Constraint c = new Constraint(constName, isAutogeneratedName, set, Constraint.PRIMARY_KEY);
                    constraintList.set(0, c);
                    column.setPrimaryKey(true);
                    break;
                }
                // A VoltDB extension to support indexed expressions and the assume unique attribute
                case Tokens.ASSUMEUNIQUE :
                    // ASSUMEUNIQUE is parsed exactly like UNIQUE, with a flag set.
                    assumeUnique = true;
                // $FALL-THROUGH$
                // End of VoltDB extension
                case Tokens.UNIQUE : {
                    read();
                    OrderedHashSet set = new OrderedHashSet();
                    set.add(column.getName().name);
                    if (constName == null) {
                        constName = database.nameManager.newAutoName("CT", table.getSchemaName(),
                                table.getName(), SchemaObject.CONSTRAINT);
                    }
                    Constraint c = new Constraint(constName, isAutogeneratedName, set, Constraint.UNIQUE);
                    // A VoltDB extension to support indexed expressions and the assume unique attribute
                    c.setAssumeUnique(assumeUnique);
                    // End of VoltDB extension
                    constraintList.add(c);
                    break;
                }
                case Tokens.FOREIGN : {
                    // "FOREIGN KEY" is optional sugar before REFERENCES.
                    read();
                    readThis(Tokens.KEY);
                }
                // $FALL-THROUGH$
                case Tokens.REFERENCES : {
                    OrderedHashSet set = new OrderedHashSet();
                    set.add(column.getName().name);
                    Constraint c = readFKReferences(table, constName, set);
                    constraintList.add(c);
                    break;
                }
                case Tokens.CHECK : {
                    read();
                    if (constName == null) {
                        constName = database.nameManager.newAutoName("CT", table.getSchemaName(),
                                table.getName(), SchemaObject.CONSTRAINT);
                    }
                    Constraint c = new Constraint(constName, isAutogeneratedName, null, Constraint.CHECK);
                    readCheckConstraintCondition(c);
                    // An inline CHECK may only reference the column being defined,
                    // and only in this table's schema.
                    OrderedHashSet set = c.getCheckColumnExpressions();
                    for (int i = 0; i < set.size(); i++) {
                        ExpressionColumn e = (ExpressionColumn) set.get(i);
                        if (column.getName().name.equals(e.getColumnName())) {
                            if (e.getSchemaName() != null && e.getSchemaName() != table.getSchemaName().name) {
                                throw Error.error(ErrorCode.X_42505);
                            }
                        } else {
                            throw Error.error(ErrorCode.X_42501);
                        }
                    }
                    constraintList.add(c);
                    break;
                }
                case Tokens.NOT : {
                    // NOT NULL is represented internally as a CHECK constraint.
                    read();
                    readThis(Tokens.NULL);
                    if (constName == null) {
                        constName = database.nameManager.newAutoName("CT", table.getSchemaName(),
                                table.getName(), SchemaObject.CONSTRAINT);
                    }
                    Constraint c = new Constraint(constName, isAutogeneratedName, null, Constraint.CHECK);
                    c.check = new ExpressionLogical(column);
                    constraintList.add(c);
                    break;
                }
                default :
                    // Not a constraint keyword: stop consuming tokens.
                    end = true;
                    break;
            }
            if (end) {
                break;
            }
        }
    }
}
public class SystemManager { /** * Adds a new system . */ public < T extends SubSystem > void addSystem ( Class < T > systemApi , T system ) { } }
// Register the system under its API class; a second registration for the
// same class is a programming error.
final SubSystem previous = _systemMap.putIfAbsent(systemApi, system);

if (previous != null) {
    throw new IllegalStateException(L.l("duplicate system '{0}' is not allowed because another system with that class is already registered '{1}'",
                                        system, previous));
}

_pendingStart.add(system);

// If the manager is already running, bring the new system up immediately.
if (_lifecycle.isActive()) {
    startSystems();
}
public class ResourceGroovyMethods { /** * Invokes the closure specified with key ' visit ' in the options Map * for each descendant file in this directory tree . Convenience method * for { @ link # traverse ( java . io . File , java . util . Map , groovy . lang . Closure ) } allowing the ' visit ' closure * to be included in the options Map rather than as a parameter . * @ param self a File ( that happens to be a folder / directory ) * @ param options a Map of options to alter the traversal behavior * @ throws FileNotFoundException if the given directory does not exist * @ throws IllegalArgumentException if the provided File object does not represent a directory or illegal filter combinations are supplied * @ see # traverse ( java . io . File , java . util . Map , groovy . lang . Closure ) * @ since 1.7.1 */ public static void traverse ( final File self , final Map < String , Object > options ) throws FileNotFoundException , IllegalArgumentException { } }
// Pull the 'visit' closure out of the options map and delegate to the
// three-argument overload; the remaining options keep their meaning.
final Closure visitClosure = (Closure) options.remove("visit");
traverse(self, options, visitClosure);
public class DevLockManager { void addToRelockList ( final DeviceProxy dev , final int validity ) throws DevFailed { } }
// Check if it is the first relock if ( relockMap == null ) { // Create hash table for admin devices object relockMap = new Hashtable < String , LockedDeviceAmin > ( ) ; // Create a thread to unlock all devices at exit Runtime . getRuntime ( ) . addShutdownHook ( new ShutdownThread ( ) ) ; // Create thread fo garbage callector call preiodicaly new GarbageThread ( ) . start ( ) ; } // Check if admin device already exists . String adm ; try { adm = dev . adm_name ( ) ; } catch ( final DevFailed e ) { // Give up return ; } final LockedDevice ld = new LockedDevice ( dev . get_name ( ) , validity ) ; LockedDeviceAmin lda ; if ( ! relockMap . containsKey ( adm ) ) { // if not , crate and add th the list lda = new LockedDeviceAmin ( adm , ld ) ; lda . start ( ) ; relockMap . put ( adm , lda ) ; } else { lda = relockMap . get ( adm ) ; lda . add ( ld ) ; }
public class InputSchemaUpdateMarshaller { /** * Marshall the given parameter object . */ public void marshall ( InputSchemaUpdate inputSchemaUpdate , ProtocolMarshaller protocolMarshaller ) { } }
// Reject null input early -- the marshaller contract requires a non-null POJO.
if (inputSchemaUpdate == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Marshall each schema-update component through its protocol binding.
    protocolMarshaller.marshall(inputSchemaUpdate.getRecordFormatUpdate(), RECORDFORMATUPDATE_BINDING);
    protocolMarshaller.marshall(inputSchemaUpdate.getRecordEncodingUpdate(), RECORDENCODINGUPDATE_BINDING);
    protocolMarshaller.marshall(inputSchemaUpdate.getRecordColumnUpdates(), RECORDCOLUMNUPDATES_BINDING);
} catch (Exception e) {
    // Wrap any marshalling failure, preserving the original cause.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class ExportClientVpnClientConfigurationRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < ExportClientVpnClientConfigurationRequest > getDryRunRequest ( ) { } }
// Marshal this request, then flag it as a dry run so the service only
// validates permissions without executing the operation.
final Request<ExportClientVpnClientConfigurationRequest> marshalled =
        new ExportClientVpnClientConfigurationRequestMarshaller().marshall(this);
marshalled.addParameter("DryRun", Boolean.toString(true));
return marshalled;
public class FieldValueMappingCallback { /** * Implements the NEBA contracts for fields , for instance guarantees that collection - typed fields are never < code > null < / code > . Applies * { @ link AnnotatedFieldMapper custom field mappers } . * @ param fieldData must not be < code > null < / code > . * @ param value can be < code > null < / code > . * @ return the post - processed value , can be < code > null < / code > . */ private Object postProcessResolvedValue ( FieldData fieldData , Object value ) { } }
// For convenience , NEBA guarantees that any mappable collection - typed field is never < code > null < / code > but rather // an empty collection , in case no non - < code > null < / code > default value was provided and the field is not Lazy . boolean preventNullCollection = value == null && ! fieldData . metaData . isLazy ( ) && fieldData . metaData . isInstantiableCollectionType ( ) && getField ( fieldData ) == null ; @ SuppressWarnings ( "unchecked" ) Object defaultValue = preventNullCollection ? instantiateCollectionType ( ( Class < Collection > ) fieldData . metaData . getType ( ) ) : null ; // Provide the custom mappers with the default value in case of empty collections for convenience value = applyCustomMappings ( fieldData , value == null ? defaultValue : value ) ; return value == null ? defaultValue : value ;
public class ScriptReaderBinary { /** * String : schemaname */ protected String readTableInit ( ) throws IOException { } }
// Read the next row from the binary script; false signals end of data.
boolean more = readRow(rowIn, 0);
if (!more) {
    return null;
}
// The statement text for this row.
String s = rowIn.readString();
// operation is always INSERT
int checkOp = rowIn.readInt();
if (checkOp == ScriptWriterBase.INSERT_WITH_SCHEMA) {
    // The row carries its schema name explicitly.
    currentSchema = rowIn.readString();
} else {
    currentSchema = null;
}
// Anything other than the two INSERT variants means the script is corrupt.
if (checkOp != ScriptWriterBase.INSERT && checkOp != ScriptWriterBase.INSERT_WITH_SCHEMA) {
    throw Error.error(ErrorCode.ERROR_IN_SCRIPT_FILE, ErrorCode.M_ERROR_IN_BINARY_SCRIPT_2);
}
return s;
public class DefaultCosHttpClient { /** * Get请求函数 * @ param url * @ param headers * 额外添加的Http头部 * @ param params * GET请求的参数 * @ return Cos服务器返回的字符串 * @ throws Exception */ @ Override protected String sendGetRequest ( HttpRequest httpRequest ) throws AbstractCosException { } }
String url = httpRequest.getUrl();
HttpGet httpGet = null;
String responseStr = "";
int retry = 0;
int maxRetryCount = this.config.getMaxFailedRetry();
// Retry the GET up to maxRetryCount times on parse/IO failures.
while (retry < maxRetryCount) {
    try {
        // Build the final URL with all query parameters appended.
        URIBuilder urlBuilder = new URIBuilder(url);
        for (String paramKey : httpRequest.getParams().keySet()) {
            urlBuilder.addParameter(paramKey, httpRequest.getParams().get(paramKey));
        }
        httpGet = new HttpGet(urlBuilder.build());
    } catch (URISyntaxException e) {
        // Malformed URL is not retryable.
        String errMsg = "Invalid url:" + url;
        LOG.error(errMsg);
        throw new ParamException(errMsg);
    }
    httpGet.setConfig(requestConfig);
    setHeaders(httpGet, httpRequest.getHeaders());
    try {
        HttpResponse httpResponse = httpClient.execute(httpGet); ;
        int responseStatusCode = httpResponse.getStatusLine().getStatusCode();
        // 200 and 400 both carry a meaningful body (400 holds the COS error info).
        if (responseStatusCode == 200 || responseStatusCode == 400) {
            responseStr = EntityUtils.toString(httpResponse.getEntity(), "UTF-8");
            return responseStr;
        } else {
            // Other status codes are treated as hard failures (no retry).
            String errMsg = getErrorHttpResponseMsg(httpRequest, httpResponse.getStatusLine());
            throw new NetworkException(errMsg);
        }
    } catch (ParseException | IOException e) {
        // Transient failure: retry; give up after maxRetryCount attempts.
        ++retry;
        if (retry == maxRetryCount) {
            String errMsg = getExceptionMsg(httpRequest, e.toString());
            throw new ServerException(errMsg);
        }
    } finally {
        // Always release the connection back to the pool.
        httpGet.releaseConnection();
    }
}
return responseStr;
public class Bundler { /** * Inserts an array of Parcelable values into the mapping of this Bundle , * replacing any existing value for the given key . Either key or value may * be null . * @ param key a String , or null * @ param value an array of Parcelable objects , or null * @ return this */ public Bundler putParcelableArray ( String key , Parcelable [ ] value ) { } }
// Delegate to the underlying Bundle and return this builder for chaining.
bundle.putParcelableArray(key, value);
return this;
public class GoogleTableDisplayer { /** * Reset the current navigation status on filter requests from external displayers . */ @ Override public void onFilterEnabled ( Displayer displayer , DataSetGroup groupOp ) { } }
// An external filter invalidates the current pagination; jump back to the
// first page before letting the superclass handle the filter event.
currentPage = 1;
super.onFilterEnabled(displayer, groupOp);
public class ObjectResult { /** * Get the status for this object update result . If a status has not been explicitly * defined , a value of OK is returned . * @ return Status value of this object update result . */ public Status getStatus ( ) { } }
// Look up the raw status field; absence means the update succeeded (OK).
// The stored value is matched case-insensitively against the Status enum.
final String raw = m_resultFields.get(STATUS);
return (raw == null) ? Status.OK : Status.valueOf(raw.toUpperCase());
public class AuditStorageProvider { /** * These methods make changes to the state of storage , so audit information * needs to be captured for each */ @ Override public void createSpace ( String spaceId ) { } }
// Perform the actual space creation, then record it in the audit log.
target.createSpace(spaceId);
submitWriteTask(AuditTask.ActionType.CREATE_SPACE.name(), spaceId,
                AuditTask.NA, AuditTask.NA, AuditTask.NA, AuditTask.NA,
                null, null, AuditTask.NA, AuditTask.NA);
public class HttpRequest { /** * 获取指定的header值 , 没有返回默认值 * @ param name header名 * @ param defaultValue 默认值 * @ return header值 */ public String getHeader ( String name , String defaultValue ) { } }
// Delegate to the header container, which returns defaultValue when the
// named header is absent.
return header.getValue(name, defaultValue);
public class RebalanceUtils { /** * For a particular stealer node find all the primary partitions tuples it * will steal . * @ param currentCluster The cluster definition of the existing cluster * @ param finalCluster The final cluster definition * @ param stealNodeId Node id of the stealer node * @ return Returns a list of primary partitions which this stealer node will * get */ public static List < Integer > getStolenPrimaryPartitions ( final Cluster currentCluster , final Cluster finalCluster , final int stealNodeId ) { } }
// Partitions owned by the stealer in the final layout.
final List<Integer> stolen = new ArrayList<Integer>(finalCluster.getNodeById(stealNodeId).getPartitionIds());

// Partitions the stealer already owns today (empty for a brand new node).
List<Integer> alreadyOwned = new ArrayList<Integer>();
if (currentCluster.hasNodeWithId(stealNodeId)) {
    alreadyOwned = currentCluster.getNodeById(stealNodeId).getPartitionIds();
} else if (logger.isDebugEnabled()) {
    logger.debug("Current cluster does not contain stealer node (cluster : [[[" + currentCluster + "]]], node id " + stealNodeId + ")");
}

// Whatever is in the final layout but not owned today must be stolen.
stolen.removeAll(alreadyOwned);
return stolen;
public class XsdAsmInterfaces { /** * Obtains all the interfaces that a given element will implement . * @ param element The { @ link XsdElement } in which the class will be based . * @ param apiName The name of the generated fluent interface . * @ return A { @ link String } array with all the interface names . */ String [ ] getInterfaces ( XsdElement element , String apiName ) { } }
// Gather the three interface groups the element participates in.
final String[] attributeGroups = getAttributeGroupInterfaces(element);
final String[] elementGroups = getElementInterfaces(element, apiName);
final String[] hierarchy = getHierarchyInterfaces(element, apiName);
// Concatenate them, preserving the original relative order.
return ArrayUtils.addAll(attributeGroups, ArrayUtils.addAll(elementGroups, hierarchy));
public class Parser { /** * < p > Find the end of the single - quoted string starting at the given offset . < / p > * < p > Note : for < tt > ' single ' ' quote in string ' < / tt > , this method currently returns the offset of * first < tt > ' < / tt > character after the initial one . The caller must call the method a second time * for the second part of the quoted string . < / p > * @ param query query * @ param offset start offset * @ param standardConformingStrings standard conforming strings * @ return position of the end of the single - quoted string */ public static int parseSingleQuotes ( final char [ ] query , int offset , boolean standardConformingStrings ) { } }
// check for escape string syntax ( E ' ' ) if ( standardConformingStrings && offset >= 2 && ( query [ offset - 1 ] == 'e' || query [ offset - 1 ] == 'E' ) && charTerminatesIdentifier ( query [ offset - 2 ] ) ) { standardConformingStrings = false ; } if ( standardConformingStrings ) { // do NOT treat backslashes as escape characters while ( ++ offset < query . length ) { switch ( query [ offset ] ) { case '\'' : return offset ; default : break ; } } } else { // treat backslashes as escape characters while ( ++ offset < query . length ) { switch ( query [ offset ] ) { case '\\' : ++ offset ; break ; case '\'' : return offset ; default : break ; } } } return query . length ;
public class ServerImpl { /** * Gets or creates a server text channel . * @ param data The json data of the channel . * @ return The server text channel . */ public ServerTextChannel getOrCreateServerTextChannel ( JsonNode data ) { } }
// Extract the channel id and type from the json payload.
final long channelId = Long.parseLong(data.get("id").asText());
final ChannelType channelType = ChannelType.fromId(data.get("type").asInt());
synchronized (this) {
    // TODO Treat news channels differently
    if (channelType == ChannelType.SERVER_TEXT_CHANNEL || channelType == ChannelType.SERVER_NEWS_CHANNEL) {
        // Reuse an existing channel object, or build one from the json data.
        return getTextChannelById(channelId).orElseGet(() -> new ServerTextChannelImpl(api, this, data));
    }
}
// Invalid channel type
return null;
public class TaggerUtils { /** * Evaluates the performance of { @ code tagger } on * { @ code evaluationData } , returning both per - item and per - sequence * error rates . * @ param tagger * @ param evaluationData * @ return */ public static < I , O > SequenceTaggerError evaluateTagger ( SequenceTagger < I , O > tagger , Collection < ? extends TaggedSequence < I , O > > evaluationData ) { } }
// Distribute the evaluation across the configured map-reduce executor: the
// mapper evaluates the tagger on each sequence (-1 = no limit) and the
// reducer folds per-sequence results into aggregate error rates.
MapReduceExecutor executor = MapReduceConfiguration.getMapReduceExecutor();
return executor.mapReduce(evaluationData,
        new SequenceTaggerEvaluationMapper<I, O>(tagger, -1),
        new SequenceTaggerEvaluationReducer());
public class JKDateTimeUtil { /** * Compare two dates . * @ param date1 the date 1 * @ param date2 the date 2 * @ return the compare dates */ public static CompareDates compareTwoDates ( Date date1 , Date date2 ) { } }
Date d1 = new Date ( date1 . getTime ( ) ) ; // to unify the format of the dates // before the compare Date d2 = new Date ( date2 . getTime ( ) ) ; if ( d1 . compareTo ( d2 ) < 0 ) return CompareDates . DATE1_LESS_THAN_DATE2 ; else if ( d1 . compareTo ( d2 ) > 0 ) return CompareDates . DATE1_GREATER_THAN_DATE2 ; else return CompareDates . DATE1_EQUAL_DATE2 ;
public class TimeUtils { /** * Creates a list of days between the specified start ( inclusive ) and end ( inclusive ) . * @ param startUnixTimestamp the start * @ param endUnixTimestamp the end * @ return the unix timestamps for each day between start and end */ public static List < Long > createTimestampList ( final long startUnixTimestamp , final long endUnixTimestamp ) { } }
if ( startUnixTimestamp > endUnixTimestamp ) { return Collections . emptyList ( ) ; } // normalize the start and end ( next day ' s start ) final long normStart = TimeModifier . START_OF_DAY . applyModifier ( startUnixTimestamp ) ; final long normEnd = TimeModifier . moveDays ( endUnixTimestamp , true , 1 ) ; // determine which times we have to query for final List < Long > times = new ArrayList < > ( ) ; for ( long time = normStart ; time < normEnd ; time += 24 * 60 * 60 ) { times . add ( time ) ; } return times ;
public class CompactCharArray { /** * Set a new value for a Unicode character . * Set automatically expands the array if it is compacted . * @ param index the character to set the mapped value with * @ param value the new mapped value * @ deprecated This API is ICU internal only . * @ hide draft / provisional / internal are hidden on Android */ @ Deprecated public void setElementAt ( char index , char value ) { } }
// A compacted array cannot be written in place: expand it first.
if (isCompact) expand();
values[index] = value;
// Mark the block containing this index as modified for later re-compaction.
touchBlock(index >> BLOCKSHIFT, value);
public class ViewTransitionBuilder { /** * TODO Current support is rudimentary , may expand support if there are enough demand for this . * Converts an animator to ITransition when built , note that not all functions of Animator are supported . * Non - working functions : repeatMode , repeatCount , delay , duration ( when in a set ) , Interpolator . * Furthermore , { @ link # transitViewGroup ( ViewGroupTransition ) } does not work with this method . * @ param animator * @ return */ public ViewTransitionBuilder animator ( @ NonNull final Animator animator ) { } }
addSetup(new ViewTransition.Setup() {
    @Override
    public void setupAnimation(@NonNull final TransitionControllerManager manager) {
        // Clone so each transition run gets its own animator instance.
        final Animator copy = animator.clone();
        if (copy instanceof AnimatorSet) {
            manager.addTransitionController(DefaultTransitionController.wrapAnimatorSet((AnimatorSet) copy));
        } else {
            manager.addTransitionController(DefaultTransitionController.wrapAnimator(copy));
        }
    }
});
return self();
public class ExpressionUtils { /** * / * With value class */ public static < X , Y > Y getValue ( Class < X > source , X target , String propertyPath , Class < Y > valueClass ) { } }
// Resolve the property-path expression for the source class and evaluate it
// against the given target instance, returning a value of the requested type.
return getExpression(source, propertyPath, valueClass).getValue(target);
public class DateUtils { /** * Roll the days forward or backward . * @ param startDate - The start date * @ param days - Negative to rollbackwards . */ public static java . sql . Date rollDays ( java . util . Date startDate , int days ) { } }
// Delegate to rollDate using the day-of-month calendar field; negative
// values roll backwards.
return rollDate(startDate, Calendar.DATE, days);
public class ServerActor { private void failTimedOutMissingContentRequests ( ) { } }
// Nothing to do when the actor is stopped or no requests are awaiting content.
if (isStopped()) return;
if (requestsMissingContent.isEmpty()) return;

// Collect ids first to prevent ConcurrentModificationException on the map.
final List<String> toRemove = new ArrayList<>();

for (final String id : requestsMissingContent.keySet()) {
    final RequestResponseHttpContext requestResponseHttpContext = requestsMissingContent.get(id);

    if (requestResponseHttpContext.requestResponseContext.hasConsumerData()) {
        final RequestParser parser = requestResponseHttpContext.requestResponseContext.consumerData();
        if (parser.hasMissingContentTimeExpired(requestMissingContentTimeout)) {
            // Timed out waiting for the remaining content: detach the parser,
            // schedule removal, and fail the request.
            // (The original code called consumerData(null) a second time after
            // completing the response; the redundant call is removed.)
            requestResponseHttpContext.requestResponseContext.consumerData(null);
            toRemove.add(id);
            requestResponseHttpContext.httpContext.completes.with(Response.of(Response.Status.BadRequest, "Missing content."));
        }
    } else {
        toRemove.add(id); // already closed?
    }
}

for (final String id : toRemove) {
    requestsMissingContent.remove(id);
}
public class LaplaceInterpolation { /** * Solves A * x = b by iterative biconjugate gradient method . * @ param b the right hand side of linear equations . * @ param x on input , x should be set to an initial guess of the solution * ( or all zeros ) . On output , x is reset to the improved solution . * @ param tol the desired convergence tolerance . * @ param maxIter the maximum number of allowed iterations . * @ return the estimated error . */ private static double solve ( double [ ] [ ] matrix , double [ ] b , double [ ] x , boolean [ ] mask , double tol , int maxIter ) { } }
double err = 0.0;
double ak, akden, bk, bkden = 1.0, bknum, bnrm;
int j, n = b.length;
double[] p = new double[n];
double[] pp = new double[n];
double[] r = new double[n];
double[] rr = new double[n];
double[] z = new double[n];
double[] zz = new double[n];

// Initial residuals: r = b - A*x, and rr = r (plain BiCG, no minimum residual variant).
ax(matrix, x, r, mask);
for (j = 0; j < n; j++) {
    r[j] = b[j] - r[j];
    rr[j] = r[j];
}
bnrm = snorm(b);
asolve(r, z); // z = preconditioned residual

for (int iter = 0; iter < maxIter; iter++) {
    asolve(rr, zz);
    // bknum = z . rr
    for (bknum = 0.0, j = 0; j < n; j++) {
        bknum += z[j] * rr[j];
    }
    // BUGFIX: the first pass through this loop is iter == 0 (the original
    // tested iter == 1, which wrongly restarted the search directions on the
    // *second* iteration, degrading convergence). On the first iteration the
    // search directions are simply the preconditioned residuals
    // (cf. Numerical Recipes, linbcg, where the loop counter starts at 1).
    if (iter == 0) {
        for (j = 0; j < n; j++) {
            p[j] = z[j];
            pp[j] = zz[j];
        }
    } else {
        // Conjugate-direction update.
        bk = bknum / bkden;
        for (j = 0; j < n; j++) {
            p[j] = bk * p[j] + z[j];
            pp[j] = bk * pp[j] + zz[j];
        }
    }
    bkden = bknum;
    // Step length ak = bknum / (A*p . pp)
    ax(matrix, p, z, mask);
    for (akden = 0.0, j = 0; j < n; j++) {
        akden += z[j] * pp[j];
    }
    ak = bknum / akden;
    atx(matrix, pp, zz, mask);
    // Update solution and both residuals.
    for (j = 0; j < n; j++) {
        x[j] += ak * p[j];
        r[j] -= ak * z[j];
        rr[j] -= ak * zz[j];
    }
    asolve(r, z);
    // Relative residual as the convergence / error estimate.
    err = snorm(r) / bnrm;
    if (err <= tol) {
        break;
    }
}
return err;
public class SequenceMouseListener { /** * get the sequence position of the current mouse event */ private int getSeqPos ( MouseEvent e ) { } }
int x = e . getX ( ) ; // int y = e . getY ( ) ; // float scale = seqScale . getScale ( ) ; // int DEFAULT _ X _ START = SequenceScalePanel . DEFAULT _ X _ START ; float scale = parent . getScale ( ) ; coordManager . setScale ( scale ) ; int seqpos = coordManager . getSeqPos ( x - 2 ) ; return seqpos ;
public class CollectionUtils { /** * As boolean array . * @ param input the input * @ return the boolean [ ] */ public static Boolean [ ] asBooleanArray ( List < Boolean > input ) { } }
Boolean [ ] result = new Boolean [ input . size ( ) ] ; for ( int i = 0 ; i < result . length ; i ++ ) { result [ i ] = input . get ( i ) ; } return result ;
public class ViewPortImpl { /** * Change the view on the map by applying a bounding box ( world coordinates ! ) . Since the width / height ratio of the * bounding box may differ from that of the map , the fit is " as good as possible " . * Also this function will almost certainly change the scale on the map , so if there have been resolutions defined , * it will snap to them . * @ param bounds * A bounding box in world coordinates that determines the view from now on . */ public void applyBounds ( final Bbox bounds ) { } }
// Convert the world-coordinate Bbox into the GWT-client bbox type and hand
// it to the map view, which snaps the scale to fixed resolutions if any.
final org.geomajas.gwt.client.spatial.Bbox clientBox =
        new org.geomajas.gwt.client.spatial.Bbox(bounds.getX(), bounds.getY(),
                bounds.getWidth(), bounds.getHeight());
mapView.applyBounds(clientBox, zoomOption);
public class Base64 { /** * Decodes data from Base64 notation , automatically detecting * gzip - compressed data and decompressing it . * @ param s * the string to decode * @ return the decoded data * @ since 1.4 */ public static byte [ ] decode ( String s ) { } }
// Convert the string using the preferred encoding; fall back to the
// platform default charset if it is unsupported.
byte[] raw;
try {
    raw = s.getBytes(PREFERRED_ENCODING);
} catch (java.io.UnsupportedEncodingException uee) {
    raw = s.getBytes();
}
// Decode the Base64 bytes (gzip detection happens in the byte[] overload).
return decode(raw);
public class ProxyBuilderDefaultImpl { /** * Method called by both synchronous and asynchronous build ( ) to create a ProxyInvocationHandler */ private ProxyInvocationHandler createProxyInvocationHandler ( final ProxyCreatedCallback < T > callback ) { } }
// A builder instance may only ever produce a single proxy.
if (buildCalled) {
    throw new JoynrIllegalStateException("Proxy builder was already used to build a proxy. Please create a new proxy builder for each proxy.");
}
buildCalled = true;
// Resolve the stateless async callback, if a use case was configured.
StatelessAsyncCallback statelessAsyncCallback = null;
if (statelessAsyncCallbackUseCase != null) {
    statelessAsyncCallback = statelessAsyncCallbackDirectory.get(statelessAsyncCallbackUseCase);
    if (statelessAsyncCallback == null) {
        throw new JoynrIllegalStateException("No stateless async callback found registered for use case " + statelessAsyncCallbackUseCase);
    }
}
final ProxyInvocationHandler proxyInvocationHandler =
        proxyInvocationHandlerFactory.create(domains, interfaceName, proxyParticipantId,
                discoveryQos, messagingQos, shutdownNotifier, statelessAsyncCallback);
// This order is necessary because the Arbitrator might return early.
// But if the listener is set after the ProxyInvocationHandler the
// Arbitrator cannot return early.
arbitrator.setArbitrationListener(new ArbitrationCallback() {
    @Override
    public void onSuccess(ArbitrationResult arbitrationResult) {
        logger.debug("DISCOVERY proxy created for:{}", arbitrationResult.getDiscoveryEntries());
        // Wire the handler to the discovered providers, then notify the caller.
        proxyInvocationHandler.createConnector(arbitrationResult);
        callback.onProxyCreationFinished(proxy);
    }

    @Override
    public void onError(Throwable throwable) {
        // Normalize any throwable into a JoynrRuntimeException before aborting.
        JoynrRuntimeException reason;
        if (throwable instanceof JoynrRuntimeException) {
            reason = (JoynrRuntimeException) throwable;
        } else {
            reason = new JoynrRuntimeException(throwable);
        }
        proxyInvocationHandler.abort(reason);
        callback.onProxyCreationError(reason);
    }
});
return proxyInvocationHandler;
public class CreatePresetRequest { /** * The tags that you want to add to the resource . You can tag resources with a key - value pair or with only a key . * @ param tags * The tags that you want to add to the resource . You can tag resources with a key - value pair or with only a * key . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreatePresetRequest withTags ( java . util . Map < String , String > tags ) { } }
// Replace any existing tags with the supplied map and return this request
// to allow method chaining.
setTags(tags);
return this;
public class GetApiRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetApiRequest getApiRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Null request objects cannot be marshalled.
if (getApiRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Bind the API identifier into the protocol request.
    protocolMarshaller.marshall(getApiRequest.getApiId(), APIID_BINDING);
} catch (Exception e) {
    // Surface marshalling problems as client-side exceptions with the cause attached.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class AbstractManagedType { /** * ( non - Javadoc ) * @ see javax . persistence . metamodel . ManagedType # getDeclaredAttributes ( ) */ @ Override public Set < Attribute < X , ? > > getDeclaredAttributes ( ) { } }
// Merge the singular and plural attribute maps (either may be absent)
// into a single set of declared attributes.
final Set<Attribute<X, ?>> declared = new HashSet<Attribute<X, ?>>();
if (declaredSingluarAttribs != null) {
    declared.addAll(declaredSingluarAttribs.values());
}
if (declaredPluralAttributes != null) {
    declared.addAll(declaredPluralAttributes.values());
}
return declared;
public class CollectionUtils { /** * Like { @ link Collections # max ( java . util . Collection ) } except with a default value returned in the * case of an empty collection . */ public static < T extends Comparable < T > > T maxOr ( Collection < T > values , T defaultVal ) { } }
if ( values . isEmpty ( ) ) { return defaultVal ; } else { return Collections . max ( values ) ; }
public class DialogUtil { /** * Present a confirmation ( OK / Cancel ) dialog with the specified prompt and return the user * response . * @ param message Prompt to present to the user . * @ param title Caption of prompt dialog . * @ param responseId Optional response id if user response is to be cached . If null , the * response will not be cached . If specified , the response is cached and the user is * not prompted again . * @ param callback Callback to receive dialog response . */ public static void confirm ( String message , String title , String responseId , IConfirmCallback callback ) { } }
// Delegate to the generic prompt with OK/Cancel buttons; translate the
// dialog response into a boolean for the confirmation callback.
// BUGFIX: the stored body left the lambda and the prompt(...) argument list
// unterminated; the closing "});" is restored here.
prompt(message, title, STYLES_QUESTION, LABEL_IDS_OK_CANCEL, LABEL_ID_CANCEL, null, responseId,
        (response) -> {
            IResponseCallback.invoke(callback, response != null && response.isOk());
        });
public class JmxClient { /** * Invoke a JMX method as an array of objects . * @ return The value returned by the method or null if none . */ public Object invokeOperation ( String domain , String beanName , String operName , Object ... params ) throws Exception { } }
// Build the JMX ObjectName from domain and bean name, then delegate to the
// ObjectName-based overload.
return invokeOperation(ObjectNameUtil.makeObjectName(domain, beanName), operName, params);
public class MealyFilter { /** * Returns a Mealy machine with all transitions removed that have an output not among the specified values . The * resulting Mealy machine will not contain any unreachable states . * @ param in * the input Mealy machine * @ param inputs * the input alphabet * @ param outputs * the outputs to retain * @ return a Mealy machine with all transitions retained that have one of the specified outputs . */ public static < I , O > CompactMealy < I , O > retainTransitionsWithOutput ( MealyMachine < ? , I , ? , O > in , Alphabet < I > inputs , Collection < ? super O > outputs ) { } }
// Keep exactly those transitions whose output is contained in the given
// collection; unreachable states are pruned by the filter.
return filterByOutput(in, inputs, o -> outputs.contains(o));
public class HeartbeatImpl { /** * If the join is to the self server , and there ' s only one successful * join , update immediately . */ private void updateHubHeartbeatSelf ( ) { } }
_isHubHeartbeatSelf = true;

// With two or more pending joins the update must wait; otherwise the
// self-join can complete immediately: acknowledge every waiting heartbeat
// result and clear the list.
if (_hubHeartbeatCount >= 2) {
    return;
}
for (Result<Boolean> waiter : _hubHeartbeatList) {
    waiter.ok(true);
}
_hubHeartbeatList.clear();
public class CmsAdvancedLinkSubstitutionHandler { /** * Reads the link exclude definition file and extracts all excluded links stored in it . < p > * @ param cms the current CmsObject * @ return list of Strings , containing link exclude paths */ private List < String > readLinkExcludes ( CmsObject cms ) { } }
final List<String> excludes = new ArrayList<String>();
try {
    // Load and unmarshal the XML file holding the link exclude definitions.
    String filePath = OpenCms.getSystemInfo().getConfigFilePath(cms, LINK_EXCLUDE_DEFINIFITON_FILE);
    CmsFile file = cms.readFile(cms.readResource(filePath));
    CmsXmlContent linkExcludeDefinitions = CmsXmlContentFactory.unmarshal(cms, file);
    // XML content value indexes are 1-based.
    int count = linkExcludeDefinitions.getIndexCount(XPATH_LINK, Locale.ENGLISH);
    for (int i = 1; i <= count; i++) {
        excludes.add(linkExcludeDefinitions.getStringValue(cms, XPATH_LINK + "[" + i + "]", Locale.ENGLISH));
    }
} catch (CmsException e) {
    // Best effort: log and return whatever was collected so far.
    LOG.error(e);
}
return excludes;
public class VMath { /** * Matrix multiplication , ( a - c ) < sup > T < / sup > * B * ( a - c ) * Note : it may ( or may not ) be more efficient to materialize ( a - c ) , then use * { @ code transposeTimesTimes ( a _ minus _ c , B , a _ minus _ c ) } instead . * @ param B matrix * @ param a First vector * @ param c Center vector * @ return Matrix product , ( a - c ) < sup > T < / sup > * B * ( a - c ) */ @ Reference ( authors = "P. C. Mahalanobis" , title = "On the generalized distance in statistics" , booktitle = "Proceedings of the National Institute of Sciences of India. 2 (1)" , bibkey = "journals/misc/Mahalanobis36" ) public static double mahalanobisDistance ( final double [ ] [ ] B , final double [ ] a , final double [ ] c ) { } }
final int rows = B.length, cols = getColumnDimensionality(B);
assert rows == a.length : ERR_MATRIX_INNERDIM;
assert cols == c.length : ERR_MATRIX_INNERDIM;
assert a.length == c.length : ERR_VEC_DIMENSIONS;
// Accumulate (a-c)^T * B * (a-c) without materializing the difference vector.
double total = 0.0;
for (int k = 0; k < rows; k++) {
    final double[] row = B[k];
    // dot = row_k(B) . (a - c)
    double dot = 0;
    for (int j = 0; j < cols; j++) {
        dot += (a[j] - c[j]) * row[j];
    }
    total += (a[k] - c[k]) * dot;
}
return total;
public class AbstractClassicTag { /** * An internal method that allows a tag to qualify the < code > name < / code > paramater by converting * it from a struts style naming convention to an explicit databinding expression . The qualified * name will be returned . This method may report an error if there is an error in the expression . * @ param name the name to be qualified * @ return the name which has been qualified * @ throws JspException throws a JspException if in - page error reporting is turned off . * @ see org . apache . beehive . netui . tags . naming . FormDataNameInterceptor */ protected String qualifyAttribute ( String name ) throws JspException { } }
if (name == null) {
    return null;
}
// Convert a Struts-style name into an explicit databinding expression.
try {
    name = formRewriter.rewriteName(name, this);
} catch (ExpressionEvaluationException e) {
    // Report the malformed expression as an in-page tag error; the original
    // name is still returned below.
    registerTagError(Bundle.getString("Tags_DataSourceExpressionError",
            new Object[]{name, e.toString()}), null);
}
return name;
public class NativeLoader { /** * Determine the right windows library depending on the architecture . * @ param library The library name . * @ param osName The operating system name . * @ param osArch The system architecture . * @ return The library resource . * @ throws UnsupportedOperationException Throw an exception if no native library for this platform * was found . */ private static String determineWindowsLibrary ( final String library , final String osName , final String osArch ) { } }
String resourceName ; String platform ; String fileExtension = "dll" ; switch ( osArch ) { case ARCH_AMD64 : case ARCH_X86_64 : platform = "windows-x64" ; break ; default : throw new UnsupportedOperationException ( "Platform " + osName + ":" + osArch + " not supported" ) ; } resourceName = "/" + platform + "/" + library + "." + fileExtension ; return resourceName ;
public class CharsetHelper {
  /**
   * If a BOM is present in the {@link InputStream} it is read and if possible
   * the charset is automatically determined from the BOM.
   *
   * @param aIS
   *        The input stream to use. May not be <code>null</code>.
   * @return Never <code>null</code>. Always use the input stream contained in
   *         the returned object and never the one passed in as a parameter,
   *         because the returned IS is a push-back InputStream that has a
   *         couple of bytes already buffered!
   */
  @Nonnull
  public static InputStreamAndCharset getInputStreamAndCharsetFromBOM (@Nonnull @WillNotClose final InputStream aIS)
  {
    ValueEnforcer.notNull (aIS, "InputStream");

    // Check for BOM
    // The push-back buffer must be large enough for the longest supported BOM
    final int nMaxBOMBytes = EUnicodeBOM.getMaximumByteCount ();
    @WillNotClose
    final NonBlockingPushbackInputStream aPIS = new NonBlockingPushbackInputStream (StreamHelper.getBuffered (aIS),
                                                                                   nMaxBOMBytes);
    try
    {
      // Try to read as many bytes as necessary to determine all supported BOMs
      final byte [] aBOM = new byte [nMaxBOMBytes];
      final int nReadBOMBytes = aPIS.read (aBOM);

      EUnicodeBOM eBOM = null;
      Charset aDeterminedCharset = null;
      if (nReadBOMBytes > 0)
      {
        // Some byte BOMs were read - determine
        // Only the bytes actually read are considered for the BOM lookup
        eBOM = EUnicodeBOM.getFromBytesOrNull (ArrayHelper.getCopy (aBOM, 0, nReadBOMBytes));
        if (eBOM == null)
        {
          // No BOM recognized - unread the whole prefix so the caller sees the
          // stream from the very first byte
          aPIS.unread (aBOM, 0, nReadBOMBytes);
          // aDeterminedCharset stays null
        }
        else
        {
          if (LOGGER.isDebugEnabled ())
            LOGGER.debug ("Found " + eBOM + " on " + aIS.getClass ().getName ());

          // Unread the unnecessary parts of the BOM: we may have read more
          // bytes than this particular BOM occupies
          final int nBOMBytes = eBOM.getByteCount ();
          if (nBOMBytes < nReadBOMBytes)
            aPIS.unread (aBOM, nBOMBytes, nReadBOMBytes - nBOMBytes);

          // Use the Charset of the BOM - maybe null!
          aDeterminedCharset = eBOM.getCharset ();
        }
      }
      // eBOM and/or aDeterminedCharset may be null; the push-back stream is
      // always the one to continue reading from
      return new InputStreamAndCharset (aPIS, eBOM, aDeterminedCharset);
    }
    catch (final IOException ex)
    {
      // Close the wrapper only on failure - on success the caller owns it
      LOGGER.error ("Failed to determine BOM", ex);
      StreamHelper.close (aPIS);
      throw new UncheckedIOException (ex);
    }
  }
}
public class HiveDataset { /** * Replace various tokens ( DB , TABLE , LOGICAL _ DB , LOGICAL _ TABLE ) with their values . * @ param datasetConfig The config object that needs to be resolved with final values . * @ param realDbAndTable Real DB and Table . * @ param logicalDbAndTable Logical DB and Table . * @ return Resolved config object . */ @ VisibleForTesting protected static Config resolveConfig ( Config datasetConfig , DbAndTable realDbAndTable , DbAndTable logicalDbAndTable ) { } }
Preconditions . checkNotNull ( datasetConfig , "Dataset config should not be null" ) ; Preconditions . checkNotNull ( realDbAndTable , "Real DB and table should not be null" ) ; Preconditions . checkNotNull ( logicalDbAndTable , "Logical DB and table should not be null" ) ; Properties resolvedProperties = new Properties ( ) ; Config resolvedConfig = datasetConfig . resolve ( ) ; for ( Map . Entry < String , ConfigValue > entry : resolvedConfig . entrySet ( ) ) { if ( ConfigValueType . LIST . equals ( entry . getValue ( ) . valueType ( ) ) ) { List < String > rawValueList = resolvedConfig . getStringList ( entry . getKey ( ) ) ; List < String > resolvedValueList = Lists . newArrayList ( ) ; for ( String rawValue : rawValueList ) { String resolvedValue = StringUtils . replaceEach ( rawValue , new String [ ] { DATABASE_TOKEN , TABLE_TOKEN , LOGICAL_DB_TOKEN , LOGICAL_TABLE_TOKEN } , new String [ ] { realDbAndTable . getDb ( ) , realDbAndTable . getTable ( ) , logicalDbAndTable . getDb ( ) , logicalDbAndTable . getTable ( ) } ) ; resolvedValueList . add ( resolvedValue ) ; } StringBuilder listToStringWithQuotes = new StringBuilder ( ) ; for ( String resolvedValueStr : resolvedValueList ) { if ( listToStringWithQuotes . length ( ) > 0 ) { listToStringWithQuotes . append ( "," ) ; } listToStringWithQuotes . append ( "\"" ) . append ( resolvedValueStr ) . append ( "\"" ) ; } resolvedProperties . setProperty ( entry . getKey ( ) , listToStringWithQuotes . toString ( ) ) ; } else { String resolvedValue = StringUtils . replaceEach ( resolvedConfig . getString ( entry . getKey ( ) ) , new String [ ] { DATABASE_TOKEN , TABLE_TOKEN , LOGICAL_DB_TOKEN , LOGICAL_TABLE_TOKEN } , new String [ ] { realDbAndTable . getDb ( ) , realDbAndTable . getTable ( ) , logicalDbAndTable . getDb ( ) , logicalDbAndTable . getTable ( ) } ) ; resolvedProperties . setProperty ( entry . getKey ( ) , resolvedValue ) ; } } return ConfigUtils . propertiesToConfig ( resolvedProperties ) ;
public class Player { /** * Plays a script . * @ param file The script file name * @ param testCases The test case to run , use null to run all * @ param className The class name of the application under test , null to not launch - notice that the application is restarted for each TC * @ param args The arguments to pass to the class above * @ param properties * @ return The created player * @ throws IOException The exception */ public Player play ( File file , String [ ] testCases , String className , String [ ] args , Map < String , Object > properties ) throws IOException { } }
classToRun = className ; classToRunArgs = args ; play ( file , testCases , properties ) ; return this ;
public class AnalyzeDataSourceRiskDetails { /** * < code > * . google . privacy . dlp . v2 . AnalyzeDataSourceRiskDetails . DeltaPresenceEstimationResult delta _ presence _ estimation _ result = 9; * < / code > */ public com . google . privacy . dlp . v2 . AnalyzeDataSourceRiskDetails . DeltaPresenceEstimationResultOrBuilder getDeltaPresenceEstimationResultOrBuilder ( ) { } }
if ( resultCase_ == 9 ) { return ( com . google . privacy . dlp . v2 . AnalyzeDataSourceRiskDetails . DeltaPresenceEstimationResult ) result_ ; } return com . google . privacy . dlp . v2 . AnalyzeDataSourceRiskDetails . DeltaPresenceEstimationResult . getDefaultInstance ( ) ;
public class Dialog { /** * Set the RippleEffect of negative action button . * @ param resId The resourceId of style . * @ return The Dialog for chaining methods . */ public Dialog negativeActionRipple ( int resId ) { } }
RippleDrawable drawable = new RippleDrawable . Builder ( getContext ( ) , resId ) . build ( ) ; return negativeActionBackground ( drawable ) ;
public class TvdbParser {
  /**
   * Get a list of updates from the URL, keeping only updates relevant to the
   * given series.
   *
   * @param urlString the URL to fetch the updates XML document from
   * @param seriesId the series the updates should be filtered on (semantics
   *        delegated to {@code isValidUpdate})
   * @return the collected updates; empty (but non-null) if the document could
   *         not be fetched
   * @throws com.omertron.thetvdbapi.TvDbException
   */
  public static TVDBUpdates getUpdates(String urlString, int seriesId) throws TvDbException {
    TVDBUpdates updates = new TVDBUpdates();
    Document doc = DOMHelper.getEventDocFromUrl(urlString);
    // A null document means the fetch failed; the empty container is returned as-is
    if (doc != null) {
      // The document root holds all update entries as direct children
      Node root = doc.getChildNodes().item(0);
      List<SeriesUpdate> seriesUpdates = new ArrayList<>();
      List<EpisodeUpdate> episodeUpdates = new ArrayList<>();
      List<BannerUpdate> bannerUpdates = new ArrayList<>();
      NodeList updateNodes = root.getChildNodes();
      Node updateNode;
      // Dispatch each child node by element name into the matching update bucket,
      // dropping entries that do not pass the seriesId filter
      for (int i = 0; i < updateNodes.getLength(); i++) {
        updateNode = updateNodes.item(i);
        switch (updateNode.getNodeName()) {
          case SERIES:
            SeriesUpdate su = parseNextSeriesUpdate((Element) updateNode);
            if (isValidUpdate(seriesId, su)) {
              seriesUpdates.add(su);
            }
            break;
          case EPISODE:
            EpisodeUpdate eu = parseNextEpisodeUpdate((Element) updateNode);
            if (isValidUpdate(seriesId, eu)) {
              episodeUpdates.add(eu);
            }
            break;
          case BANNER:
            BannerUpdate bu = parseNextBannerUpdate((Element) updateNode);
            if (isValidUpdate(seriesId, bu)) {
              bannerUpdates.add(bu);
            }
            break;
          default:
            // Unrecognized element names are logged and skipped, not fatal
            LOG.warn("Unknown update type '{}'", updateNode.getNodeName());
        }
      }
      // The update timestamp lives on the root element itself
      updates.setTime(DOMHelper.getValueFromElement((Element) root, TIME));
      updates.setSeriesUpdates(seriesUpdates);
      updates.setEpisodeUpdates(episodeUpdates);
      updates.setBannerUpdates(bannerUpdates);
    }
    return updates;
  }
}
public class ObjectGraphDump { /** * Visits all the fields in the given ComponentModel . * @ param node the ObjectGraphNode containing the ComponentModel . */ private void visitComponentModel ( final ObjectGraphNode node ) { } }
ComponentModel model = ( ComponentModel ) node . getValue ( ) ; ComponentModel sharedModel = null ; List < Field > fieldList = ReflectionUtil . getAllFields ( node . getValue ( ) , true , false ) ; Field [ ] fields = fieldList . toArray ( new Field [ fieldList . size ( ) ] ) ; for ( int i = 0 ; i < fields . length ; i ++ ) { if ( ComponentModel . class . equals ( fields [ i ] . getDeclaringClass ( ) ) && "sharedModel" . equals ( fields [ i ] . getName ( ) ) ) { sharedModel = ( ComponentModel ) readField ( fields [ i ] , model ) ; } } visitComplexTypeWithDiff ( node , sharedModel ) ;
public class DefaultVFBondMatcher { /** * { @ inheritDoc } * @ param targetConatiner target container * @ param targetBond target bond * @ return true if bonds match */ @ Override public boolean matches ( TargetProperties targetConatiner , IBond targetBond ) { } }
if ( this . smartQueryBond != null ) { return smartQueryBond . matches ( targetBond ) ; } else { if ( ! isBondMatchFlag ( ) ) { return true ; } if ( isBondMatchFlag ( ) && isBondTypeMatch ( targetBond ) ) { return true ; } if ( isBondMatchFlag ( ) && this . unsaturation == getUnsaturation ( targetConatiner , targetBond ) ) { return true ; } } return false ;
public class DelegateClassLoader {
  /**
   * {@inheritDoc}
   *
   * Collects the delegate's resources and (when present) the parent's, and
   * returns a lazy enumeration over both. The combined enumeration walks the
   * collected enumerations in reverse insertion order, i.e. the parent's
   * resources (added last) are served before the delegate's.
   * NOTE(review): confirm that parent-before-delegate ordering is intended.
   */
  @Override
  public Enumeration<URL> getResources(final String name) throws IOException {
    final ArrayList<Enumeration<URL>> foundResources = new ArrayList<Enumeration<URL>>();
    foundResources.add(delegate.getResources(name));
    if (parent != null) {
      foundResources.add(parent.getResources(name));
    }
    return new Enumeration<URL>() {
      // Index of the enumeration currently being drained; walks backwards to -1.
      private int position = foundResources.size() - 1;

      public boolean hasMoreElements() {
        // Skip over exhausted enumerations until one with elements is found.
        while (position >= 0) {
          if (foundResources.get(position).hasMoreElements()) {
            return true;
          }
          position--;
        }
        return false;
      }

      public URL nextElement() {
        while (position >= 0) {
          try {
            return (foundResources.get(position)).nextElement();
          } catch (NoSuchElementException e) {
            // Intentionally ignored: this enumeration is exhausted,
            // fall through and try the next (earlier) one.
          }
          position--;
        }
        // All underlying enumerations are exhausted.
        throw new NoSuchElementException();
      }
    };
  }
}
public class InvocationDirector { /** * Removes a receiver registration . */ public void unregisterReceiver ( String receiverCode ) { } }
// remove the receiver from the list for ( Iterator < InvocationDecoder > iter = _reclist . iterator ( ) ; iter . hasNext ( ) ; ) { InvocationDecoder decoder = iter . next ( ) ; if ( decoder . getReceiverCode ( ) . equals ( receiverCode ) ) { iter . remove ( ) ; } } // if we ' re logged on , clear out any receiver id mapping if ( _clobj != null ) { Registration rreg = _clobj . receivers . get ( receiverCode ) ; if ( rreg == null ) { log . warning ( "Receiver unregistered for which we have no id to code mapping" , "code" , receiverCode ) ; } else { _receivers . remove ( rreg . receiverId ) ; // Log . info ( " Cleared receiver " + StringUtil . shortClassName ( decoder ) + // " " + rreg + " . " ) ; } _clobj . removeFromReceivers ( receiverCode ) ; }
public class LiveOutputsInner {
  /**
   * Delete Live Output.
   * Deletes a Live Output.
   *
   * This is the synchronous variant: it blocks the calling thread until the
   * async delete call completes (or fails).
   *
   * @param resourceGroupName The name of the resource group within the Azure subscription.
   * @param accountName The Media Services account name.
   * @param liveEventName The name of the Live Event.
   * @param liveOutputName The name of the Live Output.
   * @throws IllegalArgumentException thrown if parameters fail the validation
   * @throws ApiErrorException thrown if the request is rejected by server
   * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
   */
  public void beginDelete(String resourceGroupName, String accountName, String liveEventName, String liveOutputName) {
    // Delegate to the async implementation and block for its single response;
    // body() is invoked only to surface deserialization/service errors.
    beginDeleteWithServiceResponseAsync(resourceGroupName, accountName, liveEventName, liveOutputName).toBlocking().single().body();
  }
}