signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SubstructureIdentifier {
    /**
     * Supplements the reduced structure with ligands from the full structure based on
     * a distance cutoff. Ligand groups are moved (destructively) from full to reduced
     * if they fall within the cutoff of any atom in the reduced structure.
     *
     * @param full      Structure containing all ligands
     * @param reduced   Structure with a subset of the polymer groups from full
     * @param cutoff    distance cutoff (Å)
     * @param fromModel source model in full
     * @param toModel   destination model in reduced
     * @see StructureTools#getLigandsByProximity(java.util.Collection, Atom[], double)
     */
    protected static void copyLigandsByProximity(Structure full, Structure reduced, double cutoff, int fromModel, int toModel) {
        // Geometric hashing of the reduced structure: index all heavy, non-water
        // atoms so contact queries are fast.
        Grid grid = new Grid(cutoff);
        Atom[] nonwaters = StructureTools.getAllNonHAtomArray(reduced, true, toModel);
        if (nonwaters.length < 1)
            return; // nothing in reduced to be near; no ligands can qualify
        grid.addAtoms(nonwaters);
        full.getNonPolyChains(fromModel).stream() // potential ligand chains
                .flatMap((chain) -> chain.getAtomGroups().stream()) // potential ligand groups
                .filter((g) -> !g.isWater()) // ignore waters
                .filter((g) -> !g.isPolymeric()) // already shouldn't be polymeric, but filter anyway
                .filter((g) -> grid.hasAnyContact(Calc.atomsToPoints(g.getAtoms()))) // must contact reduced
                .sequential() // keeps ligands from the same chain together if possible
                .reduce((Chain) null, // reduction carries the current "chain guess" for placement
                        (guess, g) -> {
                            boolean wasAdded;
                            try {
                                // Check that it's not in reduced already
                                wasAdded = reduced.findGroup(g.getChainId(), g.getResidueNumber().toString(), toModel) != null;
                            } catch (StructureException e) {
                                // not found
                                wasAdded = false;
                            }
                            if (!wasAdded) {
                                // Add the ligand to reduced.
                                // NOTE: this is not idempotent, but it is synchronized on reduced.
                                logger.info("Adding ligand group {} {} by proximity", g.getPDBName(), g.getResidueNumber().toPDB());
                                return StructureTools.addGroupToStructure(reduced, g, toModel, guess, false);
                            }
                            return guess;
                        },
                        // combiner: keep the newest guess (stream is sequential, so unused in practice)
                        (oldGuess, newGuess) -> newGuess);
    }
}
public class ArchetypeUtils { /** * Returns true if this file is a valid source file ; so * excluding things like . svn directories and whatnot */ public boolean isValidSourceFileOrDir ( File file ) { } }
String name = file . getName ( ) ; return ! isExcludedDotFile ( name ) && ! excludeExtensions . contains ( Files . getExtension ( file . getName ( ) ) ) ;
public class DbUtil {
    /**
     * Gets the <code>String</code> denoting the specified SQL data type.
     *
     * @param type   the data type to get the name of; valid values are the
     *               static fields of {@link java.sql.Types}
     * @param length the length to assign to data types that require one
     *               (e.g. <code>VARCHAR(n)</code>), or zero for none
     * @param con    the <code>Connection</code> for which to get the type name
     * @return the name of the type, or <code>null</code> if no such type exists
     * @throws SQLException if an error occurs while communicating with the database
     * @see java.sql.Types
     */
    public static String getTypeName(int type, int length, Connection con) throws SQLException {
        // Delegate to the DatabaseMetaData-based overload.
        return getTypeName(type, length, con.getMetaData());
    }
}
public class FastaFormat { /** * method to generate for the whole HELM2Notation fasta - files , it contains * fasta for all rna and peptides * @ param helm2Notation2 * HELM2Notation * @ return FASTA - File - Format * @ throws FastaFormatException * if the HELM2Notation can not be transformed to FASTA * @ throws ChemistryException * if the Chemistry Engine can not be initialized */ public static String generateFasta ( HELM2Notation helm2Notation2 ) throws FastaFormatException , ChemistryException { } }
List < PolymerNotation > polymersPeptides = new ArrayList < PolymerNotation > ( ) ; List < PolymerNotation > polymerNucleotides = new ArrayList < PolymerNotation > ( ) ; StringBuilder fasta = new StringBuilder ( ) ; for ( PolymerNotation polymer : helm2Notation2 . getListOfPolymers ( ) ) { if ( polymer . getPolymerID ( ) instanceof RNAEntity ) { polymerNucleotides . add ( polymer ) ; } if ( polymer . getPolymerID ( ) instanceof PeptideEntity ) { polymersPeptides . add ( polymer ) ; } } fasta . append ( generateFastaFromPeptidePolymer ( polymersPeptides ) ) ; fasta . append ( generateFastaFromRNAPolymer ( polymerNucleotides ) ) ; return fasta . toString ( ) ;
public class ContentsPhrase { /** * Public API . */ public Formula getFormula ( ) { } }
Reagent [ ] reagents = new Reagent [ ] { ResourceHelper . CONTEXT_TARGET , ResourceHelper . LOCATION_PHRASE } ; return new SimpleFormula ( ContentsPhrase . class , reagents ) ;
public class ControlParametersImpl {
    /**
     * Resets the feature identified by {@code featureID} back to its default
     * (unset) state; unknown IDs are forwarded to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case BpsimPackage.CONTROL_PARAMETERS__PROBABILITY:
                setProbability((Parameter) null);
                return;
            case BpsimPackage.CONTROL_PARAMETERS__CONDITION:
                setCondition((Parameter) null);
                return;
            case BpsimPackage.CONTROL_PARAMETERS__INTER_TRIGGER_TIMER:
                setInterTriggerTimer((Parameter) null);
                return;
            case BpsimPackage.CONTROL_PARAMETERS__TRIGGER_COUNT:
                setTriggerCount((Parameter) null);
                return;
        }
        // Not one of ours — let the generated superclass handle it.
        super.eUnset(featureID);
    }
}
public class FilteredJobLifecycleListener {
    /**
     * {@inheritDoc}
     *
     * Forwards the update to the delegate only when the configured filter
     * accepts the job; filtered-out jobs are silently dropped.
     */
    @Override
    public void onUpdateJob(JobSpec updatedJob) {
        if (this.filter.apply(updatedJob)) {
            this.delegate.onUpdateJob(updatedJob);
        }
    }
}
public class DaylightModel { /** * Is the element specified by the atomic number , allowed to be aromatic by * the daylight specification . Allowed elements are C , N , O , P , S , As , Se * and * . This model allows all except for the unknown ( ' * ' ) element . * @ param element atomic number of element * @ return the element can be aromatic */ private static boolean aromaticElement ( int element ) { } }
switch ( element ) { case CARBON : case NITROGEN : case OXYGEN : case PHOSPHORUS : case SULPHUR : case ARSENIC : case SELENIUM : return true ; } return false ;
public class UniversalIdIntQueueMessage {
    /**
     * Create a new {@link UniversalIdIntQueueMessage} object with the specified
     * content.
     *
     * @param content payload bytes to attach to the new message
     * @return a freshly created message carrying {@code content}
     * @since 0.6.0
     */
    public static UniversalIdIntQueueMessage newInstance(byte[] content) {
        // Build via the no-arg factory, then attach the payload.
        UniversalIdIntQueueMessage msg = newInstance();
        msg.setContent(content);
        return msg;
    }
}
public class StreamingJsonBuilder {
    /**
     * Named arguments can be passed to the JSON builder instance to create a
     * root JSON object.
     * Example:
     * <pre class="groovyTestCase">
     * new StringWriter().with { w ->
     *     def json = new groovy.json.StreamingJsonBuilder(w)
     *     json name: "Tim", age: 31
     *     assert w.toString() == '{"name":"Tim","age":31}'
     * }
     * </pre>
     *
     * @param m a map of key/value pairs
     * @return the same map of key/value pairs
     */
    public Object call(Map m) throws IOException {
        // Serialize the map as the root JSON object directly to the writer.
        writer.write(JsonOutput.toJson(m));
        return m;
    }
}
public class ResponsiveDisplayAd {
    /**
     * Gets the squareMarketingImage value for this ResponsiveDisplayAd.
     *
     * @return squareMarketingImage — square marketing image used when a 1:1
     *         aspect ratio is more appropriate than that of the
     *         {@link #marketingImage} image. The image must be created via the
     *         MediaService (Image.mediaId populated); Image.data is not
     *         allowed. Valid types are GIF, JPEG and PNG; minimum size
     *         300x300; aspect ratio 1:1 (+-1%).
     *         <span class="constraint Selectable">This field can be selected
     *         using the value "SquareMarketingImage".</span>
     */
    public com.google.api.ads.adwords.axis.v201809.cm.Image getSquareMarketingImage() {
        return squareMarketingImage;
    }
}
public class DPathUtils { /** * Extract a value from the target object using DPath expression ( generic * version ) . * @ param target * @ param dPath * @ param clazz * @ return */ public static < T > T getValue ( Object target , String dPath , Class < T > clazz ) { } }
if ( clazz == null ) { throw new NullPointerException ( "Class parameter is null!" ) ; } Object temp = getValue ( target , dPath ) ; return ValueUtils . convertValue ( temp , clazz ) ;
public class DateTimeConverter {
    /**
     * METHODS
     *
     * Converts the submitted string into a Date using the configured format,
     * type and time zone.
     *
     * @param facesContext current faces context; must not be null
     * @param uiComponent  component whose value is being converted; must not be null
     * @param value        submitted string; may be null or blank
     * @return the parsed Date, or null when value is null or blank
     * @throws ConverterException when parsing fails; the message id chosen
     *         depends on the configured type (date, time or both)
     */
    public Object getAsObject(FacesContext facesContext, UIComponent uiComponent, String value) {
        if (facesContext == null) {
            throw new NullPointerException("facesContext");
        }
        if (uiComponent == null) {
            throw new NullPointerException("uiComponent");
        }
        if (value != null) {
            value = value.trim();
            if (value.length() > 0) {
                DateFormat format = getDateFormat();
                TimeZone tz = getTimeZone();
                if (tz != null) {
                    format.setTimeZone(tz);
                }
                try {
                    return format.parse(value);
                } catch (ParseException e) {
                    String type = getType();
                    // Message arguments: the offending value, an example in the
                    // expected format, and the component's label.
                    Object[] args = new Object[] { value, format.format(new Date()), _MessageUtils.getLabel(facesContext, uiComponent) };
                    if (type.equals(TYPE_DATE)) {
                        throw new ConverterException(_MessageUtils.getErrorMessage(facesContext, DATE_ID, args));
                    } else if (type.equals(TYPE_TIME)) {
                        throw new ConverterException(_MessageUtils.getErrorMessage(facesContext, TIME_ID, args));
                    } else if (type.equals(TYPE_BOTH)) {
                        throw new ConverterException(_MessageUtils.getErrorMessage(facesContext, DATETIME_ID, args));
                    } else {
                        // NOTE(review): reads the _type field here while the local
                        // 'type' came from getType() — confirm these always agree.
                        throw new ConverterException("invalid type '" + _type + "'");
                    }
                }
            }
        }
        // Null or blank input converts to null per JSF converter convention.
        return null;
    }
}
public class TimePickerFragment {
    /**
     * Notifies every attached {@code InstantPickerListener} — the host
     * activity, the parent fragment and the target fragment — that an instant
     * was selected. Each is notified independently, so more than one listener
     * may receive the callback.
     */
    @SuppressWarnings("unchecked")
    protected void notifyInstantSelected() {
        if (getActivity() instanceof InstantPickerListener) {
            ((InstantPickerListener<TimeInstantT>) getActivity()).onInstantSelected(getPickerId(), getSelectedInstant());
        }
        if (getParentFragment() instanceof InstantPickerListener) {
            ((InstantPickerListener<TimeInstantT>) getParentFragment()).onInstantSelected(getPickerId(), getSelectedInstant());
        }
        if (getTargetFragment() instanceof InstantPickerListener) {
            ((InstantPickerListener<TimeInstantT>) getTargetFragment()).onInstantSelected(getPickerId(), getSelectedInstant());
        }
    }
}
public class DefaultGroovyMethods {
    /**
     * Returns a new <code>Map</code> containing all entries from <code>left</code>
     * and <code>right</code>, giving precedence to <code>right</code>. Any keys
     * appearing in both maps take their values from the <code>right</code>
     * operand. If the <code>left</code> map is a TreeMap, LinkedHashMap,
     * Hashtable or Properties, the returned map preserves that type; otherwise
     * a HashMap is returned.
     * Roughly equivalent to
     * <code>Map m = new HashMap(); m.putAll(left); m.putAll(right); return m;</code>
     * but with extra logic to preserve the <code>left</code> map type.
     * <pre class="groovyTestCase">
     * assert [a:10, b:20] + [a:5, c:7] == [a:5, b:20, c:7]
     * </pre>
     *
     * @param left  a Map
     * @param right a Map
     * @return a new Map containing all entries from left and right
     * @since 1.5.0
     */
    public static <K, V> Map<K, V> plus(Map<K, V> left, Map<K, V> right) {
        // Copy left preserving its concrete type where possible, then overlay right.
        Map<K, V> map = cloneSimilarMap(left);
        map.putAll(right);
        return map;
    }
}
public class U {
    /**
     * Documented, #join
     *
     * Joins the string representations of the items with the given separator.
     * Null elements are rendered as "null" instead of raising a
     * NullPointerException.
     *
     * @param iterable the elements to join
     * @param separator inserted between consecutive elements
     * @return the joined string; empty when the iterable has no elements
     */
    public static <T> String join(final Iterable<T> iterable, final String separator) {
        final StringBuilder sb = new StringBuilder();
        int index = 0;
        for (final T item : iterable) {
            if (index > 0) {
                sb.append(separator);
            }
            // append(Object) is null-safe, unlike item.toString()
            sb.append(item);
            index += 1;
        }
        return sb.toString();
    }
}
public class Key {
    /**
     * Make a Key which is homed to specific nodes.
     *
     * @param kb         the original key bytes, appended after the header
     * @param rf         replication factor passed through to {@code make(byte[], byte)}
     * @param systemType system key type tag; must be below 32
     * @param replicas   nodes the key is homed to; at most 3
     * @return the constructed Key
     */
    static public Key make(byte[] kb, byte rf, byte systemType, H2ONode... replicas) {
        // no more than 3 replicas allowed to be stored in the key
        assert 0 <= replicas.length && replicas.length <= 3;
        assert systemType < 32; // only system keys allowed
        // Key byte layout is:
        //  0   - systemType, from 0-31
        //  1   - replica-count, plus up to 3 bits for ip4 vs ip6
        //  2-n - zero, one, two or 3 IP4 (4+2 bytes) or IP6 (16+2 bytes) addresses
        //  2-5 - 4 bytes of chunk#, or -1 for masters
        //  n+  - repeat of the original kb
        AutoBuffer ab = new AutoBuffer();
        ab.put1(systemType).put1(replicas.length);
        for (H2ONode h2o : replicas)
            h2o.write(ab);
        ab.put4(-1); // master marker in the chunk# slot
        ab.putA1(kb, kb.length);
        // Trim the buffer to the bytes actually written before delegating.
        return make(Arrays.copyOf(ab.buf(), ab.position()), rf);
    }
}
public class AbstractAggregatingDefaultQueryPersonAttributeDao {
    /**
     * Merges the results of calling
     * {@link IPersonAttributeDao#getPossibleUserAttributeNames(IPersonAttributeDaoFilter)}
     * on each child dao using the configured
     * {@link IAttributeMerger#mergePossibleUserAttributeNames(Set, Set)}.
     * If all children return null this method returns null as well; if any
     * child returns non-null this method will not return null.
     *
     * @see IPersonAttributeDao#getPossibleUserAttributeNames(IPersonAttributeDaoFilter)
     */
    @Override
    @JsonIgnore
    public final Set<String> getPossibleUserAttributeNames(final IPersonAttributeDaoFilter filter) {
        Set<String> attrNames = null;
        for (final IPersonAttributeDao currentDao : this.personAttributeDaos) {
            // Skip DAOs the filter rejects.
            if (filter != null && !filter.choosePersonAttributeDao(currentDao)) {
                continue;
            }
            boolean handledException = false;
            Set<String> currentDaoAttrNames = null;
            try {
                currentDaoAttrNames = currentDao.getPossibleUserAttributeNames(filter);
                if (this.logger.isDebugEnabled()) {
                    this.logger.debug("Retrieved possible attribute names '" + currentDaoAttrNames + "' from '" + currentDao + "'");
                }
            } catch (final RuntimeException rte) {
                // Delegate error policy to handleRuntimeException; it may swallow or rethrow.
                handledException |= handleRuntimeException(currentDao, rte);
            }
            if (currentDaoAttrNames != null) {
                if (attrNames == null) {
                    attrNames = new LinkedHashSet<>();
                }
                attrNames = this.attrMerger.mergePossibleUserAttributeNames(attrNames, currentDaoAttrNames);
            }
            // stopOnSuccess short-circuits after the first child that did not throw.
            if (this.stopOnSuccess && !handledException) {
                if (this.logger.isDebugEnabled()) {
                    this.logger.debug("Successfully retrieved possible user attributes from a child DAO and stopOnSuccess is true, stopping iteration of child DAOs");
                }
                break;
            }
        }
        if (this.logger.isDebugEnabled()) {
            this.logger.debug("Aggregated possible attribute names '" + attrNames + "'");
        }
        if (attrNames == null) {
            return null;
        }
        return Collections.unmodifiableSet(attrNames);
    }
}
public class ProxyBuilder {
    /**
     * Emits the bytecode for a generated method's return statement, unboxing
     * primitive results and casting reference results as needed.
     * (Original author note: "This one is tricky to fix, I gave up.")
     *
     * @param code                     code emitter to append instructions to
     * @param methodReturnType         declared return type of the proxied method
     * @param localForResultOfInvoke   local holding the raw invoke result
     * @param localOfMethodReturnType  local typed as the method's return type
     * @param aBoxedResult             scratch local for the boxed value
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    private static void generateCodeForReturnStatement(Code code, Class methodReturnType, Local localForResultOfInvoke, Local localOfMethodReturnType, Local aBoxedResult) {
        if (PRIMITIVE_TO_UNBOX_METHOD.containsKey(methodReturnType)) {
            // Primitive return: cast the raw result to its wrapper, then unbox.
            code.cast(aBoxedResult, localForResultOfInvoke);
            MethodId unboxingMethodFor = getUnboxMethodForPrimitive(methodReturnType);
            code.invokeVirtual(unboxingMethodFor, localOfMethodReturnType, aBoxedResult);
            code.returnValue(localOfMethodReturnType);
        } else if (void.class.equals(methodReturnType)) {
            code.returnVoid();
        } else {
            // Reference return: cast and return directly.
            code.cast(localOfMethodReturnType, localForResultOfInvoke);
            code.returnValue(localOfMethodReturnType);
        }
    }
}
public class BaseField {
    /**
     * Constructor.
     *
     * @param record      the parent record
     * @param strName     the field name
     * @param iDataLength the maximum string length (pass -1 for default)
     * @param strDesc     the string description (usually pass null to use the resource file desc)
     * @param strDefault  the default value (if object, this value is the default; if string, the string is the default)
     */
    public void init(Record record, String strName, int iDataLength, String strDesc, Object strDefault) {
        // Reset per-field state BEFORE delegating to super.init, which may
        // depend on these defaults during initialization.
        m_bJustChanged = false;
        m_DBObject = null;
        m_listener = null;
        m_bVirtual = false;
        m_bSelected = true;
        m_bNullable = true;
        super.init(record, strName, iDataLength, strDesc, strDefault);
        this.setModified(false); // No modifications to start with
    }
}
public class ResourceManager {
    /**
     * Registers a new JobMaster.
     *
     * @param jobMasterGateway     to communicate with the registering JobMaster
     * @param jobId                of the job for which the JobMaster is responsible
     * @param jobManagerAddress    address of the JobMaster
     * @param jobManagerResourceId ResourceID of the JobMaster
     * @return RegistrationResponse
     */
    private RegistrationResponse registerJobMasterInternal(final JobMasterGateway jobMasterGateway, JobID jobId, String jobManagerAddress, ResourceID jobManagerResourceId) {
        if (jobManagerRegistrations.containsKey(jobId)) {
            JobManagerRegistration oldJobManagerRegistration = jobManagerRegistrations.get(jobId);
            if (Objects.equals(oldJobManagerRegistration.getJobMasterId(), jobMasterGateway.getFencingToken())) {
                // same registration
                log.debug("Job manager {}@{} was already registered.", jobMasterGateway.getFencingToken(), jobManagerAddress);
            } else {
                // tell old job manager that he is no longer the job leader
                disconnectJobManager(oldJobManagerRegistration.getJobID(), new Exception("New job leader for job " + jobId + " found."));
                JobManagerRegistration jobManagerRegistration = new JobManagerRegistration(jobId, jobManagerResourceId, jobMasterGateway);
                jobManagerRegistrations.put(jobId, jobManagerRegistration);
                jmResourceIdRegistrations.put(jobManagerResourceId, jobManagerRegistration);
            }
        } else {
            // new registration for the job
            JobManagerRegistration jobManagerRegistration = new JobManagerRegistration(jobId, jobManagerResourceId, jobMasterGateway);
            jobManagerRegistrations.put(jobId, jobManagerRegistration);
            jmResourceIdRegistrations.put(jobManagerResourceId, jobManagerRegistration);
        }
        log.info("Registered job manager {}@{} for job {}.", jobMasterGateway.getFencingToken(), jobManagerAddress, jobId);
        // Begin heartbeat monitoring of the newly registered JobManager.
        jobManagerHeartbeatManager.monitorTarget(jobManagerResourceId, new HeartbeatTarget<Void>() {
            @Override
            public void receiveHeartbeat(ResourceID resourceID, Void payload) {
                // the ResourceManager will always send heartbeat requests to the JobManager
            }

            @Override
            public void requestHeartbeat(ResourceID resourceID, Void payload) {
                jobMasterGateway.heartbeatFromResourceManager(resourceID);
            }
        });
        return new JobMasterRegistrationSuccess(getFencingToken(), resourceId);
    }
}
public class ServerConfiguration {
    /**
     * Gets the value for the given key as an enum value.
     *
     * @param key      the key to get the value for
     * @param enumType the type of the enum
     * @param <T>      the type of the enum
     * @return the value for the given key as an enum value
     */
    public static <T extends Enum<T>> T getEnum(PropertyKey key, Class<T> enumType) {
        // Delegate to the shared server configuration instance.
        return sConf.getEnum(key, enumType);
    }
}
public class DFSFolder {
    /**
     * Upload the given file or directory into this DfsFolder.
     *
     * @param monitor progress monitor; checked for cancellation between children
     * @param file    local file or directory to upload
     * @throws IOException if the DFS operations fail
     */
    public void upload(IProgressMonitor monitor, final File file) throws IOException {
        if (file.isDirectory()) {
            Path filePath = new Path(this.path, file.getName());
            getDFS().mkdirs(filePath);
            DFSFolder newFolder = new DFSFolder(this, filePath);
            monitor.worked(1);
            // NOTE(review): File.listFiles() can return null on I/O error —
            // confirm the caller guarantees a readable directory.
            for (File child : file.listFiles()) {
                if (monitor.isCanceled())
                    return;
                newFolder.upload(monitor, child);
            }
        } else if (file.isFile()) {
            Path filePath = new Path(this.path, file.getName());
            // presumably the DFSFile constructor performs the actual upload as
            // a side effect; the local is otherwise unused — verify.
            DFSFile newFile = new DFSFile(this, filePath, file, monitor);
        } else {
            // XXX don't know what the file is?
        }
    }
}
public class TextDifferenceListenerBase { /** * Delegates to the nested DifferenceListener unless the * Difference is of type { @ link DifferenceConstants # ATTR _ VALUE _ ID * ATTR _ VALUE _ ID } , { @ link DifferenceConstants # CDATA _ VALUE _ ID * CDATA _ VALUE _ ID } , { @ link DifferenceConstants # COMMENT _ VALUE _ ID * COMMENT _ VALUE _ ID } or { @ link DifferenceConstants # TEXT _ VALUE _ ID * TEXT _ VALUE _ ID } - for those special differences { @ link * # attributeDifference attributeDifference } , { @ link * # cdataDifference cdataDifference } , { @ link # commentDifference * commentDifference } or { @ link # textDifference textDifference } * are invoked respectively . */ public int differenceFound ( Difference difference ) { } }
switch ( difference . getId ( ) ) { case DifferenceConstants . ATTR_VALUE_ID : return attributeDifference ( difference ) ; case DifferenceConstants . CDATA_VALUE_ID : return cdataDifference ( difference ) ; case DifferenceConstants . COMMENT_VALUE_ID : return commentDifference ( difference ) ; case DifferenceConstants . TEXT_VALUE_ID : return textDifference ( difference ) ; } return delegateTo . differenceFound ( difference ) ;
public class ConfigDescriptorFactory {
    /**
     * Build a {@link ConfigDescriptor} for a specific Method and given optional scope.
     *
     * @param method   method to include in config descriptor
     * @param scopeOpt optional scope for the config descriptor
     * @return a {@link ConfigDescriptor} for the given method, to be used internally in the config system
     */
    public ConfigDescriptor buildDescriptor(Method method, Optional<String> scopeOpt) {
        // Delegate to the three-argument overload with an empty third argument.
        return buildDescriptor(method, scopeOpt, Optional.empty());
    }
}
public class ServerVulnerabilityAssessmentsInner {
    /**
     * Creates or updates the server's vulnerability assessment.
     *
     * @param resourceGroupName the name of the resource group that contains the resource;
     *        obtainable from the Azure Resource Manager API or the portal
     * @param serverName the name of the server for which the vulnerability assessment is defined
     * @param parameters the requested resource
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ServerVulnerabilityAssessmentInner object
     */
    public Observable<ServerVulnerabilityAssessmentInner> createOrUpdateAsync(String resourceGroupName, String serverName, ServerVulnerabilityAssessmentInner parameters) {
        // Unwrap the ServiceResponse envelope, emitting only the body.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, parameters).map(new Func1<ServiceResponse<ServerVulnerabilityAssessmentInner>, ServerVulnerabilityAssessmentInner>() {
            @Override
            public ServerVulnerabilityAssessmentInner call(ServiceResponse<ServerVulnerabilityAssessmentInner> response) {
                return response.body();
            }
        });
    }
}
public class DataWriterUtil { /** * The function to convert time without a date ( hour , minute , second ) from the sas7bdat file format * ( which is the number of seconds elapsed from the midnight ) into a string of the format set by the constants : * { @ link DataWriterUtil # HOURS _ OUTPUT _ FORMAT } , { @ link DataWriterUtil # MINUTES _ OUTPUT _ FORMAT } , * { @ link DataWriterUtil # SECONDS _ OUTPUT _ FORMAT } , and { @ link DataWriterUtil # TIME _ DELIMETER } . * @ param secondsFromMidnight the number of seconds elapsed from the midnight . * @ return the string of time in the format set by constants . */ private static String convertTimeElementToString ( Long secondsFromMidnight ) { } }
return String . format ( HOURS_OUTPUT_FORMAT , secondsFromMidnight / SECONDS_IN_MINUTE / MINUTES_IN_HOUR ) + TIME_DELIMETER + String . format ( MINUTES_OUTPUT_FORMAT , secondsFromMidnight / SECONDS_IN_MINUTE % MINUTES_IN_HOUR ) + TIME_DELIMETER + String . format ( SECONDS_OUTPUT_FORMAT , secondsFromMidnight % SECONDS_IN_MINUTE ) ;
public class AsyncDownloader {
    /**
     * Aborts a transfer at the given slot.
     *
     * @param slot index of the transfer to abort
     */
    public void abort(int slot) {
        try {
            HttpGet httpGet = httpGets.get(slot);
            httpGet.abort();
            abortListeners(httpGet.getURI().toString());
        } catch (Exception e) {
            // Best-effort abort: an invalid slot or an already-aborted request
            // is logged rather than propagated.
            log.error(e.getMessage());
        }
    }
}
public class CertificatesInner {
    /**
     * Get all certificates in a resource group.
     * Get all certificates in a resource group.
     *
     * @param nextPageLink the NextLink from the previous successful call to the List operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;CertificateInner&gt; object
     */
    public Observable<Page<CertificateInner>> listByResourceGroupNextAsync(final String nextPageLink) {
        // Unwrap the ServiceResponse envelope, emitting only the page body.
        return listByResourceGroupNextWithServiceResponseAsync(nextPageLink).map(new Func1<ServiceResponse<Page<CertificateInner>>, Page<CertificateInner>>() {
            @Override
            public Page<CertificateInner> call(ServiceResponse<Page<CertificateInner>> response) {
                return response.body();
            }
        });
    }
}
public class CmsAttributeHandler {
    /**
     * Returns the attribute choice name for the given index.<p>
     *
     * @param valueIndex the value index
     * @return the attribute choice name, or null when this is not a choice
     *         handler or no option matches
     */
    private String getChoiceName(int valueIndex) {
        if (isChoiceHandler()) {
            CmsEntity choice = m_entity.getAttribute(CmsType.CHOICE_ATTRIBUTE_NAME).getComplexValues().get(valueIndex);
            if (choice != null) {
                // Return the first declared option the choice entity actually carries.
                for (String option : getAttributeType().getAttributeNames()) {
                    if (choice.hasAttribute(option)) {
                        return option;
                    }
                }
            }
        }
        return null;
    }
}
public class WebPage { /** * Gets the index of this page in the parents list of children pages . */ final public int getPageIndexInParent ( WebSiteRequest req ) throws IOException , SQLException { } }
WebPage [ ] myPages = getParent ( ) . getCachedPages ( req ) ; int len = myPages . length ; for ( int c = 0 ; c < len ; c ++ ) if ( myPages [ c ] . equals ( this ) ) return c ; throw new RuntimeException ( "Unable to find page index in parent." ) ;
public class BigDecimal {
    /**
     * Multiplies two long values and rounds according to {@code MathContext}.
     * Fast path uses a 64-bit product; on overflow the 128-bit product is
     * computed in four 32x32 partial products; as a last resort it falls back
     * to BigInteger arithmetic.
     */
    private static BigDecimal multiplyAndRound(long x, long y, int scale, MathContext mc) {
        long product = multiply(x, y);
        if (product != INFLATED) {
            // Fast path: product fit in 64 bits.
            return doRound(product, scale, mc);
        }
        // attempt to do it in 128 bits
        // Work with magnitudes; track the result sign separately.
        int rsign = 1;
        if (x < 0) {
            x = -x;
            rsign = -1;
        }
        if (y < 0) {
            y = -y;
            rsign *= -1;
        }
        // multiply dividend0 * dividend1: schoolbook 32-bit limb multiplication.
        long m0_hi = x >>> 32;
        long m0_lo = x & LONG_MASK;
        long m1_hi = y >>> 32;
        long m1_lo = y & LONG_MASK;
        product = m0_lo * m1_lo;
        long m0 = product & LONG_MASK;
        long m1 = product >>> 32;
        product = m0_hi * m1_lo + m1;
        m1 = product & LONG_MASK;
        long m2 = product >>> 32;
        product = m0_lo * m1_hi + m1;
        m1 = product & LONG_MASK;
        m2 += product >>> 32;
        long m3 = m2 >>> 32;
        m2 &= LONG_MASK;
        product = m0_hi * m1_hi + m2;
        m2 = product & LONG_MASK;
        m3 = ((product >>> 32) + m3) & LONG_MASK;
        // Reassemble the four 32-bit limbs into a 128-bit value (hi, lo).
        final long mHi = make64(m3, m2);
        final long mLo = make64(m1, m0);
        BigDecimal res = doRound128(mHi, mLo, rsign, scale, mc);
        if (res != null) {
            return res;
        }
        // Slow path: exact BigInteger product, then round.
        res = new BigDecimal(BigInteger.valueOf(x).multiply(y * rsign), INFLATED, scale, 0);
        return doRound(res, mc);
    }
}
public class TcpIpServer {
    /**
     * Start the TcpIpServer: installs a default handler if none was set and
     * binds the acceptor to the configured host and port. Does nothing when no
     * acceptor is configured.
     *
     * @throws IOException if binding the acceptor fails
     */
    public final synchronized void start() throws IOException {
        if (this.acceptor != null) {
            // Install a default handler if none was configured.
            if (this.acceptor.getHandler() == null) {
                this.acceptor.setHandler(new TcpIpHandlerAdapter());
            }
            // Emit the "server starting" launch message for host:port.
            LaunchingMessageKind.ITCPIP0001.format(this.host, this.port);
            this.acceptor.bind(new InetSocketAddress(this.host, this.port));
        }
    }
}
public class DataUnitBuilder { /** * Creates a length - optimized application layer protocol data unit out of a service * code and a service data unit . * The transport layer bits in the first byte ( TL / AL control field ) are set 0 . If the compact * APDU shall not contain any ASDU information , < code > asdu < / code > can be left < code > null < / code > . * @ param service application layer service code * @ param asdu application layer service data unit , < code > asdu . length < / code > & lt ; 255 ; or * < code > null < / code > for no ASDU * @ return APDU as byte array */ public static byte [ ] createLengthOptimizedAPDU ( final int service , final byte ... asdu ) { } }
final byte [ ] apdu = new byte [ ( asdu != null && asdu . length > 0 ) ? 1 + asdu . length : 2 ] ; if ( apdu . length > 255 ) throw new KNXIllegalArgumentException ( "APDU length exceeds maximum of 255 bytes" ) ; apdu [ 0 ] = ( byte ) ( ( service >> 8 ) & 0x03 ) ; apdu [ 1 ] = ( byte ) service ; if ( asdu != null && asdu . length > 0 ) { // maximum of 6 bits in asdu [ 0 ] are valid apdu [ 1 ] |= asdu [ 0 ] & 0x3F ; for ( int i = 1 ; i < asdu . length ; ++ i ) apdu [ i + 1 ] = asdu [ i ] ; } return apdu ;
public class DependencyVersion { /** * Determines if the three most major major version parts are identical . For * instances , if version 1.2.3.4 was compared to 1.2.3 this function would * return true . * @ param version the version number to compare * @ return true if the first three major parts of the version are identical */ public boolean matchesAtLeastThreeLevels ( DependencyVersion version ) { } }
if ( version == null ) { return false ; } if ( Math . abs ( this . versionParts . size ( ) - version . versionParts . size ( ) ) >= 3 ) { return false ; } final int max = ( this . versionParts . size ( ) < version . versionParts . size ( ) ) ? this . versionParts . size ( ) : version . versionParts . size ( ) ; boolean ret = true ; for ( int i = 0 ; i < max ; i ++ ) { final String thisVersion = this . versionParts . get ( i ) ; final String otherVersion = version . getVersionParts ( ) . get ( i ) ; if ( i >= 3 ) { if ( thisVersion . compareToIgnoreCase ( otherVersion ) >= 0 ) { ret = false ; break ; } } else if ( ! thisVersion . equals ( otherVersion ) ) { ret = false ; break ; } } return ret ;
public class MetadataUtils {
    /**
     * Returns mapped relational name; in case of a bidirectional mapping it
     * returns the primary-key name of the associated entity.
     *
     * @param parentMetadata metadata of the owning entity
     * @param relation holding relation
     * @param kunderaMetadata metadata registry used for lookups
     * @return mapped/join column name, or null when relation is null
     */
    public static String getMappedName(EntityMetadata parentMetadata, Relation relation, final KunderaMetadata kunderaMetadata) {
        if (relation != null) {
            String joinColumn = relation.getJoinColumnName(kunderaMetadata);
            if (joinColumn == null) {
                // No explicit join column: fall back to an id column. For
                // one-to-many the parent's id is used, otherwise the target's.
                Class clazz = relation.getTargetEntity();
                EntityMetadata metadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, clazz);
                joinColumn = relation.getType().equals(ForeignKey.ONE_TO_MANY)
                        ? ((AbstractAttribute) parentMetadata.getIdAttribute()).getJPAColumnName()
                        : ((AbstractAttribute) metadata.getIdAttribute()).getJPAColumnName();
            }
            return joinColumn;
        }
        return null;
    }
}
public class ColorUtils {
    /**
     * The less function "luminance": computes the relative luminance
     * (Rec. 709 coefficients) of a color whose channels are packed into the
     * raw bits of a double, 16 bits per channel scaled by 0xFF00.
     *
     * @param color a color value as long
     * @return a value in the range from 0.0 to 1.0
     */
    static double luminance(double color) {
        final long bits = Double.doubleToRawLongBits(color);
        // Each channel occupies 16 bits; dividing by 0xFF00 normalizes to [0, 1].
        final double red = ((bits >> 32) & 0xFFFF) / (double) 0xFF00;
        final double green = ((bits >> 16) & 0xFFFF) / (double) 0xFF00;
        final double blue = (bits & 0xFFFF) / (double) 0xFF00;
        return 0.2126 * red + 0.7152 * green + 0.0722 * blue;
    }
}
public class Block {
    /**
     * Removes trailing empty lines.
     *
     * <p>Repeatedly drops the tail line while it is empty. Relies on
     * {@code removeLine} updating {@code m_aLineTail}, so the field is
     * re-read after each removal.</p>
     */
    public void removeTrailingEmptyLines() {
        Line aLine = m_aLineTail;
        while (aLine != null && aLine.m_bIsEmpty) {
            removeLine(aLine);
            aLine = m_aLineTail;
        }
    }
}
public class DifferenceEngine {
    /**
     * Character comparison method used by comments, text and CDATA sections.
     * Delegates to the generic string compare using each node's text content.
     *
     * @param control the control (expected) character data node
     * @param test the test (actual) character data node
     * @param listener notified when a difference is found
     * @param difference the difference type being evaluated
     * @throws DifferenceFoundException if the compared data differ
     */
    private void compareCharacterData(CharacterData control, CharacterData test,
            DifferenceListener listener, Difference difference) throws DifferenceFoundException {
        compare(control.getData(), test.getData(), control, test, listener, difference);
    }
}
public class MaterialDataPager {
    /**
     * Initialize the data pager for navigation: applies the default page
     * limit, attaches the pager widgets, and jumps to the first page.
     */
    @Override
    protected void onLoad() {
        super.onLoad();
        // Fall back to the first configured limit option when none was set.
        if (limit == 0) {
            limit = limitOptions[0];
        }
        add(actionsPanel);
        add(rowSelection);
        // Lazily create the page-number widget on first load only.
        if (pageSelection == null) {
            pageSelection = new PageNumberBox(this);
        }
        add(pageSelection);
        firstPage();
    }
}
public class ArgumentConvertor { /** * check if class and argument are string * @ param cls * @ param arg * @ throws IOException */ void checkStringArgument ( Class cls , String arg ) throws IOException { } }
if ( arg . startsWith ( Constants . QUOTE ) ) { // ca ressemble à une string if ( ! cls . equals ( String . class ) ) { // et on veut pas une string throw new IOException ( ) ; } } else // ca ressemble pas à une string if ( cls . equals ( String . class ) ) { // mais on veut une string throw new IOException ( ) ; }
public class MolecularFormulaManipulator {
    /**
     * Compute the most abundant MF. Given the MF C<sub>6</sub>Br<sub>6</sub>
     * this function rapidly computes the most abundant MF as
     * <sup>12</sup>C<sub>6</sub><sup>79</sup>Br<sub>3</sub><sup>81</sup>Br<sub>3</sub>.
     *
     * @param mf a molecular formula with unspecified isotopes
     * @return the most abundant MF, or null if it could not be computed
     */
    public static IMolecularFormula getMostAbundant(IMolecularFormula mf) {
        final Isotopes isofact;
        try {
            isofact = Isotopes.getInstance();
        } catch (IOException e) {
            // Isotope data could not be loaded; abundances cannot be resolved.
            return null;
        }
        IMolecularFormula res = mf.getBuilder().newInstance(IMolecularFormula.class);
        for (IIsotope iso : mf.isotopes()) {
            int count = mf.getIsotopeCount(iso);
            if (iso.getMassNumber() == null || iso.getMassNumber() == 0) {
                // Unspecified isotope: expand into the naturally most abundant ones.
                IIsotope[] isotopes = isofact.getIsotopes(iso.getSymbol());
                Arrays.sort(isotopes, NAT_ABUN_COMP);
                if (!addIsotopeDist(res, isotopes, 0, count))
                    return null;
            } else
                // Isotope already specified; copy it through unchanged.
                res.addIsotope(iso, count);
        }
        return res;
    }
}
public class LessModuleBuilder {
    /**
     * Obtains the content reader from the superclass and, for feature-dependent
     * LESS resources, refreshes the {@link FeatureSetCacheKeyGenerator} when the
     * existing one is provisional or missing newly discovered dependent features.
     *
     * @see com.ibm.jaggr.core.impl.modulebuilder.css.CSSModuleBuilder#getContentReader
     */
    @Override
    protected Reader getContentReader(String mid, IResource resource, HttpServletRequest request,
            MutableObject<List<ICacheKeyGenerator>> keyGensRef) throws IOException {
        final String sourceMethod = "getContentReader"; //$NON-NLS-1$
        final boolean isTraceLogging = log.isLoggable(Level.FINER);
        if (isTraceLogging) {
            log.entering(sourceClass, sourceMethod, new Object[]{mid, resource, request, keyGensRef});
        }
        // Call super class implementation and determine if we need to update the cache key generators
        Reader result = super.getContentReader(mid, resource, request, keyGensRef);
        if (isFeatureDependent) {
            List<ICacheKeyGenerator> keyGens = keyGensRef.getValue();
            if (resource.getPath().toLowerCase().endsWith(LESS_SUFFIX)) {
                // Features seen while compiling are collected on the current thread.
                Set<String> dependentFeatures = threadLocalDependentFeatures.get();
                if (keyGens == null) {
                    keyGens = getCacheKeyGenerators(getAggregator());
                }
                // NOTE(review): index 2 appears to be the feature-set generator by
                // convention of getCacheKeyGenerators — confirm against superclass.
                FeatureSetCacheKeyGenerator fsKeyGen = (FeatureSetCacheKeyGenerator) keyGens.get(2);
                // Update when the existing generator is provisional or lacks new dependent features.
                if (fsKeyGen.isProvisional() || !fsKeyGen.getFeatureSet().containsAll(dependentFeatures)) {
                    List<ICacheKeyGenerator> newKeyGens = new ArrayList<ICacheKeyGenerator>();
                    newKeyGens.add(keyGens.get(0));
                    newKeyGens.add(keyGens.get(1));
                    FeatureSetCacheKeyGenerator newFsKeyGen = new FeatureSetCacheKeyGenerator(dependentFeatures, false);
                    newKeyGens.add(newFsKeyGen.combine(fsKeyGen));
                    keyGensRef.setValue(newKeyGens);
                    if (isTraceLogging) {
                        log.logp(Level.FINER, sourceClass, sourceMethod,
                                "Key generators updated: " + KeyGenUtil.toString(newKeyGens)); //$NON-NLS-1$
                    }
                }
            } else {
                // We're processing a CSS file. See if we need to provide key gens.
                if (keyGens == null) {
                    keyGensRef.setValue(super.getCacheKeyGenerators(getAggregator()));
                }
            }
        }
        if (isTraceLogging) {
            log.exiting(sourceClass, sourceMethod, result);
        }
        return result;
    }
}
public class ParameterResolverImpl { /** * Make sure value types match with declared parameter types . Values which types do not match , or for which no * parameter definition exists are removed . Types are converted from persistence format if required . * @ return Cleaned up parameter values */ private Map < String , Object > ensureValidValueTypes ( Map < String , Object > parameterValues ) { } }
Map < String , Object > transformedParameterValues = new HashMap < > ( ) ; for ( Map . Entry < String , Object > entry : parameterValues . entrySet ( ) ) { if ( entry . getKey ( ) == null || entry . getValue ( ) == null ) { continue ; } else { Parameter < ? > parameter = allParametersMap . get ( entry . getKey ( ) ) ; if ( parameter == null ) { continue ; } else { Object transformedValue = PersistenceTypeConversion . fromPersistenceType ( entry . getValue ( ) , parameter . getType ( ) ) ; if ( ! parameter . getType ( ) . isAssignableFrom ( transformedValue . getClass ( ) ) ) { continue ; } transformedParameterValues . put ( entry . getKey ( ) , transformedValue ) ; } } } return transformedParameterValues ;
public class CompilerHelper { /** * for test */ public Map < String , TypeMetaInfo > getTypeMetaInfo ( InternalKieModule kModule ) { } }
KieMetaInfoBuilder kb = new KieMetaInfoBuilder ( kModule ) ; KieModuleMetaInfo info = kb . generateKieModuleMetaInfo ( null ) ; Map < String , TypeMetaInfo > typesMetaInfo = info . getTypeMetaInfos ( ) ; return typesMetaInfo ;
public class IotHubResourcesInner {
    /**
     * Get a consumer group from the Event Hub-compatible device-to-cloud
     * endpoint for an IoT hub. Blocks on the async service call.
     *
     * @param resourceGroupName The name of the resource group that contains the IoT hub.
     * @param resourceName The name of the IoT hub.
     * @param eventHubEndpointName The name of the Event Hub-compatible endpoint in the IoT hub.
     * @param name The name of the consumer group to retrieve.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorDetailsException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the EventHubConsumerGroupInfoInner object if successful.
     */
    public EventHubConsumerGroupInfoInner getEventHubConsumerGroup(String resourceGroupName,
            String resourceName, String eventHubEndpointName, String name) {
        return getEventHubConsumerGroupWithServiceResponseAsync(resourceGroupName, resourceName,
                eventHubEndpointName, name).toBlocking().single().body();
    }
}
public class CommerceNotificationAttachmentPersistenceImpl {
    /**
     * Returns an ordered range of all the commerce notification attachments
     * where commerceNotificationQueueEntryId = &#63;. Delegates to the overload
     * with {@code retrieveFromCache} enabled.
     *
     * @param commerceNotificationQueueEntryId the commerce notification queue entry ID
     * @param start the lower bound of the range of commerce notification attachments
     * @param end the upper bound of the range (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @return the ordered range of matching commerce notification attachments
     */
    @Override
    public List<CommerceNotificationAttachment> findByCommerceNotificationQueueEntryId(
            long commerceNotificationQueueEntryId, int start, int end,
            OrderByComparator<CommerceNotificationAttachment> orderByComparator) {
        return findByCommerceNotificationQueueEntryId(commerceNotificationQueueEntryId,
                start, end, orderByComparator, true);
    }
}
public class WaypointNode { /** * METHODS */ protected double computeDistance ( float x , float y , float destinationX , float destinationY ) { } }
return Math . sqrt ( Math . pow ( ( destinationX - x ) , 2 ) + Math . pow ( destinationY - y , 2 ) ) ;
public class Expr {
    /**
     * Set the logical value of this atom expression.
     *
     * <p>For NOT, exactly one sub-expression must be supplied (either slot);
     * passing both throws. Note that passing two nulls leaves {@code left}
     * unchanged.</p>
     *
     * @param type the type of expression
     * @param left the left sub-expression
     * @param right the right sub-expression
     */
    public void setLogical(Type type, Expr left, Expr right) {
        switch (type) {
            case AND:
            case OR:
                this.type = type;
                this.value = 0;
                this.left = left;
                this.right = right;
                this.query = null;
                break;
            case NOT:
                this.type = type;
                // Accept the single operand from whichever side it was passed on.
                if (left != null && right == null)
                    this.left = left;
                else if (left == null && right != null)
                    this.left = right;
                else if (left != null)
                    throw new IllegalArgumentException("Only one sub-expression" + " should be provided" + " for NOT expressions!");
                this.query = null;
                this.value = 0;
                break;
            default:
                throw new IllegalArgumentException("Left/Right sub expressions " + "supplied for " + " non-logical operator!");
        }
    }
}
public class sdcard { /** * Creates the specified < code > toFile < / code > as a byte for byte copy of the * < code > fromFile < / code > . If < code > toFile < / code > already exists , then it * will be replaced with a copy of < code > fromFile < / code > . The name and path * of < code > toFile < / code > will be that of < code > toFile < / code > . < br / > * < br / > * < i > Note : < code > fromFile < / code > and < code > toFile < / code > will be closed by * this function . < / i > * @ param fromFile - FileInputStream for the file to copy from . * @ param toFile - FileOutpubStream for the file to copy to . */ public static void copyFile ( FileInputStream fromFile , FileOutputStream toFile ) throws IOException { } }
FileChannel fromChannel = null ; FileChannel toChannel = null ; try { fromChannel = fromFile . getChannel ( ) ; toChannel = toFile . getChannel ( ) ; fromChannel . transferTo ( 0 , fromChannel . size ( ) , toChannel ) ; } finally { try { if ( fromChannel != null ) { fromChannel . close ( ) ; } } finally { if ( toChannel != null ) { toChannel . close ( ) ; } } }
public class CmsHtmlImport {
    /**
     * Creates all external links, which were found during the HTML-page
     * processing.<p>
     *
     * Each collected URL becomes a pointer resource in the link gallery;
     * already-existing links are silently skipped.
     */
    private void createExternalLinks() {
        int pointerId;
        try {
            pointerId = OpenCms.getResourceManager().getResourceType(CmsResourceTypePointer.getStaticTypeName()).getTypeId();
        } catch (CmsLoaderException e) {
            // Should never happen; fall back to the static pointer type id.
            pointerId = CmsResourceTypePointer.getStaticTypeId();
        }
        // Loop through all collected external links.
        Iterator i = m_externalLinks.iterator();
        while (i.hasNext()) {
            String linkUrl = (String) i.next();
            String filename = getExternalLinkFile(linkUrl);
            m_report.print(Messages.get().container(Messages.RPT_CREATE_EXTERNAL_LINK_0), I_CmsReport.FORMAT_NOTE);
            m_report.print(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_ARGUMENT_1, filename));
            m_report.print(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_DOTS_0));
            List properties = new ArrayList();
            CmsProperty property1 = new CmsProperty(CmsPropertyDefinition.PROPERTY_TITLE, "Link to " + linkUrl, "Link to " + linkUrl);
            properties.add(property1);
            try {
                m_cmsObject.createResource(m_linkGallery + filename, pointerId, linkUrl.getBytes(), properties);
            } catch (CmsException e) {
                // Do nothing here; an exception is thrown if this link already exists.
            }
            m_report.println(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_OK_0), I_CmsReport.FORMAT_OK);
        }
    }
}
public class HomeController {
    /**
     * Use case: exposes the authenticated {@link Principal} to the view
     * and renders the home template.
     *
     * @param model the view model to populate
     * @param principal the currently authenticated principal
     * @return the "home" view name
     */
    @RequestMapping("/id")
    public String id(Model model, Principal principal) {
        model.addAttribute("principal", principal);
        return "home";
    }
}
public class Validation {
    /**
     * Cross validation of a regression model.
     *
     * @param <T> the data type of input objects.
     * @param k k-fold cross validation.
     * @param trainer a regression model trainer that is properly parameterized.
     * @param x the test data set.
     * @param y the test data response values.
     * @param measure the performance measure of regression.
     * @return the test results with the same size of order of measures
     * @throws IllegalArgumentException if {@code k < 2}
     */
    public static <T> double cv(int k, RegressionTrainer<T> trainer, T[] x, double[] y, RegressionMeasure measure) {
        if (k < 2) {
            throw new IllegalArgumentException("Invalid k for k-fold cross validation: " + k);
        }
        int n = x.length;
        // Out-of-fold prediction for every sample, filled one fold at a time.
        double[] predictions = new double[n];
        CrossValidation cv = new CrossValidation(n, k);
        for (int i = 0; i < k; i++) {
            // NOTE: Math here is the project's math utility class (provides slice),
            // not java.lang.Math.
            T[] trainx = Math.slice(x, cv.train[i]);
            double[] trainy = Math.slice(y, cv.train[i]);
            Regression<T> model = trainer.train(trainx, trainy);
            for (int j : cv.test[i]) {
                predictions[j] = model.predict(x[j]);
            }
        }
        // Evaluate the measure over all out-of-fold predictions at once.
        return measure.measure(y, predictions);
    }
}
public class SofaRpcSerialization {
    /**
     * Client side: records the response deserialization size and the time
     * spent deserializing the response.
     *
     * @param responseCommand the response body
     * @param invokeContext the invoke context carrying the RPC context for async calls
     */
    private void recordDeserializeResponse(RpcResponseCommand responseCommand, InvokeContext invokeContext) {
        if (!RpcInternalContext.isAttachmentEnable()) {
            return;
        }
        RpcInternalContext context = null;
        if (invokeContext != null) {
            // For client-side async calls the RPC context is carried in the InvokeContext.
            context = invokeContext.get(RemotingConstants.INVOKE_CTX_RPC_CTX);
        }
        if (context == null) {
            context = RpcInternalContext.getContext();
        }
        int cost = context.getStopWatch().tick().read();
        // Total size = protocol header + class name + content + header blocks.
        int respSize = RpcProtocol.getResponseHeaderLength() + responseCommand.getClazzLength() + responseCommand.getContentLength() + responseCommand.getHeaderLength();
        // Record the deserialized response size and deserialization elapsed time.
        context.setAttachment(RpcConstants.INTERNAL_KEY_RESP_SIZE, respSize);
        context.setAttachment(RpcConstants.INTERNAL_KEY_RESP_DESERIALIZE_TIME, cost);
    }
}
public class CommandTagAttribute {
    /**
     * Returns the value as an expression. Only call on an expression attribute.
     * Reports an error (but still returns the first expression) when more than
     * one expression was supplied.
     */
    public ExprNode valueAsExpr(ErrorReporter reporter) {
        // An expression attribute must not also carry a plain string value.
        checkState(value == null);
        if (valueExprList.size() > 1) {
            reporter.report(valueExprList.get(1).getSourceLocation(), EXPECTED_A_SINGLE_EXPRESSION, key.identifier());
            // Return the first expr to avoid an NPE in CallNode ctor.
            return valueExprList.get(0);
        }
        return Iterables.getOnlyElement(valueExprList);
    }
}
public class DatabaseAccountsInner {
    /**
     * Patches the properties of an existing Azure Cosmos DB database account.
     * Unwraps the service response body from the underlying async call.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param accountName Cosmos DB database account name.
     * @param updateParameters The tags parameter to patch for the current database account.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<DatabaseAccountInner> patchAsync(String resourceGroupName, String accountName,
            DatabaseAccountPatchParameters updateParameters) {
        return patchWithServiceResponseAsync(resourceGroupName, accountName, updateParameters)
                .map(new Func1<ServiceResponse<DatabaseAccountInner>, DatabaseAccountInner>() {
                    @Override
                    public DatabaseAccountInner call(ServiceResponse<DatabaseAccountInner> response) {
                        return response.body();
                    }
                });
    }
}
public class GlobalFileProperties { /** * Utiliza el valor de la JVM GlobalFileProperties . class . getName ( ) + " . DefaultPath " * para obtener el path de la propiedad * @ param filePath * @ return */ public static String getPropertiesFilePath ( String filePath ) { } }
// Verificamos is esta definida una propiedad Global para los ficheros de properties String defaultPath = System . getProperty ( GlobalFileProperties . class . getName ( ) + ".DefaultPath" ) ; if ( defaultPath != null ) { return getFilePath ( defaultPath , filePath ) ; } else { return filePath ; }
public class MPP14Reader {
    /**
     * Read filter definitions from the CFilter directory of the view data.
     * Malformed streams are tolerated: the filter definitions are simply
     * skipped, matching MS Project's lenient behaviour.
     *
     * @throws IOException on unexpected read failures outside the tolerated cases
     */
    private void processFilterData() throws IOException {
        DirectoryEntry dir = (DirectoryEntry) m_viewDir.getEntry("CFilter");
        FixedMeta fixedMeta;
        FixedData fixedData;
        VarMeta varMeta;
        Var2Data varData;
        try {
            fixedMeta = new FixedMeta(new DocumentInputStream(((DocumentEntry) dir.getEntry("FixedMeta"))), 10);
            fixedData = new FixedData(fixedMeta, m_inputStreamFactory.getInstance(dir, "FixedData"));
            varMeta = new VarMeta12(new DocumentInputStream(((DocumentEntry) dir.getEntry("VarMeta"))));
            varData = new Var2Data(varMeta, new DocumentInputStream(((DocumentEntry) dir.getEntry("Var2Data"))));
        } catch (IndexOutOfBoundsException ex) {
            // From a sample file where the stream reports an available number of bytes
            // but attempting to read that number of bytes raises an exception.
            return;
        } catch (IOException ex) {
            // Seen in an unusual sample where the VarMeta magic number is zero.
            // MS Project opens the file fine, so just ignore the filter definitions.
            return;
        }
        FilterReader reader = new FilterReader14();
        reader.process(m_file.getProjectProperties(), m_file.getFilters(), fixedData, varData);
    }
}
public class LocalInputChannel {
    /**
     * Releases the partition reader. Idempotent: subsequent calls are no-ops
     * once {@code isReleased} has been set.
     */
    @Override
    void releaseAllResources() throws IOException {
        if (!isReleased) {
            isReleased = true;
            // Snapshot the field before releasing so the null-out is race-tolerant
            // with respect to this local reference.
            ResultSubpartitionView view = subpartitionView;
            if (view != null) {
                view.releaseAllResources();
                subpartitionView = null;
            }
        }
    }
}
public class TriangulationPoint { /** * Replace points in ptList for all equals object in uniquePts . * @ param uniquePts Map of triangulation points * @ param ptList Point list , updated , but always the same size . */ public static void mergeInstances ( Map < TriangulationPoint , TriangulationPoint > uniquePts , List < TriangulationPoint > ptList ) { } }
for ( int idPoint = 0 ; idPoint < ptList . size ( ) ; idPoint ++ ) { TriangulationPoint pt = ptList . get ( idPoint ) ; TriangulationPoint uniquePt = uniquePts . get ( pt ) ; if ( uniquePt == null ) { uniquePts . put ( pt , pt ) ; } else { // Duplicate point ptList . set ( idPoint , uniquePt ) ; } }
public class HeaderPacket {
    /**
     * Decodes a 4-byte packet header in little-endian byte order: the first
     * three bytes form the body length, the fourth is the sequence number.
     *
     * @param data exactly 4 header bytes
     * @throws IllegalArgumentException if {@code data} is null or not 4 bytes long
     */
    public void fromBytes(byte[] data) {
        if (data == null || data.length != 4) {
            throw new IllegalArgumentException("invalid header data. It can't be null and the length must be 4 byte.");
        }
        // Little-endian 24-bit body length; mask each byte to avoid sign extension.
        this.packetBodyLength = (data[0] & 0xFF) | ((data[1] & 0xFF) << 8) | ((data[2] & 0xFF) << 16);
        this.setPacketSequenceNumber(data[3]);
    }
}
public class ChannelUtil { /** * Given a Collection of channels returns a new collection of channels * containing only those channels which have all the tags in the * < tt > tagNames < / tt > * @ param channels * - the input list of channels * @ param tagNames * - the list of tags required on all channels * @ return Collections of Channels which have all the tags within tagNames */ public static Collection < Channel > filterbyTags ( Collection < Channel > channels , Collection < String > tagNames ) { } }
Collection < Channel > result = new ArrayList < Channel > ( ) ; Collection < Channel > input = new ArrayList < Channel > ( channels ) ; for ( Channel channel : input ) { if ( channel . getTagNames ( ) . containsAll ( tagNames ) ) { result . add ( channel ) ; } } return result ;
public class Linqy { /** * An iterable containing a single element . */ public static < E > Iterable < E > singleton ( final E single ) { } }
return new Iterable < E > ( ) { @ Override public Iterator < E > iterator ( ) { return new OnceOnlyIterator < E > ( single ) ; } } ;
public class CholeskyRCTFactorization {
    /**
     * Cholesky factorization L of psd matrix, Q = L.LT. The factor is stored
     * in packed row-major lower-triangular form in {@code LData}.
     *
     * @param checkSymmetry whether to verify Q is symmetric before factorizing
     * @throws Exception if Q is not symmetric (when checked) or not positive definite
     */
    public void factorize(boolean checkSymmetry) throws Exception {
        if (checkSymmetry && !Property.TWELVE.isSymmetric(Q)) {
            throw new Exception("Matrix is not symmetric");
        }
        double threshold = Utils.getDoubleMachineEpsilon();
        // Packed storage for the lower triangle: dim*(dim+1)/2 entries.
        this.LData = new double[(dim + 1) * dim / 2];
        for (int i = 0; i < dim; i++) {
            // Offset of row i within the packed triangle.
            int iShift = (i + 1) * i / 2;
            for (int j = 0; j < i + 1; j++) {
                int jShift = (j + 1) * j / 2;
                double sum = 0.0;
                for (int k = 0; k < j; k++) {
                    sum += LData[jShift + k] * LData[iShift + k];
                }
                if (i == j) {
                    double d = Q.getQuick(i, i) - sum;
                    // Pivot must exceed machine epsilon, else Q is not positive definite.
                    if (!(d > threshold)) {
                        throw new Exception("not positive definite matrix");
                    }
                    LData[iShift + i] = Math.sqrt(d);
                } else {
                    LData[iShift + j] = 1.0 / LData[jShift + j] * (Q.getQuick(i, j) - sum);
                }
            }
        }
    }
}
public class HomepageHighcharts { /** * Used in the renderHead method to highlight the currently * selected theme tab * @ return the index of the currently selected theme tab */ private int getSelectedTab ( ) { } }
String theme = "default" ; List < PageParameters . NamedPair > pairs = getPageParameters ( ) . getAllNamed ( ) ; theme = pairs . get ( 0 ) . getValue ( ) ; if ( "grid" . equals ( theme ) ) { return 1 ; } else if ( "skies" . equals ( theme ) ) { return 2 ; } else if ( "gray" . equals ( theme ) ) { return 3 ; } else if ( "darkblue" . equals ( theme ) ) { return 4 ; } else if ( "darkgreen" . equals ( theme ) ) { return 5 ; } else { return 0 ; }
public class LNGDoublePriorityQueue {
    /**
     * Pushes a new positive element to the queue: appends it to the heap,
     * records its position, and sifts it up to restore the heap invariant.
     *
     * @param element the element
     * @throws IllegalArgumentException if the element to add is negative
     */
    public void push(int element) {
        if (element < 0)
            throw new IllegalArgumentException("Cannot add negative integers to the priority queue");
        // Elements must be unique; duplicates would corrupt the position index.
        assert !this.contains(element);
        this.doImport(element);
        // Record where the element will live, then append it there.
        this.pos.set(element, this.heap.size());
        this.heap.push(element);
        assert this.heap.get(this.pos.get(element)) == element;
        // Restore the heap ordering from the new leaf upwards.
        this.up(element);
    }
}
public class ValidationFilter {
    /**
     * Set valid attribute values.
     *
     * <p>The contents of the map is in pseudo-code
     * {@code Map<AttName, Map<ElemName, Set<Value>>>}. For default element
     * mapping, the value is {@code *}. The map is stored by reference, not
     * copied.</p>
     */
    public void setValidateMap(final Map<QName, Map<String, Set<String>>> validateMap) {
        this.validateMap = validateMap;
    }
}
public class CPAttachmentFileEntryUtil {
    /**
     * Returns the first cp attachment file entry in the ordered set where
     * displayDate &lt; &#63; and status = &#63;. Delegates to the persistence
     * implementation.
     *
     * @param displayDate the display date
     * @param status the status
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching cp attachment file entry, or <code>null</code> if none found
     */
    public static CPAttachmentFileEntry fetchByLtD_S_First(Date displayDate, int status,
            OrderByComparator<CPAttachmentFileEntry> orderByComparator) {
        return getPersistence().fetchByLtD_S_First(displayDate, status, orderByComparator);
    }
}
public class Try {
    /**
     * Convenience overload of {@link Try#withResources(CheckedSupplier, CheckedFn1, CheckedFn1) withResources}
     * that cascades two dependent resource creations via nested calls.
     *
     * @param aSupplier the first resource supplier
     * @param bFn the second resource function
     * @param cFn the final resource function
     * @param fn the function body
     * @param <A> the first resource type
     * @param <B> the second resource type
     * @param <C> the final resource type
     * @param <D> the function return type
     * @return a {@link Try} representing the result of the function's application to the final dependent resource
     */
    public static <A extends AutoCloseable, B extends AutoCloseable, C extends AutoCloseable, D> Try<Exception, D> withResources(
            CheckedSupplier<? extends Exception, ? extends A> aSupplier,
            CheckedFn1<? extends Exception, ? super A, ? extends B> bFn,
            CheckedFn1<? extends Exception, ? super B, ? extends C> cFn,
            CheckedFn1<? extends Exception, ? super C, ? extends Try<? extends Exception, ? extends D>> fn) {
        // Nest: acquire A and B via the two-resource overload, then acquire C
        // inside so each resource is closed in reverse order of acquisition.
        return withResources(aSupplier, bFn, b -> withResources(() -> cFn.apply(b), fn::apply));
    }
}
public class AbstractSessionHandler { /** * Bind value if value implements { @ link SessionBindingListener } * ( calls { @ link SessionBindingListener # valueBound ( Session , String , Object ) } ) * @ param session the basic session * @ param name the name with which the object is bound or unbound * @ param value the bound value */ private void bindValue ( Session session , String name , Object value ) { } }
if ( value instanceof SessionBindingListener ) { ( ( SessionBindingListener ) value ) . valueBound ( session , name , value ) ; }
public class ClientSession {
    /**
     * Closes the session: shuts down the submitter, listener and manager in
     * sequence, then closes the connection. The returned future completes only
     * after the connection close finishes, propagating the first error seen
     * from the manager chain or the connection.
     *
     * @return A completable future to be completed once the session is closed.
     */
    public CompletableFuture<Void> close() {
        CompletableFuture<Void> future = new CompletableFuture<>();
        submitter.close()
                .thenCompose(v -> listener.close())
                .thenCompose(v -> manager.close())
                .whenComplete((managerResult, managerError) -> {
                    // Always close the connection, even if the manager chain failed.
                    connection.close().whenComplete((connectionResult, connectionError) -> {
                        if (managerError != null) {
                            future.completeExceptionally(managerError);
                        } else if (connectionError != null) {
                            future.completeExceptionally(connectionError);
                        } else {
                            future.complete(null);
                        }
                    });
                });
        return future;
    }
}
public class DefaultFacebookExceptionGenerator {
    /**
     * Checks if a string may be a JSON object containing an error marker near
     * its start; used to speed up error parsing by bailing out early.
     *
     * @param json the candidate response text
     * @throws ResponseErrorJsonParsingException if the text cannot be an error response
     */
    protected void skipResponseStatusExceptionParsing(String json) throws ResponseErrorJsonParsingException {
        // An error response must be a JSON object with an "error" key within
        // the first 50 characters; anything else is not worth parsing.
        final int probeLength = Math.min(50, json.length());
        final boolean looksLikeError = json.startsWith("{")
                && json.substring(0, probeLength).contains("\"error\"");
        if (!looksLikeError) {
            throw new ResponseErrorJsonParsingException();
        }
    }
}
public class JdbcEndpointAdapterController {
    /**
     * Handle request message and check response is successful.
     *
     * @param request The request message to handle
     * @return The response Message
     * @throws JdbcServerException Thrown when the response has some exception header.
     */
    private Message handleMessageAndCheckResponse(Message request) throws JdbcServerException {
        Message response = handleMessage(request);
        // checkSuccess throws when the response carries an exception header.
        checkSuccess(response);
        return response;
    }
}
public class MeasureTraitType {
    /**
     * Gets the value of the attributeSet property.
     *
     * <p>This accessor method returns a reference to the live list, not a
     * snapshot — modifications to the returned list are reflected inside the
     * JAXB object, which is why there is no <CODE>set</CODE> method. To add a
     * new item: {@code getAttributeSet().add(newItem);}</p>
     *
     * <p>Objects of the following type(s) are allowed in the list:
     * {@link MeasureTraitType.AttributeSet}</p>
     */
    public List<MeasureTraitType.AttributeSet> getAttributeSet() {
        // Lazily initialize so JAXB-created instances never expose a null list.
        if (attributeSet == null) {
            attributeSet = new ArrayList<MeasureTraitType.AttributeSet>();
        }
        return this.attributeSet;
    }
}
public class Singular {
    /**
     * Compute a singular-value decomposition of A via LAPACK's GESVD
     * ('A','A' requests full U and V**T).
     *
     * @return A DoubleMatrix[3] array of U, S, V such that A = U * diag(S) * V'
     * @throws LapackConvergenceException if the bidiagonal reduction fails to converge
     */
    public static DoubleMatrix[] fullSVD(DoubleMatrix A) {
        int m = A.rows;
        int n = A.columns;
        DoubleMatrix U = new DoubleMatrix(m, m);
        DoubleMatrix S = new DoubleMatrix(min(m, n));
        DoubleMatrix V = new DoubleMatrix(n, n);
        // A.dup() because GESVD destroys its input matrix.
        int info = NativeBlas.dgesvd('A', 'A', m, n, A.dup().data, 0, m, S.data, 0, U.data, 0, m, V.data, 0, n);
        if (info > 0) {
            throw new LapackConvergenceException("GESVD", info + " superdiagonals of an intermediate bidiagonal form failed to converge.");
        }
        // LAPACK returns V**T; transpose so callers receive V.
        return new DoubleMatrix[]{U, S, V.transpose()};
    }
}
public class GraphReaderAdapter {
    /**
     * {@inheritDoc}
     *
     * <p>Delegates to the indexed overload with a fresh vertex-label indexer.</p>
     */
    public DirectedGraph<DirectedEdge> readDirected(File f) throws IOException {
        return readDirected(f, new HashIndexer<String>());
    }
}
public class MetricValue { /** * If the < code > comparisonOperator < / code > calls for a set of ports , use this to specify that set to be compared with * the < code > metric < / code > . * @ param ports * If the < code > comparisonOperator < / code > calls for a set of ports , use this to specify that set to be * compared with the < code > metric < / code > . */ public void setPorts ( java . util . Collection < Integer > ports ) { } }
if ( ports == null ) { this . ports = null ; return ; } this . ports = new java . util . ArrayList < Integer > ( ports ) ;
public class ObjectActionHandler {
    /**
     * Creates an indicator for this type of object action.
     *
     * <p>Note: the {@code panel} argument is not used by this implementation;
     * the tip is built from the text and icon only.</p>
     */
    public SceneObjectIndicator createIndicator(MisoScenePanel panel, String text, Icon icon) {
        return new SceneObjectTip(text, icon);
    }
}
public class LiveQueryClient {
    /**
     * Register subscriptions: records the subscription and, if the client is
     * already connected, immediately issues its query. Otherwise the query is
     * expected to run once a connection is established.
     */
    public static void registerSubscription(Subscription subscription) {
        mSubscriptions.add(subscription);
        if (isConnected()) {
            executeQuery(subscription.getQuery());
        }
    }
}
public class VirtualHostMap {
    /**
     * Add an endpoint that has started listening, and notify associated virtual hosts.
     *
     * @param endpoint The HttpEndpointImpl that owns the started chain/listener
     * @param resolvedHostName A hostname that can be used in messages (based on endpoint configuration, something other than *)
     * @param port The port the endpoint is listening on
     * @param isHttps True if this is an SSL port
     * @see HttpChain#chainStarted(com.ibm.websphere.channelfw.ChainData)
     */
    public static synchronized void notifyStarted(HttpEndpointImpl endpoint, String resolvedHostName, int port, boolean isHttps) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            Tr.event(tc, "Notify endpoint started: " + endpoint, resolvedHostName, port, isHttps, defaultHost.get(), alternateHostSelector);
        }
        // With no alternate host selector configured, route the notification
        // straight to the default virtual host (if one exists).
        if (alternateHostSelector.get() == null) {
            if (defaultHost.get() != null) {
                defaultHost.get().listenerStarted(endpoint, resolvedHostName, port, isHttps);
            }
        } else {
            // An alternate selector is present: delegate so it can decide
            // which virtual host(s) should receive the notification.
            alternateHostSelector.get().alternateNotifyStarted(endpoint, resolvedHostName, port, isHttps);
        }
    }
}
public class ApptentiveInternal {
    /**
     * Dismisses any currently-visible interactions. This method is for internal use and is subject to change.
     */
    public static void dismissAllInteractions() {
        // All conversation work must run on the conversation queue: if we are
        // on any other thread, re-dispatch this same call onto that queue and
        // return immediately.
        if (!isConversationQueue()) {
            dispatchOnConversationQueue(new DispatchTask() {
                @Override
                protected void execute() {
                    dismissAllInteractions();
                }
            });
            return;
        }
        // Broadcast a dismiss request; observers of this notification are
        // presumably the ones showing interaction UI — they handle the close.
        ApptentiveNotificationCenter.defaultCenter().postNotification(NOTIFICATION_INTERACTIONS_SHOULD_DISMISS);
    }
}
public class Timespan { /** * Expresses the provided duration in the unit provided . Note that the timespan returned * represent only the truncated version of the duration : if duration is 1002ms and timeunit * is seconds , then the timespan returned is 1 second . . . leaving behind 2ms . * @ return the timespan */ private static Timespan truncateDurationToUnit ( long durationInMillis , TimeUnit timeUnit ) { } }
Timespan res ; if ( durationInMillis >= timeUnit . getMillisecondsCount ( ) ) { res = new Timespan ( durationInMillis / timeUnit . getMillisecondsCount ( ) , timeUnit ) ; } else { res = ZERO_TIMESPANS . get ( timeUnit ) ; } return res ;
public class TemplatesApi {
    /**
     * Gets the tabs information for a signer or sign-in-person recipient in a template.
     *
     * @param accountId The external account number (int) or account ID Guid. (required)
     * @param templateId The ID of the template being accessed. (required)
     * @param recipientId The ID of the recipient being accessed. (required)
     * @return Tabs
     * @throws ApiException if the underlying API call fails
     */
    public Tabs listTabs(String accountId, String templateId, String recipientId) throws ApiException {
        // Convenience overload: delegate with no extra options.
        return listTabs(accountId, templateId, recipientId, null);
    }
}
public class MailerRegularBuilderImpl {
    /**
     * For internal use. Validates the builder state and assembles the server
     * configuration from it, falling back to the transport strategy's default
     * port when no explicit port was configured.
     */
    @SuppressWarnings("deprecation")
    ServerConfig buildServerConfig() {
        vallidateServerConfig(); // (sic) external helper name — defined elsewhere in this class
        // Prefer the explicitly configured port; otherwise use the default
        // port for the chosen transport strategy.
        final int serverPort = SimpleOptional.ofNullable(port).orElse(transportStrategy.getDefaultServerPort());
        return new ServerConfigImpl(assumeNonNull(getHost()), serverPort, getUsername(), getPassword());
    }
}
public class LCCore { /** * Replace the main thread executor . */ public static void replaceMainThreadExecutor ( MainThreadExecutor executor ) { } }
MainThreadExecutor previous ; synchronized ( LCCore . class ) { previous = mainThreadExecutor ; mainThreadExecutor = executor ; } do { Runnable toExecute = previous . pop ( ) ; if ( toExecute == null ) break ; executor . execute ( toExecute ) ; } while ( true ) ;
public class LPStandardConverter {
    /**
     * Get back the vector in the original components.
     *
     * @param X vector in the standard variables
     * @return the original component
     * @throws IllegalArgumentException if X does not have the standard dimension
     */
    public double[] postConvert(double[] X) {
        if (X.length != standardN) {
            throw new IllegalArgumentException("wrong array dimension: " + X.length);
        }
        double[] ret = new double[originalN];
        // Walk the standard variables, skipping the first standardS (slack)
        // entries, and map each back onto its original component.
        int cntSplitted = 0;
        for (int i = standardS; i < standardN; i++) {
            if (splittedVariablesList.contains(i - standardS)) {
                // this variable was splitted: x = xPlus - xMinus
                // NOTE(review): this indexes X[standardN + cntSplitted] while
                // X.length was checked to equal standardN above — verify the
                // converter's variable layout / standardN accounting for splits.
                ret[i - standardS] = X[i] - X[standardN + cntSplitted];
                cntSplitted++;
            } else {
                ret[i - standardS] = X[i];
            }
        }
        // this.postconvertedX = ret;
        return ret;
    }
}
public class SuffixParser { /** * Get the first item returned by { @ link # getPages ( Predicate , Page ) } or null if list is empty * @ param filter the resource filter * @ param basePage the suffix path is relative to this page path ( null for current page ) * @ return the first { @ link Page } or null */ public @ Nullable Page getPage ( @ Nullable Predicate < Page > filter , @ Nullable Page basePage ) { } }
List < Page > suffixPages = getPages ( filter , basePage ) ; if ( suffixPages . isEmpty ( ) ) { return null ; } else { return suffixPages . get ( 0 ) ; }
public class Record { /** * Are there any null fields which can ' t be null ? * @ return true If a non - nullable field is null . * @ return false If the fields are okay . */ public boolean isNull ( ) { } }
// Return true if all non null fields have data in them int fieldCount = this . getFieldCount ( ) ; // BaseField Count for ( int fieldSeq = DBConstants . MAIN_FIELD ; fieldSeq < fieldCount + DBConstants . MAIN_FIELD ; fieldSeq ++ ) { BaseField field = this . getField ( fieldSeq ) ; if ( ( ! field . isNullable ( ) ) && ( field . isNull ( ) ) ) return true ; // This field can ' t be null ! ! ! } return false ; // All fields okay
public class LogManager {
    /**
     * Create the log if it does not exist or return back the existing log.
     *
     * @param topic the topic name
     * @param partition the partition id
     * @return read or create a log
     * @throws IOException any IOException, including a partition id outside the configured range
     */
    public ILog getOrCreateLog(String topic, int partition) throws IOException {
        final int configPartitionNumber = getPartition(topic);
        if (partition >= configPartitionNumber) {
            throw new IOException("partition is bigger than the number of configuration: " + configPartitionNumber);
        }
        boolean hasNewTopic = false;
        // Look up (or lazily create) the per-topic partition pool.
        Pool<Integer, Log> parts = getLogPool(topic, partition);
        if (parts == null) {
            // putIfNotExists returns null only for the thread that actually
            // inserted the pool -- that thread owns the "new topic" bookkeeping.
            Pool<Integer, Log> found = logs.putIfNotExists(topic, new Pool<Integer, Log>());
            if (found == null) {
                hasNewTopic = true;
            }
            parts = logs.get(topic);
        }
        Log log = parts.get(partition);
        if (log == null) {
            // Create outside the pool, then race to publish it; the loser of
            // the race closes its own instance and adopts the winner's.
            log = createLog(topic, partition);
            Log found = parts.putIfNotExists(partition, log);
            if (found != null) {
                Closer.closeQuietly(log, logger);
                log = found;
            } else {
                logger.info(format("Created log for [%s-%d], now create other logs if necessary", topic, partition));
                // Eagerly materialize every configured partition of this topic;
                // recursive calls are no-ops for partitions that already exist.
                final int configPartitions = getPartition(topic);
                for (int i = 0; i < configPartitions; i++) {
                    getOrCreateLog(topic, i);
                }
            }
        }
        // Only the first creator of the topic enqueues the ZooKeeper
        // registration task, and only when ZooKeeper is enabled.
        if (hasNewTopic && config.getEnableZookeeper()) {
            topicRegisterTasks.add(new TopicTask(TopicTask.TaskType.CREATE, topic));
        }
        return log;
    }
}
public class BasicRandomRoutingTable {
    /**
     * Remove a single TrustGraphNodeId from the routing table.
     * If the node is mapped to itself, the route is removed. Otherwise this
     * operation merges the two routes of the form X -> neighbor,
     * neighbor -> Y into X -> Y.
     * If the table does not contain the referenced neighbor,
     * this operation has no effect.
     *
     * @param neighbor the TrustGraphNodeId to remove
     * @see RandomRoutingTable.removeNeighbor
     */
    @Override
    public void removeNeighbor(final TrustGraphNodeId neighbor) {
        // all modification operations are serialized
        synchronized (this) {
            // do nothing if there is no entry for the neighbor specified
            if (!contains(neighbor)) {
                return;
            }
            /* first remove the neighbor from the ordering. This will
             * prevent it from being advertised to. */
            removeNeighborFromOrdering(neighbor);
            // then splice it out of the route mapping itself
            removeNeighborFromRoutingTable(neighbor);
        }
    }
}
public class UploadNotificationConfig {
    /**
     * Sets the same notification icon color for all the notification statuses.
     *
     * @param iconColorResourceID Resource ID of the color to use
     * @return {@link UploadNotificationConfig} this instance, for call chaining
     */
    public final UploadNotificationConfig setIconColorForAllStatuses(int iconColorResourceID) {
        // Apply the color uniformly across every per-status configuration.
        progress.iconColorResourceID = iconColorResourceID;
        completed.iconColorResourceID = iconColorResourceID;
        error.iconColorResourceID = iconColorResourceID;
        cancelled.iconColorResourceID = iconColorResourceID;
        return this;
    }
}
public class Criteria {
    /**
     * Creates a criterion using the <b>&gt;</b> operator.
     *
     * @param o the value the left-hand side must be greater than
     * @return the criteria (this instance, for chaining)
     */
    public Criteria gt(Object o) {
        this.criteriaType = RelationalOperator.GT;
        // Wrap the raw value so it can participate in criteria evaluation.
        this.right = ValueNode.toValueNode(o);
        return this;
    }
}
public class AsciidoctorFactory { /** * Return the pre - configured { @ link Asciidoctor } instance . * @ return The { @ link Asciidoctor } instance . */ public static Asciidoctor getAsciidoctor ( ) { } }
Asciidoctor asciidoctor = Asciidoctor . Factory . create ( ) ; asciidoctor . requireLibrary ( ASCIIDOCTOR_DIAGRAM ) ; LOGGER . debug ( "Loaded Asciidoctor {}" ) ; // The delegate is used to fix classloading issues if the CLI plugin classloader // is used for adding extensions . Simply passing the required CL to // Asciidoctor . Factory # create ( ClassLoader ) prevents IncludeProcessor to work . // Any better solution highly welcome . . . return new AsciidoctorDelegate ( asciidoctor ) ;
public class DB2Helper {
    /**
     * <p>This method configures a connection before first use. This method is invoked only when a new
     * connection to the database is created. It is not invoked when connections are reused
     * from the connection pool.</p>
     * <p>This class will set a variable db2ZOS to <code>FALSE</code> as default value. This method
     * sets it to <code>TRUE</code> if the backend system is zOS.</p>
     *
     * @param conn the connection to set up.
     * @exception SQLException if connection setup cannot be completed successfully.
     */
    @Override
    public void doConnectionSetup(Connection conn) throws SQLException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(this, tc, "doConnectionSetup");
        // set the currentSQLid on the connection if it's not null
        Statement stmt = null;
        Transaction suspendedTx = null;
        EmbeddableWebSphereTransactionManager tm = mcf.connectorSvc.getTransactionManager();
        try {
            if (currentSQLid != null && !currentSQLid.equals("")) {
                // If the work below is happening under a global transaction scope, we must suspend the
                // global transaction before we perform the action. That is because if we are to perform an
                // action that implies that a local transaction should be on the thread, we need to make sure
                // that the action will take place under a local transaction scope.
                // If we do not do this, others that are aware of the transaction type currently on
                // the thread (i.e. DB2 T2 jdbc driver) may react in a way that is inconsistent with
                // the actions and expectations below.
                UOWCurrent uow = (UOWCurrent) tm;
                UOWCoordinator coord = uow == null ? null : uow.getUOWCoord();
                boolean inGlobalTransaction = coord != null && coord.isGlobal();
                if (inGlobalTransaction) {
                    try {
                        suspendedTx = tm.suspend();
                    } catch (Throwable t) {
                        throw new SQLException(t);
                    }
                }
                if (isTraceOn && tc.isDebugEnabled()) {
                    Tr.debug(this, tc, "Setting currentSQLID : " + currentSQLid);
                }
                stmt = conn.createStatement();
                // NOTE(review): SQL built by string concatenation; currentSQLid comes
                // from configuration here, but confirm it can never carry user input.
                String sql = "set current sqlid = '" + currentSQLid + "'";
                stmt.executeUpdate(sql);
            }
        } finally {
            // close the statement
            try {
                if (stmt != null)
                    stmt.close();
            } catch (SQLException e) {
                com.ibm.ws.ffdc.FFDCFilter.processException(e, getClass().getName(), "231", this);
                if (isTraceOn && tc.isDebugEnabled())
                    Tr.debug(this, tc, "SQLException occured in closing the statement ", e);
            } finally {
                // If there is a suspended transaction, resume it.
                if (suspendedTx != null) {
                    try {
                        tm.resume(suspendedTx);
                    } catch (Throwable t) {
                        throw new SQLException(t);
                    }
                }
            }
        }
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(this, tc, "doConnectionSetup");
    }
}
public class Manager {
    /**
     * Removes a {@link Crouton} immediately, even when it's currently being displayed.
     *
     * @param crouton The {@link Crouton} that should be removed.
     */
    void removeCroutonImmediately(Crouton crouton) {
        // if Crouton has already been displayed then it may not be in the queue (because it was popped).
        // This ensures the displayed Crouton is removed from its parent immediately, whether another instance
        // of it exists in the queue or not.
        // Note: crouton.isShowing() is false here even if it really is showing, as croutonView object in
        // Crouton seems to be out of sync with reality!
        if (null != crouton.getActivity() && null != crouton.getView() && null != crouton.getView().getParent()) {
            // Detach the view directly from whatever parent is displaying it.
            ((ViewGroup) crouton.getView().getParent()).removeView(crouton.getView());
            // remove any messages pending for the crouton
            removeAllMessagesForCrouton(crouton);
        }
        // remove any matching croutons from queue
        final Iterator<Crouton> croutonIterator = croutonQueue.iterator();
        while (croutonIterator.hasNext()) {
            final Crouton c = croutonIterator.next();
            if (c.equals(crouton) && (null != c.getActivity())) {
                // remove the crouton from the content view
                removeCroutonFromViewParent(crouton);
                // remove any messages pending for the crouton
                removeAllMessagesForCrouton(c);
                // remove the crouton from the queue (via the iterator, so the
                // queue is not mutated while being iterated)
                croutonIterator.remove();
                // we have found our crouton so just break
                break;
            }
        }
    }
}
public class SegmentHelper {
    /**
     * The method sends a WireCommand to iterate over table keys.
     *
     * @param tableName Qualified table name.
     * @param suggestedKeyCount Suggested number of {@link TableKey}s to be returned by the SegmentStore.
     * @param state Last known state of the iterator (null means start from the beginning).
     * @param delegationToken The token to be presented to the segmentstore.
     * @param clientRequestId Request id.
     * @return A CompletableFuture that will return the next set of {@link TableKey}s returned from the SegmentStore.
     */
    public CompletableFuture<TableSegment.IteratorItem<TableKey<byte[]>>> readTableKeys(final String tableName,
            final int suggestedKeyCount, final IteratorState state, final String delegationToken,
            final long clientRequestId) {
        final Controller.NodeUri uri = getTableUri(tableName);
        final WireCommandType type = WireCommandType.READ_TABLE_KEYS;
        // Generate a fresh request id only when the caller did not supply one.
        final long requestId = (clientRequestId == RequestTag.NON_EXISTENT_ID) ? idGenerator.get() : clientRequestId;
        // A null state means "start the iteration from the beginning".
        final IteratorState token = (state == null) ? IteratorState.EMPTY : state;
        final CompletableFuture<TableSegment.IteratorItem<TableKey<byte[]>>> result = new CompletableFuture<>();
        // Each wire-protocol outcome either completes the future with a value
        // or fails it with a WireCommandFailedException carrying the reason.
        final FailingReplyProcessor replyProcessor = new FailingReplyProcessor() {
            @Override
            public void connectionDropped() {
                log.warn(requestId, "readTableKeys {} Connection dropped", tableName);
                result.completeExceptionally(
                        new WireCommandFailedException(type, WireCommandFailedException.Reason.ConnectionDropped));
            }

            @Override
            public void wrongHost(WireCommands.WrongHost wrongHost) {
                log.warn(requestId, "readTableKeys {} wrong host", tableName);
                result.completeExceptionally(
                        new WireCommandFailedException(type, WireCommandFailedException.Reason.UnknownHost));
            }

            @Override
            public void noSuchSegment(WireCommands.NoSuchSegment noSuchSegment) {
                log.warn(requestId, "readTableKeys {} NoSuchSegment", tableName);
                result.completeExceptionally(
                        new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist));
            }

            @Override
            public void tableKeysRead(WireCommands.TableKeysRead tableKeysRead) {
                log.info(requestId, "readTableKeys {} successful.", tableName);
                // Convert the wire-level keys into client-facing TableKey objects
                // and carry the continuation token forward for the next page.
                final IteratorState state = IteratorState.fromBytes(tableKeysRead.getContinuationToken());
                final List<TableKey<byte[]>> keys =
                        tableKeysRead.getKeys().stream()
                                     .map(k -> new TableKeyImpl<>(getArray(k.getData()),
                                             new KeyVersionImpl(k.getKeyVersion())))
                                     .collect(Collectors.toList());
                result.complete(new TableSegment.IteratorItem<>(state, keys));
            }

            @Override
            public void processingFailure(Exception error) {
                log.error(requestId, "readTableKeys {} failed", tableName, error);
                handleError(error, result, type);
            }

            @Override
            public void authTokenCheckFailed(WireCommands.AuthTokenCheckFailed authTokenCheckFailed) {
                result.completeExceptionally(new WireCommandFailedException(
                        new AuthenticationException(authTokenCheckFailed.toString()),
                        type, WireCommandFailedException.Reason.AuthFailed));
            }
        };
        WireCommands.ReadTableKeys cmd = new WireCommands.ReadTableKeys(requestId, tableName, delegationToken,
                suggestedKeyCount, token.toBytes());
        sendRequestAsync(cmd, replyProcessor, result, ModelHelper.encode(uri));
        return result;
    }
}
public class SubsetProblem { /** * Set the maximum subset size . Specified size should be & ge ; the current minimum subset size * and & le ; the number of items in the underlying data . * @ param maxSubsetSize new maximum subset size * @ throws IllegalArgumentException if an invalid maximum size is given */ public void setMaxSubsetSize ( int maxSubsetSize ) { } }
// check size if ( maxSubsetSize < minSubsetSize ) { throw new IllegalArgumentException ( "Error while setting maximum subset size: should be >= minimum subset size." ) ; } if ( maxSubsetSize > getData ( ) . getIDs ( ) . size ( ) ) { throw new IllegalArgumentException ( "Error while setting maximum subset size: can not be larger " + "than number of items in underlying data." ) ; } this . maxSubsetSize = maxSubsetSize ;
public class Sanitizers { /** * Converts the input to the body of a CSS string literal . */ public static String escapeCssString ( SoyValue value ) { } }
value = normalizeNull ( value ) ; return escapeCssString ( value . coerceToString ( ) ) ;
public class JdbcRepository { /** * Processes composite filter . * @ param whereBuilder the specified where builder * @ param paramList the specified parameter list * @ param compositeFilter the specified composite filter * @ throws RepositoryException repository exception */ private void processCompositeFilter ( final StringBuilder whereBuilder , final List < Object > paramList , final CompositeFilter compositeFilter ) throws RepositoryException { } }
final List < Filter > subFilters = compositeFilter . getSubFilters ( ) ; if ( 2 > subFilters . size ( ) ) { throw new RepositoryException ( "At least two sub filters in a composite filter" ) ; } whereBuilder . append ( "(" ) ; final Iterator < Filter > iterator = subFilters . iterator ( ) ; while ( iterator . hasNext ( ) ) { final Filter filter = iterator . next ( ) ; if ( filter instanceof PropertyFilter ) { processPropertyFilter ( whereBuilder , paramList , ( PropertyFilter ) filter ) ; } else { // CompositeFilter processCompositeFilter ( whereBuilder , paramList , ( CompositeFilter ) filter ) ; } if ( iterator . hasNext ( ) ) { switch ( compositeFilter . getOperator ( ) ) { case AND : whereBuilder . append ( " AND " ) ; break ; case OR : whereBuilder . append ( " OR " ) ; break ; default : throw new RepositoryException ( "Unsupported composite filter [operator=" + compositeFilter . getOperator ( ) + "]" ) ; } } } whereBuilder . append ( ")" ) ;
public class MagickUtil {
    /**
     * Converts a {@code MagickImage} to a {@code BufferedImage}.
     * The conversion depends on {@code pImage}'s {@code ImageType}:
     * <dl>
     * <dt>{@code ImageType.BilevelType}</dt><dd>{@code TYPE_BYTE_BINARY}</dd>
     * <dt>{@code ImageType.GrayscaleType}</dt><dd>{@code TYPE_BYTE_GRAY}</dd>
     * <dt>{@code ImageType.GrayscaleMatteType}</dt><dd>{@code TYPE_USHORT_GRAY}</dd>
     * <dt>{@code ImageType.PaletteType}</dt><dd>{@code TYPE_BYTE_BINARY} (palette of &lt;= 16 colors) or {@code TYPE_BYTE_INDEXED}</dd>
     * <dt>{@code ImageType.PaletteMatteType}</dt><dd>{@code TYPE_BYTE_BINARY} (palette of &lt;= 16 colors) or {@code TYPE_BYTE_INDEXED}</dd>
     * <dt>{@code ImageType.TrueColorType}</dt><dd>{@code TYPE_3BYTE_BGR}</dd>
     * <dt>{@code ImageType.TrueColorPaletteType}</dt><dd>{@code TYPE_4BYTE_ABGR}</dd>
     * </dl>
     *
     * @param pImage the original {@code MagickImage}
     * @return a new {@code BufferedImage}
     * @throws IllegalArgumentException if {@code pImage} is {@code null}
     *         or if the {@code ImageType} is not one mentioned above.
     * @throws MagickException if an exception occurs during conversion
     * @see BufferedImage
     */
    public static BufferedImage toBuffered(MagickImage pImage) throws MagickException {
        if (pImage == null) {
            throw new IllegalArgumentException("image == null");
        }
        long start = 0L;
        if (DEBUG) {
            start = System.currentTimeMillis();
        }
        BufferedImage image = null;
        try {
            // Dispatch on the JMagick image type; the boolean argument of each
            // helper selects whether an alpha (matte) channel is preserved.
            switch (pImage.getImageType()) {
                case ImageType.BilevelType:
                    image = bilevelToBuffered(pImage);
                    break;
                case ImageType.GrayscaleType:
                    image = grayToBuffered(pImage, false);
                    break;
                case ImageType.GrayscaleMatteType:
                    image = grayToBuffered(pImage, true);
                    break;
                case ImageType.PaletteType:
                    image = paletteToBuffered(pImage, false);
                    break;
                case ImageType.PaletteMatteType:
                    image = paletteToBuffered(pImage, true);
                    break;
                case ImageType.TrueColorType:
                    image = rgbToBuffered(pImage, false);
                    break;
                case ImageType.TrueColorMatteType:
                    image = rgbToBuffered(pImage, true);
                    break;
                case ImageType.ColorSeparationType:
                    image = cmykToBuffered(pImage, false);
                    break;
                case ImageType.ColorSeparationMatteType:
                    image = cmykToBuffered(pImage, true);
                    break;
                case ImageType.OptimizeType:
                default:
                    throw new IllegalArgumentException("Unknown JMagick image type: " + pImage.getImageType());
            }
        } finally {
            // Timing/debug output runs even when the switch throws
            // (image may still be null in that case).
            if (DEBUG) {
                long time = System.currentTimeMillis() - start;
                System.out.println("Converted JMagick image type: " + pImage.getImageType() + " to BufferedImage: " + image);
                System.out.println("Conversion to BufferedImage: " + time + " ms");
            }
        }
        return image;
    }
}