signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ManipulationUtils { /** * Adds the retrieveInternalModelVersion method to the class . */ private static void addRetrieveInternalModelVersion ( CtClass clazz ) throws NotFoundException , CannotCompileException { } }
CtClass [ ] params = generateClassField ( ) ; CtMethod method = new CtMethod ( cp . get ( Integer . class . getName ( ) ) , "retrieveInternalModelVersion" , params , clazz ) ; StringBuilder builder = new StringBuilder ( ) ; builder . append ( createTrace ( "Called retrieveInternalModelVersion" ) ) . append ( String . format ( "return (Integer) ((OpenEngSBModelEntry)%s.get(\"%s\")).getValue();" , TAIL_FIELD , EDBConstants . MODEL_VERSION ) ) ; method . setBody ( createMethodBody ( builder . toString ( ) ) ) ; clazz . addMethod ( method ) ;
public class Exceptions { /** * Create a { @ link RSocketException } from a Frame that matches the error code it contains . * @ param frame the frame to retrieve the error code and message from * @ return a { @ link RSocketException } that matches the error code in the Frame * @ throws NullPointerException if { @ code frame } is { @ code null } */ public static RuntimeException from ( ByteBuf frame ) { } }
Objects . requireNonNull ( frame , "frame must not be null" ) ; int errorCode = ErrorFrameFlyweight . errorCode ( frame ) ; String message = ErrorFrameFlyweight . dataUtf8 ( frame ) ; switch ( errorCode ) { case APPLICATION_ERROR : return new ApplicationErrorException ( message ) ; case CANCELED : return new CanceledException ( message ) ; case CONNECTION_CLOSE : return new ConnectionCloseException ( message ) ; case CONNECTION_ERROR : return new ConnectionErrorException ( message ) ; case INVALID : return new InvalidException ( message ) ; case INVALID_SETUP : return new InvalidSetupException ( message ) ; case REJECTED : return new RejectedException ( message ) ; case REJECTED_RESUME : return new RejectedResumeException ( message ) ; case REJECTED_SETUP : return new RejectedSetupException ( message ) ; case UNSUPPORTED_SETUP : return new UnsupportedSetupException ( message ) ; default : return new IllegalArgumentException ( String . format ( "Invalid Error frame: %d '%s'" , errorCode , message ) ) ; }
public class ResourceLocks { /** * deletes unused LockedObjects and resets the counter . works recursively starting at the given LockedObject * @ param lo LockedObject * @ param temporary Clean temporary or real locks * @ return if cleaned */ private boolean cleanLockedObjects ( LockedObject lo , boolean temporary ) { } }
if ( lo . children == null ) { if ( lo . owner == null ) { if ( temporary ) { lo . removeTempLockedObject ( ) ; } else { lo . removeLockedObject ( ) ; } return true ; } return false ; } boolean canDelete = true ; int limit = lo . children . length ; for ( int i = 0 ; i < limit ; i ++ ) { if ( ! cleanLockedObjects ( lo . children [ i ] , temporary ) ) { canDelete = false ; } else { // because the deleting shifts the array i -- ; limit -- ; } } if ( canDelete ) { if ( lo . owner == null ) { if ( temporary ) { lo . removeTempLockedObject ( ) ; } else { lo . removeLockedObject ( ) ; } return true ; } return false ; } return false ;
public class ClusterSettings { /** * Hazelcast must be started when cluster is activated on all nodes but search ones */ public static boolean shouldStartHazelcast ( AppSettings appSettings ) { } }
return isClusterEnabled ( appSettings . getProps ( ) ) && toNodeType ( appSettings . getProps ( ) ) . equals ( NodeType . APPLICATION ) ;
public class MonthDay { /** * Obtains an instance of { @ code MonthDay } . * The day - of - month must be valid for the month within a leap year . * Hence , for February , day 29 is valid . * For example , passing in April and day 31 will throw an exception , as * there can never be April 31st in any year . By contrast , passing in * February 29th is permitted , as that month - day can sometimes be valid . * @ param month the month - of - year to represent , not null * @ param dayOfMonth the day - of - month to represent , from 1 to 31 * @ return the month - day , not null * @ throws DateTimeException if the value of any field is out of range , * or if the day - of - month is invalid for the month */ public static MonthDay of ( Month month , int dayOfMonth ) { } }
Objects . requireNonNull ( month , "month" ) ; DAY_OF_MONTH . checkValidValue ( dayOfMonth ) ; if ( dayOfMonth > month . maxLength ( ) ) { throw new DateTimeException ( "Illegal value for DayOfMonth field, value " + dayOfMonth + " is not valid for month " + month . name ( ) ) ; } return new MonthDay ( month . getValue ( ) , dayOfMonth ) ;
public class ASGResource { /** * Changes the status information of the ASG . * @ param asgName the name of the ASG for which the status needs to be changed . * @ param newStatus the new status { @ link ASGStatus } of the ASG . * @ param isReplication a header parameter containing information whether this is replicated from other nodes . * @ return response which indicates if the operation succeeded or not . */ @ PUT @ Path ( "{asgName}/status" ) public Response statusUpdate ( @ PathParam ( "asgName" ) String asgName , @ QueryParam ( "value" ) String newStatus , @ HeaderParam ( PeerEurekaNode . HEADER_REPLICATION ) String isReplication ) { } }
if ( awsAsgUtil == null ) { return Response . status ( 400 ) . build ( ) ; } try { logger . info ( "Trying to update ASG Status for ASG {} to {}" , asgName , newStatus ) ; ASGStatus asgStatus = ASGStatus . valueOf ( newStatus . toUpperCase ( ) ) ; awsAsgUtil . setStatus ( asgName , ( ! ASGStatus . DISABLED . equals ( asgStatus ) ) ) ; registry . statusUpdate ( asgName , asgStatus , Boolean . valueOf ( isReplication ) ) ; logger . debug ( "Updated ASG Status for ASG {} to {}" , asgName , asgStatus ) ; } catch ( Throwable e ) { logger . error ( "Cannot update the status {} for the ASG {}" , newStatus , asgName , e ) ; return Response . serverError ( ) . build ( ) ; } return Response . ok ( ) . build ( ) ;
public class UnindexedFace { /** * An array of reasons that specify why a face wasn ' t indexed . * < ul > * < li > * EXTREME _ POSE - The face is at a pose that can ' t be detected . For example , the head is turned too far away from * the camera . * < / li > * < li > * EXCEEDS _ MAX _ FACES - The number of faces detected is already higher than that specified by the * < code > MaxFaces < / code > input parameter for < code > IndexFaces < / code > . * < / li > * < li > * LOW _ BRIGHTNESS - The image is too dark . * < / li > * < li > * LOW _ SHARPNESS - The image is too blurry . * < / li > * < li > * LOW _ CONFIDENCE - The face was detected with a low confidence . * < / li > * < li > * SMALL _ BOUNDING _ BOX - The bounding box around the face is too small . * < / li > * < / ul > * @ param reasons * An array of reasons that specify why a face wasn ' t indexed . < / p > * < ul > * < li > * EXTREME _ POSE - The face is at a pose that can ' t be detected . For example , the head is turned too far away * from the camera . * < / li > * < li > * EXCEEDS _ MAX _ FACES - The number of faces detected is already higher than that specified by the * < code > MaxFaces < / code > input parameter for < code > IndexFaces < / code > . * < / li > * < li > * LOW _ BRIGHTNESS - The image is too dark . * < / li > * < li > * LOW _ SHARPNESS - The image is too blurry . * < / li > * < li > * LOW _ CONFIDENCE - The face was detected with a low confidence . * < / li > * < li > * SMALL _ BOUNDING _ BOX - The bounding box around the face is too small . * < / li > * @ see Reason */ public void setReasons ( java . util . Collection < String > reasons ) { } }
if ( reasons == null ) { this . reasons = null ; return ; } this . reasons = new java . util . ArrayList < String > ( reasons ) ;
public class DefaultAsyncSearchQueryResult { /** * A utility method to convert an HTTP 400 response from the search service into a proper * { @ link AsyncSearchQueryResult } . HTTP 400 indicates the request was malformed and couldn ' t * be parsed on the server . As of Couchbase Server 4.5 such a response is a text / plain * body that describes the parsing error . The whole body is emitted / thrown , wrapped in a * { @ link FtsMalformedRequestException } . * @ param payload the HTTP 400 response body describing the parsing failure . * @ return an { @ link AsyncSearchQueryResult } that will emit a { @ link FtsMalformedRequestException } when calling its * { @ link AsyncSearchQueryResult # hits ( ) hits ( ) } method . * @ deprecated FTS is still in BETA so the response format is likely to change in a future version , and be * unified with the HTTP 200 response format . */ @ Deprecated public static AsyncSearchQueryResult fromHttp400 ( String payload ) { } }
// dummy default values SearchStatus status = new DefaultSearchStatus ( 1L , 1L , 0L ) ; SearchMetrics metrics = new DefaultSearchMetrics ( 0L , 0L , 0d ) ; return new DefaultAsyncSearchQueryResult ( status , Observable . < SearchQueryRow > error ( new FtsMalformedRequestException ( payload ) ) , Observable . < FacetResult > empty ( ) , Observable . just ( metrics ) ) ;
public class SitesConfigurationBuilder { /** * Defines the site names , from the list of sites names defined within ' backups ' element , to * which this cache backups its data . */ public SitesConfigurationBuilder addInUseBackupSite ( String site ) { } }
Set < String > sites = attributes . attribute ( IN_USE_BACKUP_SITES ) . get ( ) ; sites . add ( site ) ; attributes . attribute ( IN_USE_BACKUP_SITES ) . set ( sites ) ; return this ;
public class CmsStaticExportManager { /** * Sets the link substitution handler class . < p > * @ param handlerClassName the link substitution handler class name */ public void setHandler ( String handlerClassName ) { } }
try { m_handler = ( I_CmsStaticExportHandler ) Class . forName ( handlerClassName ) . newInstance ( ) ; } catch ( Exception e ) { // should never happen LOG . error ( e . getLocalizedMessage ( ) , e ) ; }
public class CPDefinitionVirtualSettingUtil { /** * Returns the cp definition virtual setting where uuid = & # 63 ; and groupId = & # 63 ; or returns < code > null < / code > if it could not be found , optionally using the finder cache . * @ param uuid the uuid * @ param groupId the group ID * @ param retrieveFromCache whether to retrieve from the finder cache * @ return the matching cp definition virtual setting , or < code > null < / code > if a matching cp definition virtual setting could not be found */ public static CPDefinitionVirtualSetting fetchByUUID_G ( String uuid , long groupId , boolean retrieveFromCache ) { } }
return getPersistence ( ) . fetchByUUID_G ( uuid , groupId , retrieveFromCache ) ;
public class ContentSpec { /** * Set the Maven POM version that is used in the pom . xml file when building the jDocbook files . * @ param pomVersion The Maven POM version to be used when building . */ public void setPOMVersion ( final String pomVersion ) { } }
if ( pomVersion == null && this . pomVersion == null ) { return ; } else if ( pomVersion == null ) { removeChild ( this . pomVersion ) ; this . pomVersion = null ; } else if ( this . pomVersion == null ) { this . pomVersion = new KeyValueNode < String > ( CommonConstants . CS_MAVEN_POM_VERSION_TITLE , pomVersion ) ; appendChild ( this . pomVersion , false ) ; } else { this . pomVersion . setValue ( pomVersion ) ; }
public class HttpUtil { /** * returns ' true ' if ' lastModified ' is after ' ifModifiedSince ' ( and both values are valid ) */ public static boolean isModifiedSince ( long ifModifiedSince , Calendar lastModified ) { } }
return lastModified != null && isModifiedSince ( ifModifiedSince , lastModified . getTimeInMillis ( ) ) ;
// NOTE(review): EMF @generated accessor — lazily resolves the EEnum from the
// registered Ifc4 package by classifier index (944). Generated code; do not
// edit the logic by hand.
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EEnum getIfcConstructionMaterialResourceTypeEnum ( ) { } }
if ( ifcConstructionMaterialResourceTypeEnumEEnum == null ) { ifcConstructionMaterialResourceTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 944 ) ; } return ifcConstructionMaterialResourceTypeEnumEEnum ;
public class Vector2f {
    // Vector components; public fields as in the JOML Vector2f API.
    public float x, y;

    /**
     * Subtracts {@code (x, y)} from this vector and stores the result in {@code dest}.
     * This vector is not modified.
     *
     * @param x    the x component to subtract
     * @param y    the y component to subtract
     * @param dest the vector to hold the result
     * @return {@code dest}
     * @see org.joml.Vector2fc#sub(float, float, org.joml.Vector2f)
     */
    public Vector2f sub(float x, float y, Vector2f dest) {
        float resultX = this.x - x;
        float resultY = this.y - y;
        dest.x = resultX;
        dest.y = resultY;
        return dest;
    }
}
public class ScreenshotListener {
    /**
     * Returns a date suffix string of the form {@code -yyyy-MM-dd_HHhmmmss}s,
     * e.g. {@code -2024-05-03_14h05m09s}.
     *
     * @return the timestamp suffix for screenshot file names
     */
    private String getDateSuffix() {
        // java.time replaces the legacy Calendar API and removes the error-prone
        // zero-based month arithmetic (Calendar.MONTH + 1) of the old code.
        return java.time.LocalDateTime.now().format(
                java.time.format.DateTimeFormatter.ofPattern("-yyyy-MM-dd_HH'h'mm'm'ss's'"));
    }
}
public class EvalVisitor { /** * Protected helper for { @ code computeFunction } . * @ param fn The function object . * @ param args The arguments to the function . * @ param fnNode The function node . Only used for error reporting . * @ return The result of the function called on the given arguments . */ @ ForOverride protected SoyValue computeFunctionHelper ( SoyJavaSourceFunction fn , List < SoyValue > args , final FunctionNode fnNode ) { } }
try { return new TofuValueFactory ( fnNode , pluginInstances ) . computeForJava ( fn , args , context ) ; } catch ( Exception e ) { throw RenderException . create ( "While computing function \"" + fnNode . toSourceString ( ) + "\": " + e . getMessage ( ) , e ) ; }
public class Files { /** * Remove ALL files and directories from a given base directory . This method remove ALL files and directory tree , * child of given < code > baseDir < / code > but directory itself is not removed . As a result < code > baseDir < / code > becomes * empty , that is , no children . If exception occur base directory state is not defined , that is , some files may be * removed and other may still be there . * @ param baseDir base directory to clean - up . * @ throws IOException if remove operation fails . */ public static void removeFilesHierarchy ( File baseDir ) throws IOException { } }
Params . notNull ( baseDir , "Base directory" ) ; Params . isDirectory ( baseDir , "Base directory" ) ; log . debug ( "Remove files hierarchy with base directory |%s|." , baseDir ) ; removeDirectory ( baseDir ) ;
public class ColumnMapperDouble { /** * { @ inheritDoc } */ @ Override public SortField sortField ( String field , boolean reverse ) { } }
return new SortField ( field , Type . DOUBLE , reverse ) ;
public class ResourceCache { /** * Adds multiple resources to cache . * @ param resources items to add * @ throws IllegalStateException in case < code > init ( ) < / code > was not called */ public void cache ( Map < String , Object > resources ) { } }
verifyState ( ) ; if ( ! cacheLocked . get ( ) ) { resourceCache . get ( ) . putAll ( resources ) ; }
public class AbstractRunMojo { /** * Copy a custom Share Log4J config into the share - war / WEB - INF / classes dir . * There is no custom classpath resolve mechanism for Share log4j , * to log custom stuff overriding standard log4j . properties is needed . * @ throws MojoExecutionException when any problem appears copying the share log4j configuration */ protected void copyShareLog4jConfig ( ) throws MojoExecutionException { } }
if ( ! useCustomShareLog4jConfig ) { getLog ( ) . info ( "NOT overriding share/WEB-INF/classes/log4j.properties" ) ; return ; } final String warOutputDir = getWarOutputDir ( SHARE_WAR_PREFIX_NAME ) ; final String logConfDestDir = warOutputDir + "/WEB-INF/classes" ; getLog ( ) . info ( "Copying Share log4j.properties to: " + logConfDestDir ) ; executeMojo ( plugin ( groupId ( "org.apache.maven.plugins" ) , artifactId ( "maven-resources-plugin" ) , version ( MAVEN_RESOURCE_PLUGIN_VERSION ) ) , goal ( "copy-resources" ) , configuration ( element ( name ( "outputDirectory" ) , logConfDestDir ) , element ( name ( "resources" ) , element ( name ( "resource" ) , element ( name ( "directory" ) , "src/test/resources/share" ) , element ( name ( "includes" ) , element ( name ( "include" ) , "log4j.properties" ) ) , element ( name ( "filtering" ) , "true" ) ) ) ) , execEnv ) ;
public class Events { /** * Recursively finds events defined by the given type and its implemented interfaces . * @ param type the type for which to find events * @ return the events defined by the given type and its parent interfaces */ private static Map < Method , EventType > findMethods ( Class < ? > type ) { } }
Map < Method , EventType > events = new HashMap < > ( ) ; for ( Method method : type . getDeclaredMethods ( ) ) { Event event = method . getAnnotation ( Event . class ) ; if ( event != null ) { String name = event . value ( ) . equals ( "" ) ? method . getName ( ) : event . value ( ) ; events . put ( method , EventType . from ( name ) ) ; } } for ( Class < ? > iface : type . getInterfaces ( ) ) { events . putAll ( findMethods ( iface ) ) ; } return events ;
public class ArrayUtils { /** * 统计Array的长度 * @ param array 数组 * @ return 数组的长度 * @ throws IllegalArgumentException 如果给定的数据不为array */ public static int count ( final Object array ) { } }
if ( isArray ( array ) ) { return Array . getLength ( array ) ; } else if ( array != null ) { throw new IllegalArgumentException ( "Given data is not an array." ) ; } else { return 0 ; }
public class SMailDogmaticPostalPersonnel { protected SMailTextProofreader createProofreader ( ) { } }
final List < SMailTextProofreader > readerList = new ArrayList < SMailTextProofreader > ( 4 ) ; setupProofreader ( readerList ) ; return new SMailBatchProofreader ( readerList ) ;
public class MultiUserChatLight { /** * Change the name of the room . * @ param roomName * @ throws NoResponseException * @ throws XMPPErrorException * @ throws NotConnectedException * @ throws InterruptedException */ public void changeRoomName ( String roomName ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } }
MUCLightSetConfigsIQ mucLightSetConfigIQ = new MUCLightSetConfigsIQ ( room , roomName , null ) ; connection . createStanzaCollectorAndSend ( mucLightSetConfigIQ ) . nextResultOrThrow ( ) ;
public class PackageNameMapping { /** * Sets the package pattern to match against . */ public static PackageNameMappingWithPackagePattern fromPackage ( String packagePattern ) { } }
PackageNameMapping packageNameMapping = new PackageNameMapping ( ) ; packageNameMapping . setPackagePattern ( packagePattern ) ; return packageNameMapping ;
public class ThresholdSauvola_MT { /** * Converts the input image into a binary image . * @ param input Input image . Not modified . * @ param output Output binary image . Modified . */ @ Override public void process ( GrayF32 input , GrayU8 output ) { } }
inputPow2 . reshape ( input . width , input . height ) ; inputMean . reshape ( input . width , input . height ) ; inputMeanPow2 . reshape ( input . width , input . height ) ; inputPow2Mean . reshape ( input . width , input . height ) ; stdev . reshape ( input . width , input . height ) ; tmp . reshape ( input . width , input . height ) ; inputPow2 . reshape ( input . width , input . height ) ; int radius = width . computeI ( Math . min ( input . width , input . height ) ) / 2 ; // mean of input image = E [ X ] BlurImageOps . mean ( input , inputMean , radius , tmp , work ) ; // standard deviation = sqrt ( E [ X ^ 2 ] + E [ X ] ^ 2) PixelMath . pow2 ( input , inputPow2 ) ; BlurImageOps . mean ( inputPow2 , inputPow2Mean , radius , tmp , work ) ; PixelMath . pow2 ( inputMean , inputMeanPow2 ) ; PixelMath . subtract ( inputPow2Mean , inputMeanPow2 , stdev ) ; PixelMath . sqrt ( stdev , stdev ) ; float R = ImageStatistics . max ( stdev ) ; if ( down ) { BoofConcurrency . loopFor ( 0 , input . height , y -> { int i = y * stdev . width ; int indexIn = input . startIndex + y * input . stride ; int indexOut = output . startIndex + y * output . stride ; for ( int x = 0 ; x < input . width ; x ++ , i ++ ) { // threshold = mean . * ( 1 + k * ( ( deviation / R ) - 1 ) ) ; float threshold = inputMean . data [ i ] * ( 1.0f + k * ( stdev . data [ i ] / R - 1.0f ) ) ; output . data [ indexOut ++ ] = ( byte ) ( input . data [ indexIn ++ ] <= threshold ? 1 : 0 ) ; } } ) ; } else { BoofConcurrency . loopFor ( 0 , input . height , y -> { int i = y * stdev . width ; int indexIn = input . startIndex + y * input . stride ; int indexOut = output . startIndex + y * output . stride ; for ( int x = 0 ; x < input . width ; x ++ , i ++ ) { // threshold = mean . * ( 1 + k * ( ( deviation / R ) - 1 ) ) ; float threshold = inputMean . data [ i ] * ( 1.0f + k * ( stdev . data [ i ] / R - 1.0f ) ) ; output . data [ indexOut ++ ] = ( byte ) ( input . data [ indexIn ++ ] >= threshold ? 
1 : 0 ) ; } } ) ; }
public class PaaSPropertyManager { /** * returns the properties for group . When the group is specified with * ApplicationProperties . xml ( old style ) , the names of the properties * returned will not contain the group names and ' / ' , for backward * compatibility . When the group is in the new style , where a group is any * property name prefix terminated by a ' . ' , the property names returned * will contain the whole property names including the group names . * @ param group * @ return Properties for the group */ @ Override public Properties getProperties ( String group ) { } }
Properties props = new Properties ( ) ; int k = group . length ( ) ; for ( Object key : properties . keySet ( ) ) { String propname = ( String ) key ; int l = propname . length ( ) ; char ch = l > k ? propname . charAt ( k ) : ' ' ; if ( ( ch == '.' || ch == '/' ) && propname . startsWith ( group ) ) { if ( ch == '.' ) props . put ( propname , properties . get ( propname ) ) ; else props . put ( propname . substring ( k + 1 ) , properties . get ( propname ) ) ; } } return props ;
public class BasicBinder { /** * Resolve a Marshaller with the given source and target class . * The marshaller is used as follows : Instances of the source can be marshalled into the target class . * @ param key The key to look up */ public < S , T > ToMarshaller < S , T > findMarshaller ( ConverterKey < S , T > key ) { } }
Converter < S , T > converter = findConverter ( key ) ; if ( converter == null ) { return null ; } if ( ToMarshallerConverter . class . isAssignableFrom ( converter . getClass ( ) ) ) { return ( ( ToMarshallerConverter < S , T > ) converter ) . getMarshaller ( ) ; } else { return new ConverterToMarshaller < S , T > ( converter ) ; }
public class GraphPath { /** * Remove the path ' s elements after the * specified one which is starting * at the specified point . The specified element will * be removed . * < p > This function removes after the < i > first occurence < / i > * of the given object . * @ param obj is the segment to remove * @ param pt is the point on which the segment was connected * as its first point . * @ return < code > true < / code > on success , otherwise < code > false < / code > */ public boolean removeFrom ( ST obj , PT pt ) { } }
return removeAfter ( indexOf ( obj , pt ) , true ) ;
public class GroupBasicAdapter { /** * remove a group * @ param removed the group index to be removed */ public void removeGroup ( int removed ) { } }
List < L > cards = getGroups ( ) ; if ( removed >= 0 && removed < cards . size ( ) ) { boolean changed = cards . remove ( removed ) != null ; if ( changed ) { setData ( cards ) ; } }
public class ModeledUser { /** * Stores all unrestricted ( unprivileged ) attributes within the given Map , * pulling the values of those attributes from the underlying user model . * If no value is yet defined for an attribute , that attribute will be set * to null . * @ param attributes * The Map to store all unrestricted attributes within . */ private void putUnrestrictedAttributes ( Map < String , String > attributes ) { } }
// Set full name attribute attributes . put ( User . Attribute . FULL_NAME , getModel ( ) . getFullName ( ) ) ; // Set email address attribute attributes . put ( User . Attribute . EMAIL_ADDRESS , getModel ( ) . getEmailAddress ( ) ) ; // Set organization attribute attributes . put ( User . Attribute . ORGANIZATION , getModel ( ) . getOrganization ( ) ) ; // Set role attribute attributes . put ( User . Attribute . ORGANIZATIONAL_ROLE , getModel ( ) . getOrganizationalRole ( ) ) ;
public class CommonOps_DDRM { /** * Removes columns from the matrix . * @ param A Matrix . Modified * @ param col0 First column * @ param col1 Last column , inclusive . */ public static void removeColumns ( DMatrixRMaj A , int col0 , int col1 ) { } }
if ( col1 < col0 ) { throw new IllegalArgumentException ( "col1 must be >= col0" ) ; } else if ( col0 >= A . numCols || col1 >= A . numCols ) { throw new IllegalArgumentException ( "Columns which are to be removed must be in bounds" ) ; } int step = col1 - col0 + 1 ; int offset = 0 ; for ( int row = 0 , idx = 0 ; row < A . numRows ; row ++ ) { for ( int i = 0 ; i < col0 ; i ++ , idx ++ ) { A . data [ idx ] = A . data [ idx + offset ] ; } offset += step ; for ( int i = col1 + 1 ; i < A . numCols ; i ++ , idx ++ ) { A . data [ idx ] = A . data [ idx + offset ] ; } } A . numCols -= step ;
public class UserRegistryWrapper { /** * { @ inheritDoc } */ @ Override @ FFDCIgnore ( com . ibm . ws . security . registry . CertificateMapFailedException . class ) public String mapCertificate ( X509Certificate [ ] chain ) throws CertificateMapNotSupportedException , CertificateMapFailedException , CustomRegistryException , RemoteException { } }
try { return wrappedUr . mapCertificate ( chain ) ; } catch ( RegistryException e ) { throw new CustomRegistryException ( e . getMessage ( ) , e ) ; } catch ( com . ibm . ws . security . registry . CertificateMapNotSupportedException e ) { throw new CertificateMapNotSupportedException ( e . getMessage ( ) , e ) ; } catch ( com . ibm . ws . security . registry . CertificateMapFailedException e ) { throw new CertificateMapFailedException ( e . getMessage ( ) , e ) ; }
public class MSBuildExecutor { /** * Execute the build . * The function assumes that at least 1 platform configuration has been provided * in a list via { @ link # setPlatforms ( List ) } . * @ throws IOException if there is a problem executing MSBuild * @ throws InterruptedException if execution is interrupted */ public int execute ( ) throws IOException , InterruptedException { } }
for ( BuildPlatform platform : buildPlatforms ) { for ( BuildConfiguration configuration : platform . getConfigurations ( ) ) { int exitCode = runMSBuild ( platform . getName ( ) , configuration . getName ( ) ) ; if ( exitCode != 0 ) { return exitCode ; } } } return 0 ;
public class ByteCodeWriter { /** * Writes UTF - 8 */ public void writeUTF8 ( ByteArrayBuffer bb , String value ) { } }
bb . clear ( ) ; for ( int i = 0 ; i < value . length ( ) ; i ++ ) { int ch = value . charAt ( i ) ; if ( ch > 0 && ch < 0x80 ) bb . append ( ch ) ; else if ( ch < 0x800 ) { bb . append ( 0xc0 + ( ch >> 6 ) ) ; bb . append ( 0x80 + ( ch & 0x3f ) ) ; } else { bb . append ( 0xe0 + ( ch >> 12 ) ) ; bb . append ( 0x80 + ( ( ch >> 6 ) & 0x3f ) ) ; bb . append ( 0x80 + ( ( ch ) & 0x3f ) ) ; } }
public class NetworkClient { /** * Returns the specified network . Gets a list of available networks by making a list ( ) request . * < p > Sample code : * < pre > < code > * try ( NetworkClient networkClient = NetworkClient . create ( ) ) { * ProjectGlobalNetworkName network = ProjectGlobalNetworkName . of ( " [ PROJECT ] " , " [ NETWORK ] " ) ; * Network response = networkClient . getNetwork ( network ) ; * < / code > < / pre > * @ param network Name of the network to return . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Network getNetwork ( ProjectGlobalNetworkName network ) { } }
GetNetworkHttpRequest request = GetNetworkHttpRequest . newBuilder ( ) . setNetwork ( network == null ? null : network . toString ( ) ) . build ( ) ; return getNetwork ( request ) ;
public class ViewHandler { /** * < p class = " changed _ added _ 2_0 " > Return a JSF action URL derived * from the < code > viewId < / code > argument that is suitable to be used * by the { @ link NavigationHandler } to issue a redirect request to * the URL using a NonFaces request . Compliant implementations * must implement this method as specified in section JSF . 7.6.2. * The default implementation simply calls through to { @ link * # getActionURL } , passing the arguments < code > context < / code > and * < code > viewId < / code > . < / p > * @ param context The FacesContext processing this request * @ param viewId The view identifier of the target page * @ param parameters A mapping of parameter names to one or more values * @ param includeViewParams A flag indicating whether view parameters should be encoded into this URL * @ since 2.0 */ public String getRedirectURL ( FacesContext context , String viewId , Map < String , List < String > > parameters , boolean includeViewParams ) { } }
return getActionURL ( context , viewId ) ;
public class StructureImpl {
    /**
     * Adds a chain to the given model, lazily creating the first model when none
     * exists yet.
     *
     * @param chain   the chain to add
     * @param modelnr index of the model to add the chain to (ignored when the
     *                first model has to be created)
     */
    @Override
    public void addChain(Chain chain, int modelnr) {
        chain.setStructure(this);
        if (models.isEmpty()) {
            // No model yet: create the first one holding just this chain.
            Model firstModel = new Model();
            List<Chain> chains = new ArrayList<>();
            chains.add(chain);
            firstModel.setChains(chains);
            models.add(firstModel);
        } else {
            models.get(modelnr).addChain(chain);
        }
    }
}
public class RecursiveTypeAnalysis { @ Override public void visitTypeNominal ( Type . Nominal type , Set < QualifiedName > visited ) { } }
// Extract declaration link Decl . Link < Decl . Type > link = type . getLink ( ) ; // Sanity check type makes sense if ( link . isResolved ( ) ) { // Extract the declaration to which this type refers . Decl . Type decl = link . getTarget ( ) ; // Recursively traverse it . visitType ( decl , visited ) ; }
public class LightBulb { /** * Enables / disables the glowing of the lightbulb * @ param ON */ public void setOn ( final boolean ON ) { } }
boolean oldState = on ; on = ON ; propertySupport . firePropertyChange ( STATE_PROPERTY , oldState , on ) ; repaint ( getInnerBounds ( ) ) ;
public class A_CmsXmlDocument { /** * Marshals ( writes ) the content of the current XML document * into an output stream . < p > * @ param out the output stream to write to * @ param encoding the encoding to use * @ return the output stream with the XML content * @ throws CmsXmlException if something goes wrong */ protected OutputStream marshal ( OutputStream out , String encoding ) throws CmsXmlException { } }
return CmsXmlUtils . marshal ( m_document , out , encoding ) ;
public class RF2Importer { /** * Loads all the module dependency information from all RF2 inputs into a single { @ link IModuleDependencyRefset } . * @ return * @ throws ImportException */ protected IModuleDependencyRefset loadModuleDependencies ( RF2Input input ) throws ImportException { } }
Set < InputStream > iss = new HashSet < > ( ) ; InputType inputType = input . getInputType ( ) ; for ( String md : input . getModuleDependenciesRefsetFiles ( ) ) { try { iss . add ( input . getInputStream ( md ) ) ; } catch ( NullPointerException | IOException e ) { final String message = StructuredLog . ModuleLoadFailure . error ( log , inputType , md , e ) ; throw new ImportException ( message , e ) ; } } IModuleDependencyRefset res = RefsetImporter . importModuleDependencyRefset ( iss ) ; return res ;
public class STAXXMLReader {
    /**
     * Reads data from the specified {@code reader}, and delegates translated SAX
     * events to {@code handler}.
     * <b>Note:</b> The {@code reader} is not closed by this method.
     *
     * @param reader reader to read data from
     * @param handler the SAXHandler which receives SAX events
     * @throws SAXException any {@link XMLStreamException} occurred is rethrown as {@link SAXException}
     */
    @SuppressWarnings({ "unchecked" })
    public static void fire(XMLStreamReader reader, SAXDelegate handler) throws SAXException {
        // Live attribute view over the reader; valid while positioned on an element.
        Attributes attrs = new STAXAttributes(reader);
        int eventType = reader.getEventType();
        // Pump StAX events until END_DOCUMENT, translating each into SAX callbacks.
        while (true) {
            switch (eventType) {
                case START_DOCUMENT:
                    handler.setDocumentLocator(new STAXLocator(reader));
                    handler.startDocument();
                    break;
                case START_ELEMENT: {
                    // Report namespace declarations before the element itself.
                    int nsCount = reader.getNamespaceCount();
                    for (int i = 0; i < nsCount; i++) {
                        String prefix = reader.getNamespacePrefix(i);
                        String uri = reader.getNamespaceURI(i);
                        // SAX uses "" where StAX reports null for default prefix/URI.
                        handler.startPrefixMapping(prefix == null ? "" : prefix, uri == null ? "" : uri);
                    }
                    String localName = reader.getLocalName();
                    String prefix = reader.getPrefix();
                    // Qualified name is "prefix:local", or just "local" without a prefix.
                    String qname = prefix == null || prefix.length() == 0 ? localName : prefix + ':' + localName;
                    String uri = reader.getNamespaceURI();
                    handler.startElement(uri == null ? "" : uri, localName, qname, attrs);
                    break;
                }
                case CHARACTERS:
                    handler.characters(reader.getTextCharacters(), reader.getTextStart(), reader.getTextLength());
                    break;
                case CDATA:
                    // CDATA is character data bracketed by explicit CDATA callbacks.
                    handler.startCDATA();
                    handler.characters(reader.getTextCharacters(), reader.getTextStart(), reader.getTextLength());
                    handler.endCDATA();
                    break;
                case COMMENT:
                    handler.comment(reader.getTextCharacters(), reader.getTextStart(), reader.getTextLength());
                    break;
                case PROCESSING_INSTRUCTION:
                    handler.processingInstruction(reader.getPITarget(), reader.getPIData());
                    break;
                case SPACE:
                    handler.ignorableWhitespace(reader.getTextCharacters(), reader.getTextStart(), reader.getTextLength());
                    break;
                case DTD:
                    // Notation/entity declarations come from implementation-defined StAX
                    // properties; the casts are the reason for @SuppressWarnings above.
                    for (NotationDeclaration notation : (List<NotationDeclaration>) reader.getProperty("javax.xml.stream.notations"))
                        handler.notationDecl(notation.getName(), notation.getPublicId(), notation.getSystemId());
                    for (EntityDeclaration entity : (List<EntityDeclaration>) reader.getProperty("javax.xml.stream.entities"))
                        handler.unparsedEntityDecl(entity.getName(), entity.getPublicId(), entity.getSystemId(), entity.getNotationName());
                    break;
                case END_ELEMENT: {
                    String localName = reader.getLocalName();
                    String prefix = reader.getPrefix();
                    String qname = prefix == null || prefix.length() == 0 ? localName : prefix + ':' + localName;
                    String uri = reader.getNamespaceURI();
                    handler.endElement(uri == null ? "" : uri, localName, qname);
                    // Close prefix mappings after the element, mirroring START_ELEMENT.
                    int nsCount = reader.getNamespaceCount();
                    for (int i = 0; i < nsCount; i++) {
                        prefix = reader.getNamespacePrefix(i);
                        handler.endPrefixMapping(prefix == null ? "" : prefix);
                    }
                    break;
                }
                case END_DOCUMENT:
                    // End of input: report and leave the pump loop.
                    handler.endDocument();
                    return;
            }
            try {
                eventType = reader.next();
            } catch (XMLStreamException ex) {
                // Contract: StAX failures surface as SAXException to the caller.
                throw new SAXException(ex);
            }
        }
    }
}
public class GeneratorSet { /** * Adds a new test case generator for the given system function . */ public void addGenerator ( String functionName , ITestCaseGenerator generator ) { } }
String functionKey = getFunctionKey ( functionName ) ; if ( generators_ . containsKey ( functionKey ) ) { throw new IllegalArgumentException ( "Generator already defined for function=" + functionName ) ; } if ( generator != null ) { generators_ . put ( functionKey , generator ) ; }
public class EnvLoader { /** * Adds a permission to the current environment . * @ param perm the permission to add . * @ return the old attribute value */ public static void addPermission ( Permission perm ) { } }
ClassLoader loader = Thread . currentThread ( ) . getContextClassLoader ( ) ; addPermission ( perm , loader ) ;
public class Media { /** * Create a MediaDeleter to execute delete . * @ param pathAccountSid The SID of the Account that created the resource ( s ) to * delete * @ param pathMessageSid The SID of the Message resource that this Media * resource belongs to * @ param pathSid The unique string that identifies this resource * @ return MediaDeleter capable of executing the delete */ public static MediaDeleter deleter ( final String pathAccountSid , final String pathMessageSid , final String pathSid ) { } }
return new MediaDeleter ( pathAccountSid , pathMessageSid , pathSid ) ;
public class BeanBuilder { /** * Parses the bean definition groovy script by first exporting the given { @ link Binding } . */ public void parse ( InputStream script , Binding binding ) { } }
if ( script == null ) throw new IllegalArgumentException ( "No script is provided" ) ; setBinding ( binding ) ; CompilerConfiguration cc = new CompilerConfiguration ( ) ; cc . setScriptBaseClass ( ClosureScript . class . getName ( ) ) ; GroovyShell shell = new GroovyShell ( classLoader , binding , cc ) ; ClosureScript s = ( ClosureScript ) shell . parse ( new InputStreamReader ( script ) ) ; s . setDelegate ( this ) ; s . run ( ) ;
public class IsomorphicGraphCounter { /** * { @ inheritDoc } */ public G max ( ) { } }
int maxCount = - 1 ; G max = null ; for ( Map < G , Integer > m : orderAndSizeToGraphs . values ( ) ) { for ( Map . Entry < G , Integer > e : m . entrySet ( ) ) { if ( e . getValue ( ) > maxCount ) { maxCount = e . getValue ( ) ; max = e . getKey ( ) ; } } } return max ;
public class SheetUpdateRequestResourcesImpl { /** * Gets a list of all Update Requests that have future schedules associated with the specified Sheet . * It mirrors to the following Smartsheet REST API method : GET / sheets / { sheetId } / updaterequests * @ param paging the object containing the pagination parameters * @ return A list of all UpdateRequests ( note that an empty list will be returned if there are none ) . * @ throws IllegalArgumentException if any argument is null or empty string * @ throws InvalidRequestException if there is any problem with the REST API request * @ throws AuthorizationException if there is any problem with the REST API authorization ( access token ) * @ throws ResourceNotFoundException if the resource cannot be found * @ throws ServiceUnavailableException if the REST API service is not available ( possibly due to rate limiting ) * @ throws SmartsheetException if there is any other error during the operation */ public PagedResult < UpdateRequest > listUpdateRequests ( long sheetId , PaginationParameters paging ) throws SmartsheetException { } }
String path = "sheets/" + sheetId + "/updaterequests" ; HashMap < String , Object > parameters = new HashMap < String , Object > ( ) ; if ( paging != null ) { parameters = paging . toHashMap ( ) ; } path += QueryUtil . generateUrl ( null , parameters ) ; return this . listResourcesWithWrapper ( path , UpdateRequest . class ) ;
public class SARLOperationHelper { /** * Replies if the given operator is a reassignment operator . * A reassignment operator changes its left operand . * @ param operator the operator . * @ return { @ code true } if the operator changes its left operand . */ protected boolean isReassignmentOperator ( XBinaryOperation operator ) { } }
if ( operator . isReassignFirstArgument ( ) ) { return true ; } final QualifiedName operatorName = this . operatorMapping . getOperator ( QualifiedName . create ( operator . getFeature ( ) . getSimpleName ( ) ) ) ; final QualifiedName compboundOperatorName = this . operatorMapping . getSimpleOperator ( operatorName ) ; return compboundOperatorName != null ;
public class ApplicationCache { /** * Returns the cache of deployments for the given application , creating one if it doesn ' t exist . * @ param applicationId The id of the application for the cache of deployments * @ return The cache of deployments for the given application */ public DeploymentCache deployments ( long applicationId ) { } }
DeploymentCache cache = deployments . get ( applicationId ) ; if ( cache == null ) deployments . put ( applicationId , cache = new DeploymentCache ( applicationId ) ) ; return cache ;
public class STFastGroupDir { /** * Stolen ruthlessly from * { @ link File # getName ( ) } */ private static final String getFileName ( String path ) { } }
int index = path . lastIndexOf ( '/' ) ; // stop using File . separatorChar as we are not even remotely close to be using that ANYwhere else - apparently . This makes it fail on Windows systems int prefixLength = prefixLength ( path ) ; if ( index < prefixLength ) return path . substring ( prefixLength ) ; return path . substring ( index + 1 ) ;
public class PersonDirectoryConfiguration { /** * Looks in the base UP _ USER table , doesn ' t find attributes but will ensure a result if it the * user exists in the portal database and is searched for by username , results are cached by the * outer caching DAO . */ @ Bean ( name = "uPortalJdbcUserSource" ) @ Qualifier ( "uPortalInternal" ) public IPersonAttributeDao getUPortalJdbcUserSource ( ) { } }
final String sql = "SELECT USER_NAME FROM UP_USER WHERE {0}" ; final SingleRowJdbcPersonAttributeDao rslt = new SingleRowJdbcPersonAttributeDao ( personDb , sql ) ; rslt . setUsernameAttributeProvider ( getUsernameAttributeProvider ( ) ) ; rslt . setQueryAttributeMapping ( Collections . singletonMap ( USERNAME_ATTRIBUTE , USERNAME_COLUMN_NAME ) ) ; final Map < String , Set < String > > resultAttributeMapping = new HashMap < > ( ) ; resultAttributeMapping . put ( USERNAME_COLUMN_NAME , Stream . of ( USERNAME_ATTRIBUTE , UID_ATTRIBUTE , USER_LOGIN_ID_ATTRIBUTE ) . collect ( Collectors . toSet ( ) ) ) ; rslt . setResultAttributeMapping ( resultAttributeMapping ) ; return rslt ;
public class ResultViewTablePanel { /** * < / editor - fold > / / GEN - END : initComponents */ private void cmdExportResultActionPerformed ( java . awt . event . ActionEvent evt ) { } }
// GEN - FIRST : event _ buttonSaveResultsActionPerformed JFileChooser fileChooser = new JFileChooser ( System . getProperty ( "user.dir" ) ) ; fileChooser . setDialogTitle ( "Export to..." ) ; int response = fileChooser . showSaveDialog ( this ) ; if ( response == JFileChooser . APPROVE_OPTION ) { File targetFile = fileChooser . getSelectedFile ( ) ; final String fileLocation = targetFile . getPath ( ) ; if ( canWrite ( targetFile ) ) { Thread thread = new Thread ( ) { public void run ( ) { getOBDASaveQueryToFileAction ( ) . run ( fileLocation ) ; } } ; thread . start ( ) ; } }
public class RestController { /** * Creates a new workflow instance . * < pre > * Request : POST / workflowInstance { workflowName : " credit . step1 " , workflowVersion : null , arguments : { " arg1 " : { " refNum " : 1 , " date " : " 22.09.2014 " } , " arg2 " : [ true , 1 , " text " ] } , label1 : " one " , label2 : null } * Response : OK , { refNum : 1 , workflowName : " credit . step1 " , workflowVersion : null , label1 : " one " , label2 : null , status : NEW } * < / pre > */ @ RequestMapping ( method = RequestMethod . POST , value = "/workflowInstance" , produces = { } }
MediaType . APPLICATION_JSON_VALUE , MediaType . TEXT_XML_VALUE } ) public ResponseEntity < WorkflowInstanceRestModel > create ( @ RequestBody String body ) { JsonObject root = JsonParserUtil . parseJson ( body ) ; CreateWorkflowInstance request = new CreateWorkflowInstance ( ) ; request . setWorkflowName ( JsonParserUtil . getAsNullSafeString ( root , "workflowName" ) ) ; request . setWorkflowVersion ( JsonParserUtil . getAsNullSafeInteger ( root , "workflowVersion" ) ) ; request . setLabel1 ( JsonParserUtil . getAsNullSafeString ( root , "label1" ) ) ; request . setLabel2 ( JsonParserUtil . getAsNullSafeString ( root , "label2" ) ) ; request . setArguments ( JsonUtil . deserializeHashMap ( JsonParserUtil . toNullSafeJsonString ( root , "arguments" ) , String . class , Object . class ) ) ; facade . createWorkflowInstance ( request ) ; WorkflowInstanceState woin = facade . findWorkflowInstance ( request . getRefNum ( ) , true ) ; return new ResponseEntity < > ( createInstanceModel ( woin ) , HttpStatus . OK ) ;
public class Match { /** * Finds a name for the variable . * @ param ele element to check * @ return a name */ public String getAName ( BioPAXElement ele ) { } }
String name = null ; if ( ele instanceof Named ) { Named n = ( Named ) ele ; if ( n . getDisplayName ( ) != null && n . getDisplayName ( ) . length ( ) > 0 ) name = n . getDisplayName ( ) ; else if ( n . getStandardName ( ) != null && n . getStandardName ( ) . length ( ) > 0 ) name = n . getStandardName ( ) ; else if ( ! n . getName ( ) . isEmpty ( ) && n . getName ( ) . iterator ( ) . next ( ) . length ( ) > 0 ) name = n . getName ( ) . iterator ( ) . next ( ) ; } if ( name == null ) name = ele . getUri ( ) ; return name + " (" + ele . getModelInterface ( ) . getName ( ) . substring ( ele . getModelInterface ( ) . getName ( ) . lastIndexOf ( "." ) + 1 ) + ")" ;
public class Engine { /** * Get config value . * @ param key - config key * @ param defaultValue - default value * @ return config value * @ see # getEngine ( ) */ public String getProperty ( String key , String defaultValue ) { } }
String value = getProperty ( key , String . class ) ; return StringUtils . isEmpty ( value ) ? defaultValue : value ;
public class Result { /** * For SQLEXECUTE * For execution of SQL prepared statements . * The parameters are set afterwards as the Result is reused */ public static Result newPreparedExecuteRequest ( Type [ ] types , long statementId ) { } }
Result result = newResult ( ResultConstants . EXECUTE ) ; result . metaData = ResultMetaData . newSimpleResultMetaData ( types ) ; result . statementID = statementId ; result . navigator . add ( ValuePool . emptyObjectArray ) ; return result ;
public class CmsXmlContent { /** * Synchronizes the locale independent fields for the given locale . < p > * @ param cms the cms context * @ param skipPaths the paths to skip * @ param sourceLocale the source locale */ public void synchronizeLocaleIndependentValues ( CmsObject cms , Collection < String > skipPaths , Locale sourceLocale ) { } }
if ( getContentDefinition ( ) . getContentHandler ( ) . hasSynchronizedElements ( ) && ( getLocales ( ) . size ( ) > 1 ) ) { for ( String elementPath : getContentDefinition ( ) . getContentHandler ( ) . getSynchronizations ( ) ) { synchronizeElement ( cms , elementPath , skipPaths , sourceLocale ) ; } }
public class UCaseProps {
    /**
     * Compares s, which has a length, with t = unfold[unfoldOffset..], which has
     * a maximum length or is NUL-terminated.
     * Preconditions: s.length() > 0 and max > 0 and s.length() <= max.
     *
     * Returns 0 when equal, a positive/negative character difference when the
     * strings differ, 1 when t is a proper prefix of s, and -(remaining max)
     * when s is a proper prefix of t.
     */
    private final int strcmpMax(String s, int unfoldOffset, int max) {
        int i1, length, c1, c2;
        length = s.length();
        max -= length; /* we require length <= max, so no need to decrement max in the loop */
        i1 = 0;
        do {
            /* compare the next character from each side */
            c1 = s.charAt(i1++);
            c2 = unfold[unfoldOffset++];
            if (c2 == 0) {
                return 1; /* reached the end of t but not of s */
            }
            c1 -= c2;
            if (c1 != 0) {
                return c1; /* return difference result */
            }
        } while (--length > 0); /* ends with length == 0 */
        /* all of s matched; equal only if t also ends here (max used up or NUL) */
        if (max == 0 || unfold[unfoldOffset] == 0) {
            return 0; /* equal to length of both strings */
        } else {
            return -max; /* return length difference */
        }
    }
}
public class Task { private void startProcessingRequest ( final T data ) { } }
Logger . v ( "Task[%s] start processing request" , this ) ; if ( mExecutor != null ) { final ProcessingPrioritizable < T > prioritizable = new ProcessingPrioritizable < T > ( this , data ) ; final ProcessingRequest < T > request = new ProcessingRequest < T > ( prioritizable , mPriority . ordinal ( ) , this ) ; mExecutor . executeProcessingRequest ( request ) ; } else { throw new IllegalStateException ( Messages . NO_EXECUTOR ) ; }
public class NumberUtilities { /** * Given an double string , it checks if it ' s a valid double ( base on apaches NumberUtils . createDouble ) and if * it ' s between the lowerBound and upperBound ( including the lower bound and excluding the upper one ) * @ param doubleStr the integer string to check * @ param lowerBound the lower bound of the interval * @ param upperBound the upper bound of the interval * @ return true if the integer string is valid and in between the lowerBound and upperBound , false otherwise * @ throws IllegalArgumentException if the lowerBound is not less than the upperBound */ public static boolean isValidDouble ( @ Nullable final String doubleStr , double lowerBound , double upperBound ) { } }
return isValidDouble ( doubleStr , lowerBound , upperBound , true , false ) ;
public class DistributedSolidMessage { /** * This method will be started in context of executor , either Shard , Client or Backup node */ @ Override public void processMessage ( ) { } }
if ( overwrite ) storage . setArray ( key , payload ) ; else if ( ! storage . arrayExists ( key ) ) storage . setArray ( key , payload ) ;
public class Mixer { /** * < p > mix . < / p > * @ param sample1 a { @ link ameba . captcha . audio . Sample } object . * @ param volAdj1 a double . * @ param sample2 a { @ link ameba . captcha . audio . Sample } object . * @ param volAdj2 a double . * @ return a { @ link ameba . captcha . audio . Sample } object . */ public final static Sample mix ( Sample sample1 , double volAdj1 , Sample sample2 , double volAdj2 ) { } }
double [ ] s1_ary = sample1 . getInterleavedSamples ( ) ; double [ ] s2_ary = sample2 . getInterleavedSamples ( ) ; double [ ] mixed = mix ( s1_ary , volAdj1 , s2_ary , volAdj2 ) ; return buildSample ( sample1 . getSampleCount ( ) , mixed ) ;
public class HashMap {
    /**
     * Computes key.hashCode() and spreads (XORs) higher bits of the hash to
     * lower ones. Because the table uses power-of-two masking, sets of hashes
     * that vary only in bits above the current mask would always collide
     * (e.g. sets of Float keys holding consecutive whole numbers in small
     * tables), so the impact of higher bits is folded downward in the
     * cheapest possible way. A {@code null} key hashes to 0.
     */
    static final int hash(Object key) {
        if (key == null) {
            return 0;
        }
        final int h = key.hashCode();
        // XOR in the high half so table-index masking sees the upper bits too.
        return h ^ (h >>> 16);
    }
}
public class AuroraProtocol {
    /**
     * Reinitializes loopAddresses with all hosts: every server in randomized
     * order, plus the cluster address when only one host is configured. Hosts
     * with an active connection are removed from the list.
     *
     * @param listener      the current listener
     * @param loopAddresses the deque to reinitialize
     */
    private static void resetHostList(AuroraListener listener, Deque<HostAddress> loopAddresses) {
        // All servers were tried without success: rebuild the rotation from scratch.
        final List<HostAddress> candidates = new ArrayList<>(listener.getUrlParser().getHostAddresses());
        Collections.shuffle(candidates);
        // Append the cluster endpoint last when fewer than two hosts are configured.
        if (listener.getClusterHostAddress() != null
                && listener.getUrlParser().getHostAddresses().size() < 2) {
            candidates.add(listener.getClusterHostAddress());
        }
        // Skip hosts that already have an active connection.
        candidates.removeAll(listener.connectedHosts());
        loopAddresses.clear();
        loopAddresses.addAll(candidates);
    }
}
public class MessageStoreImpl { /** * lohith liberty change , these are currently dummy as MessageStore is not used as a component by Admin */ @ Override public final void initialize ( JsRecoveryMessagingEngine recoveryME , String mode ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "initialize" , new Object [ ] { recoveryME , mode } ) ; this . _messagingEngine = recoveryME ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "initialize" , "Assigned new recoverME to messages store" ) ;
public class Span {
    /**
     * Adjusts the start and end position of the span when they lie beyond
     * {@code offset}: both are shifted by {@code n} (which may be negative)
     * and clamped so they never move before {@code offset}. Returns this span
     * unchanged when {@code offset} is negative or lies at/after the end.
     *
     * @param offset the position from which the adjustment applies
     * @param n      the amount to shift by (may be negative)
     * @return this span (mutated in place)
     */
    public Span adjust(int offset, int n) {
        // Negative offsets are invalid; leave the span untouched.
        if (offset < 0) return this; // null
        if (offset < end) {
            end += n;
            // Clamp: the end may not move before the adjustment offset.
            if (end < offset) end = offset;
        } else return this; // null — span lies entirely before the offset
        if (offset < start) {
            start += n;
            // Clamp: the start may not move before the adjustment offset.
            if (start < offset) start = offset;
        }
        return this;
    }
}
public class MapboxOfflineRouter { /** * Uses libvalhalla and local tile data to generate mapbox - directions - api - like JSON . * @ param route the { @ link OfflineRoute } to get a { @ link DirectionsRoute } from * @ param callback a callback to pass back the result */ public void findRoute ( @ NonNull OfflineRoute route , OnOfflineRouteFoundCallback callback ) { } }
offlineNavigator . retrieveRouteFor ( route , callback ) ;
public class UpdateAccountAuditConfigurationRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateAccountAuditConfigurationRequest updateAccountAuditConfigurationRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateAccountAuditConfigurationRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateAccountAuditConfigurationRequest . getRoleArn ( ) , ROLEARN_BINDING ) ; protocolMarshaller . marshall ( updateAccountAuditConfigurationRequest . getAuditNotificationTargetConfigurations ( ) , AUDITNOTIFICATIONTARGETCONFIGURATIONS_BINDING ) ; protocolMarshaller . marshall ( updateAccountAuditConfigurationRequest . getAuditCheckConfigurations ( ) , AUDITCHECKCONFIGURATIONS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class IsotopeHandler { /** * { @ inheritDoc } */ @ Override public void startElement ( String uri , String local , String raw , Attributes atts ) { } }
currentChars = "" ; dictRef = "" ; logger . debug ( "startElement: " , raw ) ; logger . debug ( "uri: " , uri ) ; logger . debug ( "local: " , local ) ; logger . debug ( "raw: " , raw ) ; if ( "isotope" . equals ( local ) ) { workingIsotope = createIsotopeOfElement ( currentElement , atts ) ; } else if ( "isotopeList" . equals ( local ) ) { currentElement = getElementSymbol ( atts ) ; } else if ( "scalar" . equals ( local ) ) { for ( int i = 0 ; i < atts . getLength ( ) ; i ++ ) { if ( "dictRef" . equals ( atts . getQName ( i ) ) ) { dictRef = atts . getValue ( i ) ; } } }
public class CollectUtils { /** * 将一个集合按照固定大小查分成若干个集合 。 * @ param list a { @ link java . util . List } object . * @ param count a int . * @ param < T > a T object . * @ return a { @ link java . util . List } object . */ public static < T > List < List < T > > split ( final List < T > list , final int count ) { } }
List < List < T > > subIdLists = CollectUtils . newArrayList ( ) ; if ( list . size ( ) < count ) { subIdLists . add ( list ) ; } else { int i = 0 ; while ( i < list . size ( ) ) { int end = i + count ; if ( end > list . size ( ) ) { end = list . size ( ) ; } subIdLists . add ( list . subList ( i , end ) ) ; i += count ; } } return subIdLists ;
public class AbstractDetachableListServiceModel { /** * { @ inheritDoc } */ @ Override protected List < E > load ( ) { } }
context = BundleReference . class . cast ( serviceType . getClassLoader ( ) ) . getBundle ( ) . getBundleContext ( ) ; List < E > returnValues = new ArrayList < E > ( ) ; Collection < ServiceReference < T > > refs = Collections . EMPTY_LIST ; try { refs = context . getServiceReferences ( serviceType , filter ) ; } catch ( InvalidSyntaxException e ) { LOGGER . error ( "Could not load object from service. There was a problem with the filter syntax. " + "Returning null. Service called: " + serviceType + " from bundle " + context . getBundle ( ) . getSymbolicName ( ) , e ) ; return null ; } for ( ServiceReference < T > ref : refs ) { try { T service = context . getService ( ref ) ; returnValues . add ( doLoad ( service ) ) ; } catch ( Exception e ) { LOGGER . error ( "Could not load object from service. Trying next service. Service called: " + serviceType + " from bundle " + context . getBundle ( ) . getSymbolicName ( ) , e ) ; } finally { context . ungetService ( ref ) ; } } return returnValues ;
public class AntClassLoader { /** * Get the certificates for a given jar entry , if it is indeed a jar . * @ param container the File from which to read the entry * @ param entry the entry of which the certificates are requested * @ return the entry ' s certificates or null is the container is * not a jar or it has no certificates . * @ exception IOException if the manifest cannot be read . */ private Certificate [ ] getCertificates ( File container , String entry ) throws IOException { } }
if ( container . isDirectory ( ) ) { return null ; } JarFile jarFile = ( JarFile ) jarFiles . get ( container ) ; if ( jarFile == null ) { return null ; } JarEntry ent = jarFile . getJarEntry ( entry ) ; return ent == null ? null : ent . getCertificates ( ) ;
public class web {
    /**
     * Queries the given URL with a list of params via POST and parses the
     * response body as JSON.
     *
     * NOTE(review): best-effort legacy code. If the request fails, {@code is}
     * stays null and the read below throws an NPE that is swallowed by the
     * broad catch, yielding a null return. Neither the HTTP client nor the
     * reader is closed. The response is decoded as iso-8859-1 regardless of
     * the server's charset — confirm that matches the endpoint.
     *
     * @param url the url to query
     * @param params list of pair-values
     * @return the result JSON, or {@code null} when the request or parsing fails
     */
    public static JSONObject getJSONFromUrlViaPOST(String url, List<NameValuePair> params) {
        InputStream is = null;
        JSONObject jObj = null;
        String json = "";
        // Making HTTP request
        try {
            // defaultHttpClient
            DefaultHttpClient httpClient = new DefaultHttpClient();
            HttpPost httpPost = new HttpPost(url);
            httpPost.setEntity(new UrlEncodedFormEntity(params));
            HttpResponse httpResponse = httpClient.execute(httpPost);
            HttpEntity httpEntity = httpResponse.getEntity();
            is = httpEntity.getContent();
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        } catch (ClientProtocolException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Read the response body line by line into a single string.
        try {
            BufferedReader reader = new BufferedReader(new InputStreamReader(is, "iso-8859-1"), 8);
            StringBuilder sb = new StringBuilder();
            String line = null;
            while ((line = reader.readLine()) != null) {
                sb.append(line + "\n");
            }
            is.close();
            json = sb.toString();
        } catch (Exception e) {
            Log.e("Buffer Error", "Error converting result " + e.toString());
        }
        // try parse the string to a JSON object
        try {
            jObj = new JSONObject(json);
        } catch (JSONException e) {
            Log.e("JSON Parser", "Error parsing data " + e.toString());
        }
        // return JSON String (null when the body could not be parsed)
        return jObj;
    }
}
public class CmsMessageBundleEditor { /** * Unlock all edited resources . */ private void cleanUpAction ( ) { } }
try { m_model . deleteDescriptorIfNecessary ( ) ; } catch ( CmsException e ) { LOG . error ( m_messages . key ( Messages . ERR_DELETING_DESCRIPTOR_0 ) , e ) ; } // unlock resource m_model . unlock ( ) ;
public class CmsJspTagContentLoad {
    /**
     * Initializes this content load tag.<p>
     *
     * Validates the paging attributes, selects either this tag or the given
     * preloading ancestor as the container, resolves the collector name and
     * parameter (with macro expansion), executes the collector if necessary,
     * fills the content info bean, and resets the direct edit options.
     *
     * @param container the parent container (could be a preloader)
     * @throws JspException in case something goes wrong
     */
    protected void init(I_CmsXmlContentContainer container) throws JspException {
        // check that pageSize and pageIndex are either both set or both absent
        int pageAttribCount = 0;
        pageAttribCount += CmsStringUtil.isNotEmpty(m_pageSize) ? 1 : 0;
        pageAttribCount += CmsStringUtil.isNotEmpty(m_pageIndex) ? 1 : 0;
        if ((pageAttribCount > 0) && (pageAttribCount < 2)) {
            // exactly one of the two paging attributes was given — invalid
            throw new CmsIllegalArgumentException(Messages.get().container(Messages.ERR_TAG_CONTENTLOAD_INDEX_SIZE_0));
        }
        I_CmsXmlContentContainer usedContainer;
        if (container == null) {
            // no preloading ancestor has been found — this tag is the container
            usedContainer = this;
            if (CmsStringUtil.isEmpty(m_collector)) {
                // check if the tag contains a collector attribute
                throw new CmsIllegalArgumentException(Messages.get().container(Messages.ERR_TAG_CONTENTLOAD_MISSING_COLLECTOR_0));
            }
            if (CmsStringUtil.isEmpty(m_param)) {
                // check if the tag contains a param attribute
                throw new CmsIllegalArgumentException(Messages.get().container(Messages.ERR_TAG_CONTENTLOAD_MISSING_PARAM_0));
            }
        } else {
            // use provided container (preloading ancestor)
            usedContainer = container;
        }
        if (isPreloader()) {
            // always disable direct edit for preload
            m_directEditMode = CmsDirectEditMode.FALSE;
        } else if (m_directEditMode == null) {
            // direct edit mode must not be null
            m_directEditMode = CmsDirectEditMode.FALSE;
        }
        // initialize OpenCms access objects
        m_controller = CmsFlexController.getController(pageContext.getRequest());
        m_cms = m_controller.getCmsObject();
        // get the resource name from the selected container
        String resourcename = getResourceName(m_cms, usedContainer);
        // initialize a string mapper to resolve EL like strings in tag attributes
        CmsMacroResolver resolver = CmsMacroResolver.newInstance().setCmsObject(m_cms).setJspPageContext(
            pageContext).setResourceName(resourcename).setKeepEmptyMacros(true);
        // resolve the collector name
        if (container == null) {
            // no preload parent container, initialize new values
            m_collectorName = resolver.resolveMacros(getCollector());
            // resolve the parameter
            m_collectorParam = resolver.resolveMacros(getParam());
            m_collectorResult = null;
        } else {
            // preload parent content container available, use values from this container
            m_collectorName = usedContainer.getCollectorName();
            m_collectorParam = usedContainer.getCollectorParam();
            m_collectorResult = usedContainer.getCollectorResult();
            if (m_locale == null) {
                // use locale from ancestor if available
                m_locale = usedContainer.getXmlDocumentLocale();
            }
        }
        if (m_locale == null) {
            // no locale set, use locale from users request context
            m_locale = m_cms.getRequestContext().getLocale();
        }
        try {
            // now collect the resources
            I_CmsResourceCollector collector = OpenCms.getResourceManager().getContentCollector(m_collectorName);
            if (collector == null) {
                throw new CmsException(Messages.get().container(Messages.ERR_COLLECTOR_NOT_FOUND_1, m_collectorName));
            }
            // execute the collector if not already done in parent tag
            if (m_collectorResult == null) {
                m_collectorResult = collector.getResults(m_cms, m_collectorName, m_collectorParam);
            }
            // populate the content info bean with paging and result information
            m_contentInfoBean = new CmsContentInfoBean();
            m_contentInfoBean.setPageSizeAsString(resolver.resolveMacros(m_pageSize));
            m_contentInfoBean.setPageIndexAsString(resolver.resolveMacros(m_pageIndex));
            m_contentInfoBean.setPageNavLengthAsString(resolver.resolveMacros(m_pageNavLength));
            m_contentInfoBean.setResultSize(m_collectorResult.size());
            m_contentInfoBean.setLocale(m_locale.toString());
            m_contentInfoBean.initResultIndex();
            if (!isPreloader()) {
                // not required when only preloading
                m_collectorResult = CmsJspTagResourceLoad.limitCollectorResult(m_contentInfoBean, m_collectorResult);
                m_contentInfoBean.initPageNavIndexes();
                String createParam = collector.getCreateParam(m_cms, m_collectorName, m_collectorParam);
                if ((createParam != null) && CmsDefaultFileNameGenerator.hasNumberMacro(createParam)) {
                    // use "create link" only if collector supports it and it contains the number macro for new file names
                    m_directEditLinkForNew = CmsEncoder.encode(m_collectorName + "|" + createParam);
                }
            } else if (isScopeVarSet()) {
                // scope variable is set, store content load bean in JSP context
                CmsJspContentLoadBean bean = new CmsJspContentLoadBean(m_cms, m_locale, m_collectorResult);
                storeAttribute(bean);
            }
        } catch (CmsException e) {
            // record the error on the controller before converting to a JSP exception
            m_controller.setThrowable(e, m_cms.getRequestContext().getUri());
            throw new JspException(e);
        }
        // reset the direct edit options (required because of re-used tags)
        m_directEditOpen = false;
        m_directEditFollowButtons = null;
        // the next loop is the first loop
        m_isFirstLoop = true;
    }
}
public class VictimsSQL { /** * Give a query and list of objects to set , a prepared statement is created , * cached and returned with the objects set in the order they are provided . * @ param query * @ param objects * @ return * @ throws SQLException */ protected PreparedStatement setObjects ( Connection connection , String query , Object ... objects ) throws SQLException { } }
PreparedStatement ps = statement ( connection , query ) ; setObjects ( ps , objects ) ; return ps ;
public class TcpChannelHub { /** * the transaction id are generated as unique timestamps * @ param timeMs in milliseconds * @ return a unique transactionId */ public long nextUniqueTransaction ( long timeMs ) { } }
long id = timeMs ; for ( ; ; ) { long old = transactionID . get ( ) ; if ( old >= id ) id = old + 1 ; if ( transactionID . compareAndSet ( old , id ) ) break ; } return id ;
public class MCAAuthorizationManager { /** * Unregisters authentication listener * @ param realm the realm the listener was registered for */ public void unregisterAuthenticationListener ( String realm ) { } }
if ( realm != null && ! realm . isEmpty ( ) ) { challengeHandlers . remove ( realm ) ; }
public class Log {
    /**
     * Androlog init method. Loads the Androlog configuration from, in order:
     * <ol>
     * <li><code>/SDCARD/fileName</code> if the file name is not <code>null</code></li>
     * <li><code>/SDCARD/Application_Package.properties</code> if the file name is
     * <code>null</code> and the context is not <code>null</code></li>
     * <li><code>Application_Assets/fileName</code> if the file name is not
     * <code>null</code> and the context is not <code>null</code></li>
     * <li><code>Application_Assets/Application_Package.properties</code> if the
     * file name is <code>null</code> and the context is not <code>null</code></li>
     * </ol>
     * The first file found wins, which allows overriding a bundled configuration
     * by just pushing a file onto the SDCard. Passing <code>null</code> for both
     * parameters is equivalent to case 2. If every lookup fails, logging stays
     * disabled.
     *
     * @param context the application context (may be <code>null</code>)
     * @param fileName the configuration file name (may be <code>null</code>)
     */
    public static void init(Context context, String fileName) {
        reset();
        Log.context = context;
        // Normalize the explicit file name so it always carries the ".properties" suffix.
        String file = fileName != null && !fileName.endsWith(".properties") ? fileName + ".properties" : fileName;
        if (file == null && context != null) {
            // No explicit name given: derive one from the application package.
            file = context.getPackageName() + ".properties";
        }
        // Check from SDCard first — this is the override location.
        InputStream fileIs = LogHelper.getConfigurationFileFromSDCard(file);
        if (fileIs == null) {
            // Check from Assets (the bundled default configuration).
            fileIs = LogHelper.getConfigurationFileFromAssets(context, file);
        }
        if (fileIs != null) {
            Properties configuration = new Properties();
            try {
                // There is no load(Reader) method on Android,
                // so we have to use InputStream
                configuration.load(fileIs);
                // Then call configure.
                configure(configuration);
            } catch (IOException e) {
                // Best effort by design: an unreadable configuration simply
                // leaves logging disabled rather than crashing the app.
                return;
            } finally {
                LogHelper.closeQuietly(fileIs);
            }
        }
    }
}
public class WRadioButtonSelectExample { /** * Examples of readonly states . When in a read only state only the selected option is output . Since a * WRadioButtonSeelct can only have 0 or 1 selected option the LAYOUT and FRAME are ignored . */ private void addReadOnlyExamples ( ) { } }
add ( new WHeading ( HeadingLevel . H3 , "Read-only WRadioButtonSelect examples" ) ) ; add ( new ExplanatoryText ( "These examples all use the same list of options: the states and territories list from the editable examples above. " + "When the readOnly state is specified only that option which is selected is output.\n" + "Since no more than one option is able to be selected the layout and frame settings are ignored in the read only state." ) ) ; WFieldLayout layout = new WFieldLayout ( ) ; add ( layout ) ; WRadioButtonSelect select = new WRadioButtonSelect ( "australian_state" ) ; select . setReadOnly ( true ) ; layout . addField ( "Read only with no selection" , select ) ; select = new SelectWithSelection ( "australian_state" ) ; select . setReadOnly ( true ) ; layout . addField ( "Read only with selection" , select ) ;
public class UtilFolder { /** * Construct a usable path using a list of string , automatically separated by the portable separator . * @ param separator The separator to use ( must not be < code > null < / code > ) . * @ param path The list of directories , if has , and file ( must not be < code > null < / code > ) . * @ return The full media path . * @ throws LionEngineException If invalid parameters . */ public static String getPathSeparator ( String separator , String ... path ) { } }
Check . notNull ( separator ) ; Check . notNull ( path ) ; final StringBuilder fullPath = new StringBuilder ( path . length ) ; for ( int i = 0 ; i < path . length ; i ++ ) { if ( i == path . length - 1 ) { fullPath . append ( path [ i ] ) ; } else if ( path [ i ] != null && path [ i ] . length ( ) > 0 ) { fullPath . append ( path [ i ] ) ; if ( ! fullPath . substring ( fullPath . length ( ) - 1 , fullPath . length ( ) ) . equals ( separator ) ) { fullPath . append ( separator ) ; } } } return fullPath . toString ( ) ;
public class RDBMSEntityReader {
    /**
     * Finds an entity by its primary key. When the entity metadata declares
     * relations, a JPA-derived SQL query is issued so the relation columns can be
     * fetched in the same round trip; otherwise a plain client lookup is done.
     *
     * @see com.impetus.kundera.persistence.EntityReader#findById(java.lang.Object,
     *      com.impetus.kundera.metadata.model.EntityMetadata,
     *      com.impetus.kundera.client.Client)
     */
    @Override
    public EnhanceEntity findById(Object primaryKey, EntityMetadata m, Client client) {
        List<String> relationNames = m.getRelationNames();
        if (relationNames != null && !relationNames.isEmpty()) {
            // Entity holds relations: fetch entity plus relation values via SQL.
            Set<String> keys = new HashSet<String>(1);
            keys.add(primaryKey.toString());
            String query = getSqlQueryFromJPA(m, relationNames, keys);
            List<EnhanceEntity> results = populateEnhanceEntities(m, relationNames, client, query);
            // The primary key is unique, so at most one row is expected.
            return results != null && !results.isEmpty() ? results.get(0) : null;
        } else {
            // No relations: a direct client lookup is sufficient.
            Object o;
            try {
                o = client.find(m.getEntityClazz(), primaryKey);
            } catch (Exception e) {
                // Surface any client failure as a persistence-layer error.
                throw new PersistenceException(e);
            }
            return o != null ? new EnhanceEntity(o, getId(o, m), null) : null;
        }
    }
}
public class PyValidator { /** * Check that import mapping are known . * @ param importDeclaration the declaration . */ @ Check public void checkImportsMapping ( XImportDeclaration importDeclaration ) { } }
final JvmDeclaredType type = importDeclaration . getImportedType ( ) ; doTypeMappingCheck ( importDeclaration , type , this . typeErrorHandler1 ) ;
public class M { /** * Returns an < code > IfNotExists < / code > object which represents an < a href = * " http : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / Expressions . Modifying . html " * > if _ not _ exists ( path , operand ) < / a > function call where path refers to that * of the current path operand ; used for building expressions . * < pre > * " if _ not _ exists ( path , operand ) – If the item does not contain an attribute * at the specified path , then if _ not _ exists evaluates to operand ; otherwise , * it evaluates to path . You can use this function to avoid overwriting an * attribute already present in the item . " * < / pre > * @ param defaultValue * the default value that will be used as the operand to the * if _ not _ exists function call . */ public IfNotExistsFunction < M > ifNotExists ( Map < String , ? > defaultValue ) { } }
return new IfNotExistsFunction < M > ( this , new LiteralOperand ( defaultValue ) ) ;
public class PublisherFlexible {
    /**
     * Gets the publisher of the specified type, if it is wrapped by the
     * "Flexible Publish" publisher in a project. Returns {@code null} when no such
     * publisher is found, when the Flexible Publish plugin is not installed, or
     * when no Jenkins instance is available.
     *
     * @param project the project
     * @param type the type of the publisher
     */
    public T find(AbstractProject<?, ?> project, Class<T> type) {
        // Jenkins.getInstance() is @CheckForNull (e.g. on agents or during
        // shutdown); the previous code dereferenced it unconditionally.
        final Jenkins jenkins = Jenkins.getInstance();
        if (jenkins == null || jenkins.getPlugin(FLEXIBLE_PUBLISH_PLUGIN) == null) {
            return null;
        }
        // Iterate all the project's publishers looking for the flexible publisher:
        for (Publisher publisher : project.getPublishersList()) {
            if (publisher instanceof FlexiblePublisher) {
                // See if it wraps a publisher of the specified type:
                T wrapped = getWrappedPublisher(publisher, type);
                if (wrapped != null) {
                    return wrapped;
                }
            }
        }
        return null;
    }
}
public class Circle { /** * Set the CircleStrokeColor property * The stroke color of the circle . * To update the circle on the map use { @ link CircleManager # update ( Annotation ) } . * @ param color value for String */ public void setCircleStrokeColor ( @ ColorInt int color ) { } }
jsonObject . addProperty ( CircleOptions . PROPERTY_CIRCLE_STROKE_COLOR , ColorUtils . colorToRgbaString ( color ) ) ;
public class Requests {
    /**
     * Creates a new {@link PublishNotify} instance that is used to publish
     * metadata to an {@link Identifier}.
     *
     * @param i1 the {@link Identifier} to which the given metadata is published
     * @param md the metadata that shall be published
     * @return the new {@link PublishNotify} instance
     */
    public static PublishNotify createPublishNotify(Identifier i1, Document md) {
        // Delegate to the three-argument overload with a null second argument —
        // presumably the second endpoint of a link, unused when publishing to a
        // single identifier; confirm against the overload's contract.
        return createPublishNotify(i1, null, md);
    }
}
public class DataWriterBuilder { /** * Tell the writer how many branches are being used . * @ param branches is the number of branches * @ return this { @ link DataWriterBuilder } instance */ public DataWriterBuilder < S , D > withBranches ( int branches ) { } }
this . branches = branches ; log . debug ( "With branches: {}" , this . branches ) ; return this ;
public class Source { /** * Sets the source buffer . Equivalent to unqueueing all buffers , then queuing the provided * buffer . Cannot be called when the source is playing or paused . * @ param buffer the buffer to set , or < code > null < / code > to clear . */ public void setBuffer ( Buffer buffer ) { } }
_queue . clear ( ) ; if ( buffer != null ) { _queue . add ( buffer ) ; } AL10 . alSourcei ( _id , AL10 . AL_BUFFER , buffer == null ? AL10 . AL_NONE : buffer . getId ( ) ) ;
public class Inflector {
    /**
     * Turns a non-negative number into an ordinal string used to denote the
     * position in an ordered sequence, such as 1st, 2nd, 3rd, 4th.
     *
     * @param number the non-negative number
     * @return the string with the number and ordinal suffix
     */
    public String ordinalize(int number) {
        String numberStr = Integer.toString(number);
        // The "teens" (11th, 12th, 13th) are irregular in EVERY hundred:
        // 111th, 212th, 1013th, ... BUGFIX: the previous check only covered the
        // literal values 11-13, producing "111st", "112nd", "113rd".
        int lastTwoDigits = number % 100;
        if (11 <= lastTwoDigits && lastTwoDigits <= 13) return numberStr + "th";
        int remainder = number % 10;
        if (remainder == 1) return numberStr + "st";
        if (remainder == 2) return numberStr + "nd";
        if (remainder == 3) return numberStr + "rd";
        return numberStr + "th";
    }
}
public class GeoCodeMongo { /** * { @ inheritDoc } */ @ Override public GeoDocument addDocument ( final GeoDocument document ) { } }
if ( document == null || document . getName ( ) == null ) { return document ; } geoDocumentRepository . save ( ( GeoDocumentMongo ) document ) ; return document ;
public class TransactionHandler { /** * Used by stack for relaying received MGCP response messages to the application . * @ param message * receive MGCP response message . */ public void receiveResponse ( byte [ ] data , SplitDetails [ ] msg , Integer txID , ReturnCode returnCode ) { } }
cancelReTransmissionTimer ( ) ; cancelLongtranTimer ( ) ; JainMgcpResponseEvent event = null ; try { event = decodeResponse ( data , msg , txID , returnCode ) ; } catch ( Exception e ) { logger . error ( "Could not decode message: " , e ) ; } event . setTransactionHandle ( remoteTID ) ; if ( this . isProvisional ( event . getReturnCode ( ) ) ) resetLongtranTimer ( ) ; stack . provider . processMgcpResponseEvent ( event , commandEvent ) ; if ( ! this . isProvisional ( event . getReturnCode ( ) ) ) this . release ( ) ;
public class AnalyticFormulas {
    /**
     * Re-implementation of the Excel PRICE function (a rather primitive bond price
     * formula). The re-implementation is not exact, because this function does not
     * consider daycount conventions. We assume (int)(timeToMaturity * frequency)
     * whole future periods plus a running (broken) period of
     * timeToMaturity - (whole periods) / frequency.
     *
     * @param timeToMaturity the time to maturity
     * @param coupon coupon payment per period
     * @param yield yield (discounting per period with 1 / (1 + yield/frequency))
     * @param redemption redemption (notional repayment)
     * @param frequency payment frequency (1, 2, 4)
     * @return the clean price
     */
    public static double price(double timeToMaturity, double coupon, double yield, double redemption, int frequency) {
        final double periodDiscount = 1.0 + yield / frequency;
        final double periodLength = 1.0 / frequency;

        // Start from the redemption payment — only if the bond has not matured.
        double price = timeToMaturity > 0 ? redemption : 0.0;

        // Walk backwards from maturity: add each coupon, then discount the
        // running value by one period.
        double remainingTime = timeToMaturity;
        while (remainingTime > 0) {
            price += coupon;
            price = price / periodDiscount;
            remainingTime -= periodLength;
        }

        // Broken (running) period: compound forward over the elapsed fraction of
        // the current period, then subtract accrued interest for the clean price.
        final double brokenPeriod = 0.0 - remainingTime;
        price *= Math.pow(periodDiscount, brokenPeriod);
        price -= coupon * brokenPeriod;

        return price;
    }
}
public class SeriesCalculator { /** * Returns the factor of the term with specified index . * @ param index the index ( starting with 0) * @ return the factor of the specified term */ protected BigRational getFactor ( int index ) { } }
while ( factors . size ( ) <= index ) { BigRational factor = getCurrentFactor ( ) ; factors . add ( factor ) ; calculateNextFactor ( ) ; } return factors . get ( index ) ;
public class ResourcesInner { /** * Creates a resource . * @ param resourceGroupName The name of the resource group for the resource . The name is case insensitive . * @ param resourceProviderNamespace The namespace of the resource provider . * @ param parentResourcePath The parent resource identity . * @ param resourceType The resource type of the resource to create . * @ param resourceName The name of the resource to create . * @ param apiVersion The API version to use for the operation . * @ param parameters Parameters for creating or updating the resource . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < ServiceResponse < GenericResourceInner > > createOrUpdateWithServiceResponseAsync ( String resourceGroupName , String resourceProviderNamespace , String parentResourcePath , String resourceType , String resourceName , String apiVersion , GenericResourceInner parameters ) { } }
if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( resourceProviderNamespace == null ) { throw new IllegalArgumentException ( "Parameter resourceProviderNamespace is required and cannot be null." ) ; } if ( parentResourcePath == null ) { throw new IllegalArgumentException ( "Parameter parentResourcePath is required and cannot be null." ) ; } if ( resourceType == null ) { throw new IllegalArgumentException ( "Parameter resourceType is required and cannot be null." ) ; } if ( resourceName == null ) { throw new IllegalArgumentException ( "Parameter resourceName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( apiVersion == null ) { throw new IllegalArgumentException ( "Parameter apiVersion is required and cannot be null." ) ; } if ( parameters == null ) { throw new IllegalArgumentException ( "Parameter parameters is required and cannot be null." ) ; } Validator . validate ( parameters ) ; Observable < Response < ResponseBody > > observable = service . createOrUpdate ( resourceGroupName , resourceProviderNamespace , parentResourcePath , resourceType , resourceName , this . client . subscriptionId ( ) , apiVersion , parameters , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) ; return client . getAzureClient ( ) . getPutOrPatchResultAsync ( observable , new TypeToken < GenericResourceInner > ( ) { } . getType ( ) ) ;
public class ResourceadapterImpl { /** * Returns all < code > security - permission < / code > elements * @ return list of < code > security - permission < / code > */ public List < SecurityPermission < Resourceadapter < T > > > getAllSecurityPermission ( ) { } }
List < SecurityPermission < Resourceadapter < T > > > list = new ArrayList < SecurityPermission < Resourceadapter < T > > > ( ) ; List < Node > nodeList = childNode . get ( "security-permission" ) ; for ( Node node : nodeList ) { SecurityPermission < Resourceadapter < T > > type = new SecurityPermissionImpl < Resourceadapter < T > > ( this , "security-permission" , childNode , node ) ; list . add ( type ) ; } return list ;
public class JobHistoryFileParserBase {
    /**
     * Fetches the submit time from a raw job history byte representation.
     *
     * @param jobHistoryRaw bytes from which to pull the SUBMIT_TIME
     * @return the job submit time in milliseconds since January 1, 1970 UTC;
     *         or 0 if no value can be found
     */
    public static long getSubmitTimeMillisFromJobHistory(byte[] jobHistoryRaw) {
        long submitTimeMillis = 0;
        // A missing history blob yields the documented default of 0.
        if (null == jobHistoryRaw) {
            return submitTimeMillis;
        }
        // The byte layout differs between Hadoop 1 and Hadoop 2 history files.
        HadoopVersion hv = JobHistoryFileParserFactory.getVersion(jobHistoryRaw);
        switch (hv) {
        case TWO:
            // look for the job submitted event, since that has the job submit time
            int startIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.JOB_SUBMIT_EVENT_BYTES, 0);
            if (startIndex != -1) {
                // now look for the submit time in this event
                // NOTE(review): despite its name, this is the index of the
                // SUBMIT_TIME prefix within the event, not of a quote character.
                int secondQuoteIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.SUBMIT_TIME_PREFIX_HADOOP2_BYTES, startIndex);
                if (secondQuoteIndex != -1) {
                    // read the string that contains the unix timestamp
                    // NOTE(review): the read offset is advanced by
                    // EPOCH_TIMESTAMP_STRING_LENGTH rather than the prefix
                    // length — presumably the two constants coincide; confirm
                    // against Constants before modifying this.
                    String submitTimeMillisString = Bytes.toString(jobHistoryRaw,
                        secondQuoteIndex + Constants.EPOCH_TIMESTAMP_STRING_LENGTH,
                        Constants.EPOCH_TIMESTAMP_STRING_LENGTH);
                    try {
                        submitTimeMillis = Long.parseLong(submitTimeMillisString);
                    } catch (NumberFormatException nfe) {
                        LOG.error(" caught NFE during conversion of submit time " + submitTimeMillisString + " " + nfe.getMessage());
                        submitTimeMillis = 0;
                    }
                }
            }
            break;
        case ONE:
        default:
            // The start of the history file looks like this:
            // Meta VERSION="1" .
            // Job JOBID="job_20120101120000_12345" JOBNAME="..."
            // USER="username" SUBMIT_TIME="1339063492288" JOBCONF="
            // First we look for the first occurrence of SUBMIT_TIME="
            // Then we find the place of the next close quote "
            // Then our value is in between those two if valid at all.
            startIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.SUBMIT_TIME_PREFIX_BYTES, 0);
            if (startIndex != -1) {
                int prefixEndIndex = startIndex + Constants.SUBMIT_TIME_PREFIX_BYTES.length;
                // Find close quote in the snippet, start looking where the prefix ends.
                int secondQuoteIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.QUOTE_BYTES, prefixEndIndex);
                if (secondQuoteIndex != -1) {
                    int numberLength = secondQuoteIndex - prefixEndIndex;
                    String submitTimeMillisString = Bytes.toString(jobHistoryRaw, prefixEndIndex, numberLength);
                    try {
                        submitTimeMillis = Long.parseLong(submitTimeMillisString);
                    } catch (NumberFormatException nfe) {
                        LOG.error(" caught NFE during conversion of submit time " + submitTimeMillisString + " " + nfe.getMessage());
                        submitTimeMillis = 0;
                    }
                }
            }
            break;
        }
        return submitTimeMillis;
    }
}