signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class FhirTerser { /** * Returns < code > true < / code > if < code > theSource < / code > is in the compartment named < code > theCompartmentName < / code >
* belonging to resource < code > theTarget < / code >
* @ param theCompartmentName The name of the compartment
* @ param theSource The potential member of the compartment
* @ param theTarget The owner of the compartment . Note that both the resource type and ID must be filled in on this IIdType or the method will throw an { @ link IllegalArgumentException }
* @ return < code > true < / code > if < code > theSource < / code > is in the compartment
* @ throws IllegalArgumentException If theTarget does not contain both a resource type and ID */
public boolean isSourceInCompartmentForTarget ( String theCompartmentName , IBaseResource theSource , IIdType theTarget ) { } } | Validate . notBlank ( theCompartmentName , "theCompartmentName must not be null or blank" ) ; Validate . notNull ( theSource , "theSource must not be null" ) ; Validate . notNull ( theTarget , "theTarget must not be null" ) ; Validate . notBlank ( defaultString ( theTarget . getResourceType ( ) ) , "theTarget must have a populated resource type (theTarget.getResourceType() does not return a value)" ) ; Validate . notBlank ( defaultString ( theTarget . getIdPart ( ) ) , "theTarget must have a populated ID (theTarget.getIdPart() does not return a value)" ) ; String wantRef = theTarget . toUnqualifiedVersionless ( ) . getValue ( ) ; RuntimeResourceDefinition sourceDef = myContext . getResourceDefinition ( theSource ) ; if ( theSource . getIdElement ( ) . hasIdPart ( ) ) { if ( wantRef . equals ( sourceDef . getName ( ) + '/' + theSource . getIdElement ( ) . getIdPart ( ) ) ) { return true ; } } List < RuntimeSearchParam > params = sourceDef . getSearchParamsForCompartmentName ( theCompartmentName ) ; for ( RuntimeSearchParam nextParam : params ) { for ( String nextPath : nextParam . getPathsSplit ( ) ) { /* * DSTU3 and before just defined compartments as being ( e . g . ) named
* Patient with a path like CarePlan . subject
* R4 uses a fancier format like CarePlan . subject . where ( resolve ( ) is Patient )
* The following Regex is a hack to make that efficient at runtime . */
String wantType = null ; Pattern pattern = COMPARTMENT_MATCHER_PATH ; Matcher matcher = pattern . matcher ( nextPath ) ; if ( matcher . matches ( ) ) { nextPath = matcher . group ( 1 ) ; wantType = matcher . group ( 2 ) ; } for ( IBaseReference nextValue : getValues ( theSource , nextPath , IBaseReference . class ) ) { IIdType nextTargetId = nextValue . getReferenceElement ( ) ; String nextRef = nextTargetId . toUnqualifiedVersionless ( ) . getValue ( ) ; /* * If the reference isn ' t an explicit resource ID , but instead is just
* a resource object , we ' ll calculate its ID and treat the target
* as that . */
if ( isBlank ( nextRef ) && nextValue . getResource ( ) != null ) { IBaseResource nextTarget = nextValue . getResource ( ) ; nextTargetId = nextTarget . getIdElement ( ) . toUnqualifiedVersionless ( ) ; if ( ! nextTargetId . hasResourceType ( ) ) { String resourceType = myContext . getResourceDefinition ( nextTarget ) . getName ( ) ; nextTargetId . setParts ( null , resourceType , nextTargetId . getIdPart ( ) , null ) ; } nextRef = nextTargetId . getValue ( ) ; } if ( isNotBlank ( wantType ) ) { if ( ! nextTargetId . getResourceType ( ) . equals ( wantType ) ) { continue ; } } if ( wantRef . equals ( nextRef ) ) { return true ; } } } } return false ; |
public class SliderLayout { /** * start auto cycle .
* @ param delay delay time
* @ param duration animation duration time .
* @ param autoRecover if recover after user touches the slider . */
public void startAutoCycle ( long delay , long duration , boolean autoRecover ) { } } | if ( mCycleTimer != null ) mCycleTimer . cancel ( ) ; if ( mCycleTask != null ) mCycleTask . cancel ( ) ; if ( mResumingTask != null ) mResumingTask . cancel ( ) ; if ( mResumingTimer != null ) mResumingTimer . cancel ( ) ; mSliderDuration = duration ; mCycleTimer = new Timer ( ) ; mAutoRecover = autoRecover ; mCycleTask = new TimerTask ( ) { @ Override public void run ( ) { mh . sendEmptyMessage ( 0 ) ; } } ; mCycleTimer . schedule ( mCycleTask , delay , mSliderDuration ) ; mCycling = true ; mAutoCycle = true ; |
public class JDBCOutputFormat { /** * Configures this JDBCOutputFormat .
* @ param parameters
* Configuration containing all parameters . */
@ Override public void configure ( Configuration parameters ) { } } | this . driverName = parameters . getString ( DRIVER_KEY , null ) ; this . username = parameters . getString ( USERNAME_KEY , null ) ; this . password = parameters . getString ( PASSWORD_KEY , null ) ; this . dbURL = parameters . getString ( URL_KEY , null ) ; this . query = parameters . getString ( QUERY_KEY , null ) ; this . fieldCount = parameters . getInteger ( FIELD_COUNT_KEY , 0 ) ; this . batchInterval = parameters . getInteger ( BATCH_INTERVAL , DEFAULT_BATCH_INTERVERAL ) ; @ SuppressWarnings ( "unchecked" ) Class < Value > [ ] classes = new Class [ this . fieldCount ] ; this . fieldClasses = classes ; for ( int i = 0 ; i < this . fieldCount ; i ++ ) { @ SuppressWarnings ( "unchecked" ) Class < ? extends Value > clazz = ( Class < ? extends Value > ) parameters . getClass ( FIELD_TYPE_KEY + i , null ) ; if ( clazz == null ) { throw new IllegalArgumentException ( "Invalid configuration for JDBCOutputFormat: " + "No type class for parameter " + i ) ; } this . fieldClasses [ i ] = clazz ; } |
public class UnionSet { /** * Triggers combining . */
@ Override public Object [ ] toArray ( ) { } } | combine ( ) ; if ( combined == null ) { Set < E > emptySet = java . util . Collections . emptySet ( ) ; return emptySet . toArray ( ) ; } return combined . toArray ( ) ; |
public class AuthReplicatedSessionDataFactory { /** * ( non - Javadoc )
* @ see org . jdiameter . common . api . app . IAppSessionDataFactory # getAppSessionData ( java . lang . Class , java . lang . String ) */
@ Override public IAuthSessionData getAppSessionData ( Class < ? extends AppSession > clazz , String sessionId ) { } } | if ( clazz . equals ( ClientAuthSession . class ) ) { ClientAuthSessionDataReplicatedImpl data = new ClientAuthSessionDataReplicatedImpl ( sessionId , this . mobicentsCluster ) ; return data ; } else if ( clazz . equals ( ServerAuthSession . class ) ) { ServerAuthSessionDataReplicatedImpl data = new ServerAuthSessionDataReplicatedImpl ( sessionId , this . mobicentsCluster ) ; return data ; } throw new IllegalArgumentException ( ) ; |
public class ThriftCodecByteCodeGenerator { /** * Define the write method . */
private void defineWriteUnionMethod ( ) { } } | MethodDefinition write = new MethodDefinition ( a ( PUBLIC ) , "write" , null , arg ( "struct" , structType ) , arg ( "protocol" , TProtocol . class ) ) ; classDefinition . addMethod ( write ) ; // TProtocolWriter writer = new TProtocolWriter ( protocol ) ;
write . addLocalVariable ( type ( TProtocolWriter . class ) , "writer" ) ; write . newObject ( TProtocolWriter . class ) ; write . dup ( ) ; write . loadVariable ( "protocol" ) ; write . invokeConstructor ( type ( TProtocolWriter . class ) , type ( TProtocol . class ) ) ; write . storeVariable ( "writer" ) ; LocalVariableDefinition protocol = write . getLocalVariable ( "writer" ) ; // protocol . writeStructBegin ( " bonk " ) ;
write . loadVariable ( protocol ) . loadConstant ( metadata . getStructName ( ) ) . invokeVirtual ( TProtocolWriter . class , "writeStructBegin" , void . class , String . class ) ; // find the @ ThriftUnionId field
ThriftFieldMetadata idField = getOnlyElement ( metadata . getFields ( THRIFT_UNION_ID ) ) ; // load its value
loadFieldValue ( write , idField ) ; // switch ( fieldId )
List < CaseStatement > cases = new ArrayList < > ( ) ; for ( ThriftFieldMetadata field : metadata . getFields ( THRIFT_FIELD ) ) { cases . add ( caseStatement ( field . getId ( ) , field . getName ( ) + "-write-field" ) ) ; } write . switchStatement ( "default-write" , cases ) ; // write fields
for ( ThriftFieldMetadata field : metadata . getFields ( THRIFT_FIELD ) ) { write . visitLabel ( field . getName ( ) + "-write-field" ) ; writeField ( write , protocol , field ) ; write . gotoLabel ( "default-write" ) ; } write . visitLabel ( "default-write" ) . loadVariable ( protocol ) . invokeVirtual ( TProtocolWriter . class , "writeStructEnd" , void . class ) ; write . ret ( ) ; |
public class MPCoreUtils { /** * Validates if an url is a valid url address
* @ param url url address to validate
* @ return */
public static boolean validateUrl ( String url ) { } } | String [ ] schemes = { "https" } ; UrlValidator urlValidator = new UrlValidator ( schemes ) ; return urlValidator . isValid ( url ) ; |
public class AnnotatedGenericRowMapper { /** * { @ inheritDoc } */
@ Override public Map < String , ColAttrMapping > getColumnAttributeMappings ( ) { } } | if ( cachedColumnAttributeMappings == null ) { cachedColumnAttributeMappings = new HashMap < > ( ) ; ColumnAttribute [ ] annoMappings = getClass ( ) . getAnnotationsByType ( ColumnAttribute . class ) ; for ( ColumnAttribute colAttr : annoMappings ) { cachedColumnAttributeMappings . put ( colAttr . column ( ) , new ColAttrMapping ( colAttr . column ( ) , colAttr . attr ( ) , colAttr . attrClass ( ) ) ) ; } } return cachedColumnAttributeMappings ; |
public class ClassUtility {

    /**
     * Extract the first annotation of the requested type found while walking up the class
     * hierarchy. Interfaces are not yet supported.
     *
     * @param sourceClass the class (with its parent classes) to inspect
     * @param annotationClass the annotation to find
     * @param <A> the type of the requested annotation
     * @return the requested annotation, or null if none is found in the class hierarchy
     */
    public static <A extends Annotation> A getLastClassAnnotation(final Class<?> sourceClass, final Class<A> annotationClass) {
        // Walk the superclass chain until the annotation is found or Object's parent (null) is reached.
        for (Class<?> current = sourceClass; current != null; current = current.getSuperclass()) {
            final A annotation = current.getAnnotation(annotationClass);
            if (annotation != null) {
                return annotation;
            }
        }
        return null;
    }
}
public class Enforcer { /** * deletePermissionForUser deletes a permission for a user or role .
* Returns false if the user or role does not have the permission ( aka not affected ) .
* @ param user the user .
* @ param permission the permission , usually be ( obj , act ) . It is actually the rule without the subject .
* @ return succeeds or not . */
public boolean deletePermissionForUser ( String user , String ... permission ) { } } | List < String > params = new ArrayList < > ( ) ; params . add ( user ) ; Collections . addAll ( params , permission ) ; return removePolicy ( params ) ; |
public class LdapConnectionWrapper { /** * Retrieves a list of all groups the user is a member of .
* @ param dirContext a DirContext
* @ param ldapUser the LdapUser to retrieve group membership for
* @ return A list of Strings representing the fully qualified DN of each group
* @ throws NamingException if an exception is thrown
* @ since 1.4.0 */
public List < String > getGroups ( final DirContext dirContext , final LdapUser ldapUser ) throws NamingException { } } | LOGGER . debug ( "Retrieving groups for: " + ldapUser . getDN ( ) ) ; final List < String > groupDns = new ArrayList < > ( ) ; final String searchFilter = variableSubstitution ( USER_GROUPS_FILTER , ldapUser ) ; final SearchControls sc = new SearchControls ( ) ; sc . setSearchScope ( SearchControls . SUBTREE_SCOPE ) ; final NamingEnumeration < SearchResult > ne = dirContext . search ( BASE_DN , searchFilter , sc ) ; while ( hasMoreEnum ( ne ) ) { final SearchResult result = ne . next ( ) ; groupDns . add ( result . getNameInNamespace ( ) ) ; LOGGER . debug ( "Found group: " + result . getNameInNamespace ( ) + " for user: " + ldapUser . getDN ( ) ) ; } closeQuietly ( ne ) ; return groupDns ; |
public class AWSLambdaAsyncClientFactory { /** * The client returned from a builder .
* @ return client object */
@ Refreshable @ Requires ( beans = AWSLambdaConfiguration . class ) AWSLambdaAsync awsLambdaAsyncClient ( ) { } } | AWSLambdaAsyncClientBuilder builder = configuration . getBuilder ( ) ; return builder . build ( ) ; |
public class LoggerOnThread { /** * Move the current logfile to a backup name , taking care of any existing
* backup files according to the configured limit . */
private void addBackup ( ) { } } | // move the current log file to the newly formatted backup name
String newname = this . fileinfo + this . myFormat . format ( new Date ( HttpDispatcher . getApproxTime ( ) ) ) + this . extensioninfo ; File newFile = new File ( newname ) ; renameFile ( this . myFile , newFile ) ; // now see if we need to delete an existing backup to make room
if ( this . backups . size ( ) == getMaximumBackupFiles ( ) ) { File oldest = this . backups . removeLast ( ) ; if ( null != oldest && oldest . exists ( ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , getFileName ( ) + ": Purging oldest backup-> " + oldest . getName ( ) ) ; } oldest . delete ( ) ; } } this . backups . addFirst ( newFile ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , getFileName ( ) + ": number of backup files-> " + this . backups . size ( ) ) ; } |
public class CompoundMethodChooser { /** * ( non - Javadoc )
* @ see
* edu . umd . cs . findbugs . ba . JavaClassAndMethodChooser # choose ( edu . umd . cs . findbugs
* . ba . XMethod ) */
@ Override public boolean choose ( XMethod method ) { } } | for ( JavaClassAndMethodChooser chooser : conjunctList ) { if ( ! chooser . choose ( method ) ) { return false ; } } return true ; |
public class CodepointHelper { /** * Wrap the string with the specified bidi control
* @ param sStr
* source string
* @ param cChar
* source char
* @ return The wrapped string */
@ Nullable public static String wrapBidi ( @ Nullable final String sStr , final char cChar ) { } } | switch ( cChar ) { case RLE : return _wrap ( sStr , RLE , PDF ) ; case RLO : return _wrap ( sStr , RLO , PDF ) ; case LRE : return _wrap ( sStr , LRE , PDF ) ; case LRO : return _wrap ( sStr , LRO , PDF ) ; case RLM : return _wrap ( sStr , RLM , RLM ) ; case LRM : return _wrap ( sStr , LRM , LRM ) ; default : return sStr ; } |
public class EpicsApi { /** * Get a Pager of all epics of the requested group and its subgroups .
* < pre > < code > GitLab Endpoint : GET / groups / : id / epics < / code > < / pre >
* @ param groupIdOrPath the group ID , path of the group , or a Group instance holding the group ID or path
* @ param authorId returns epics created by the given user id
* @ param labels return epics matching a comma separated list of labels names .
* Label names from the epic group or a parent group can be used
* @ param itemsPerPage the number of issues per page
* @ param orderBy return epics ordered by CREATED _ AT or UPDATED _ AT . Default is CREATED _ AT
* @ param sortOrder return epics sorted in ASC or DESC order . Default is DESC
* @ param search search epics against their title and description
* @ return the Pager of matching epics of the requested group and its subgroups
* @ throws GitLabApiException if any exception occurs */
public Pager < Epic > getEpics ( Object groupIdOrPath , Integer authorId , String labels , EpicOrderBy orderBy , SortOrder sortOrder , String search , int itemsPerPage ) throws GitLabApiException { } } | GitLabApiForm formData = new GitLabApiForm ( ) . withParam ( "author_id" , authorId ) . withParam ( "labels" , labels ) . withParam ( "order_by" , orderBy ) . withParam ( "sort" , sortOrder ) . withParam ( "search" , search ) ; return ( new Pager < Epic > ( this , Epic . class , itemsPerPage , formData . asMap ( ) , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" ) ) ; |
public class ProviderFactory { /** * Liberty Change for CXF Begin */
public static Object createJsonpProvider ( ) { } } | JsonProvider jsonProvider = AccessController . doPrivileged ( new PrivilegedAction < JsonProvider > ( ) { @ Override public JsonProvider run ( ) { try { Bundle b = FrameworkUtil . getBundle ( ProviderFactory . class ) ; if ( b != null ) { BundleContext bc = b . getBundleContext ( ) ; ServiceReference < JsonProvider > sr = bc . getServiceReference ( JsonProvider . class ) ; return ( JsonProvider ) bc . getService ( sr ) ; } } catch ( NoClassDefFoundError ncdfe ) { // ignore - return null
} return null ; } } ) ; return new JsonPProvider ( jsonProvider ) ; |
public class FXBindableASTTransformation { /** * Generates a SyntaxErrorMessage based on the current SourceUnit , AnnotationNode , and a specified
* error message .
* @ param sourceUnit The SourceUnit
* @ param node The node that was annotated
* @ param msg The error message to display */
private void generateSyntaxErrorMessage ( SourceUnit sourceUnit , AnnotationNode node , String msg ) { } } | SyntaxException error = new SyntaxException ( msg , node . getLineNumber ( ) , node . getColumnNumber ( ) ) ; sourceUnit . getErrorCollector ( ) . addErrorAndContinue ( new SyntaxErrorMessage ( error , sourceUnit ) ) ; |
public class DDUtil { /** * Matches the original getVerticalDropLocation ( Element , int , int , double ) method . < p > */
private static VerticalDropLocation internalGetVerticalDropLocation ( Element element , int offsetHeight , int clientY , double topBottomRatio ) { } } | // Event coordinates are relative to the viewport , element absolute
// position is relative to the document . Make element position relative
// to viewport by adjusting for viewport scrolling . See # 6021
int elementTop = element . getAbsoluteTop ( ) - Window . getScrollTop ( ) ; int fromTop = clientY - elementTop ; float percentageFromTop = ( fromTop / ( float ) offsetHeight ) ; if ( percentageFromTop < topBottomRatio ) { return VerticalDropLocation . TOP ; } else if ( percentageFromTop > 1 - topBottomRatio ) { return VerticalDropLocation . BOTTOM ; } else { return VerticalDropLocation . MIDDLE ; } |
public class WVideo { /** * Sets the alternative text to display when the video clip can not be played .
* @ param altText the text to set . */
public void setAltText ( final String altText ) { } } | String currAltText = getAltText ( ) ; if ( ! Objects . equals ( altText , currAltText ) ) { getOrCreateComponentModel ( ) . altText = altText ; } |
public class LogoutUserRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( LogoutUserRequest logoutUserRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( logoutUserRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( logoutUserRequest . getAccountId ( ) , ACCOUNTID_BINDING ) ; protocolMarshaller . marshall ( logoutUserRequest . getUserId ( ) , USERID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class BinaryString { /** * Returns whether this contains ` substring ` or not .
* Same to like ' % substring % ' . */
public boolean contains ( final BinaryString substring ) { } } | ensureMaterialized ( ) ; substring . ensureMaterialized ( ) ; if ( substring . sizeInBytes == 0 ) { return true ; } int find = SegmentsUtil . find ( segments , offset , sizeInBytes , substring . segments , substring . offset , substring . sizeInBytes ) ; return find != - 1 ; |
public class FacetCheckBoxWidget { /** * TODO : This is an eyesore , Natxo needs to clean this */
@ Override public HandlerRegistration addChangeHandler ( final ChangeHandler changeHandler ) { } } | ValueChangeHandler valueChangeHandler = new ValueChangeHandler ( ) { @ Override public void onValueChange ( ValueChangeEvent valueChangeEvent ) { changeHandler . onChange ( null ) ; } } ; for ( CheckBox checkBox : checkBoxes ) { checkBox . addValueChangeHandler ( valueChangeHandler ) ; } return null ; |
public class LogAnalyticsInner { /** * Export logs that show total throttled Api requests for this subscription in the given time window .
* @ param location The location upon which virtual - machine - sizes is queried .
* @ param parameters Parameters supplied to the LogAnalytics getThrottledRequests Api .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the LogAnalyticsOperationResultInner object */
public Observable < LogAnalyticsOperationResultInner > beginExportThrottledRequestsAsync ( String location , ThrottledRequestsInput parameters ) { } } | return beginExportThrottledRequestsWithServiceResponseAsync ( location , parameters ) . map ( new Func1 < ServiceResponse < LogAnalyticsOperationResultInner > , LogAnalyticsOperationResultInner > ( ) { @ Override public LogAnalyticsOperationResultInner call ( ServiceResponse < LogAnalyticsOperationResultInner > response ) { return response . body ( ) ; } } ) ; |
public class BeanUtils { /** * 将一个Bean的数据装换到另外一个 ( 需实现setter和getter , 以及一个默认无参构造函数 )
* @ param object 一个Bean
* @ param clazz 另外一个Bean
* @ param < T > 另外Bean类型
* @ return { @ link T }
* @ throws InstantiationException 异常
* @ throws IllegalAccessException 异常
* @ throws InvocationTargetException 异常
* @ since 1.1.1 */
public static < T > T bean2Another ( Object object , Class < T > clazz ) throws IllegalAccessException , InstantiationException , InvocationTargetException { } } | return bean2Another ( object , clazz , clazz . newInstance ( ) ) ; |
public class Firmata { /** * Send a series of raw bytes over the serial port to a Firmata supported device .
* @ param rawBytes byte array to be sent over the SerialPort OutputStream .
* @ return True if the bytes were sent . False if the bytes were not sent . */
public synchronized Boolean sendRaw ( byte ... rawBytes ) { } } | if ( ! start ( ) ) { log . error ( "Firmata library is not connected / started! Cannot send bytes {}" , FirmataHelper . bytesToHexString ( rawBytes ) ) ; return false ; } try { serialPort . getOutputStream ( ) . write ( rawBytes ) ; return true ; } catch ( IOException e ) { log . error ( "Unable to transmit raw bytes through serial port. Bytes: {}" , FirmataHelper . bytesToHexString ( rawBytes ) ) ; stop ( ) ; } return false ; |
public class MethodSimulator { /** * Merges a stored element to the local variables .
* @ param index The index of the variable
* @ param type The type of the variable or the element ( whatever is more specific )
* @ param element The element to merge */
private void mergeElementStore ( final int index , final String type , final Element element ) { } } | // new element must be created for immutability
final String elementType = type . equals ( Types . OBJECT ) ? determineLeastSpecificType ( element . getTypes ( ) . toArray ( new String [ element . getTypes ( ) . size ( ) ] ) ) : type ; final Element created = new Element ( elementType ) ; created . merge ( element ) ; localVariables . merge ( index , created , Element :: merge ) ; |
public class EnvUtil {

    // Compiled once (Patterns are immutable and thread-safe); matches a Windows-style
    // path such as "C:\foo" — a drive letter, case-insensitive, followed by the rest.
    private static final Pattern WINDOWS_DRIVE_PATTERN = Pattern.compile("^(?i)([A-Z]):(.*)$");

    /**
     * Fix path on Windows machines, i.e. convert 'c:\...\' to '/c/..../'.
     *
     * @param path path to fix
     * @return the fixed path; non-Windows paths are returned unchanged
     */
    public static String fixupPath(String path) {
        // Hack-fix for mounting on Windows where the ${projectDir} variable and
        // others contain backslashes and what not. Related to #188
        Matcher matcher = WINDOWS_DRIVE_PATTERN.matcher(path);
        if (matcher.matches()) {
            String unixified = "/" + matcher.group(1).toLowerCase() + matcher.group(2);
            return unixified.replace("\\", "/");
        }
        return path;
    }
}
public class BinarySimpleHeap { /** * swap data in node n with data in node m
* @ param n index of the node containing a data
* @ param m index of the node containing a data */
private void swap ( int n , int m ) { } } | int eM = elements [ m ] ; int eN = elements [ n ] ; elements [ m ] = eN ; elements [ n ] = eM ; positions [ eM ] = n ; positions [ eN ] = m ; |
public class ImageSubsamplingImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eUnset ( int featureID ) { } } | switch ( featureID ) { case AfplibPackage . IMAGE_SUBSAMPLING__SDFS : getSdfs ( ) . clear ( ) ; return ; } super . eUnset ( featureID ) ; |
public class GcloudArgs { /** * Returns { @ code [ - - name , value ] } or { @ code [ ] } if value is null . */
public static List < String > get ( String name , @ Nullable String value ) { } } | return Args . string ( name , value ) ; |
public class KernelResolverRepository { /** * Checks whether { @ code featureList } contains a feature with the same name and version as { @ code feature } .
* @ param featureList the list of features
* @ param feature the feature
* @ return { @ code true } if { @ code featureList } contains a feature with the same symbolic name and version as { @ code feature } , otherwise { @ code false } */
private boolean listContainsDuplicate ( List < ProvisioningFeatureDefinition > featureList , ProvisioningFeatureDefinition feature ) { } } | for ( ProvisioningFeatureDefinition f : featureList ) { if ( hasSameSymbolicNameAndVersion ( feature , f ) ) { return true ; } } return false ; |
public class CmsBasicDialog { /** * Initializes the dialog window . < p >
* @ param width the dialog width
* @ return the window to be used by dialogs */
public static Window prepareWindow ( DialogWidth width ) { } } | Window window = new Window ( ) ; window . setModal ( true ) ; window . setClosable ( true ) ; int pageWidth = Page . getCurrent ( ) . getBrowserWindowWidth ( ) ; if ( ( ( width == DialogWidth . wide ) && ( pageWidth < 810 ) ) || ( ( width == DialogWidth . narrow ) && ( pageWidth < 610 ) ) ) { // in case the available page width does not allow the desired width , use max
width = DialogWidth . max ; } if ( width == DialogWidth . max ) { // in case max width would result in a width very close to wide or narrow , use their static width instead of relative width
if ( ( pageWidth >= 610 ) && ( pageWidth < 670 ) ) { width = DialogWidth . narrow ; } else if ( ( pageWidth >= 810 ) && ( pageWidth < 890 ) ) { width = DialogWidth . wide ; } } switch ( width ) { case content : // do nothing
break ; case wide : window . setWidth ( "800px" ) ; break ; case max : window . setWidth ( "90%" ) ; break ; case narrow : default : window . setWidth ( "600px" ) ; break ; } window . center ( ) ; return window ; |
public class Quaterniond { /** * / * ( non - Javadoc )
* @ see org . joml . Quaterniondc # integrate ( double , double , double , double , org . joml . Quaterniond ) */
public Quaterniond integrate ( double dt , double vx , double vy , double vz , Quaterniond dest ) { } } | double thetaX = dt * vx * 0.5 ; double thetaY = dt * vy * 0.5 ; double thetaZ = dt * vz * 0.5 ; double thetaMagSq = thetaX * thetaX + thetaY * thetaY + thetaZ * thetaZ ; double s ; double dqX , dqY , dqZ , dqW ; if ( thetaMagSq * thetaMagSq / 24.0 < 1E-8 ) { dqW = 1.0 - thetaMagSq * 0.5 ; s = 1.0 - thetaMagSq / 6.0 ; } else { double thetaMag = Math . sqrt ( thetaMagSq ) ; double sin = Math . sin ( thetaMag ) ; s = sin / thetaMag ; dqW = Math . cosFromSin ( sin , thetaMag ) ; } dqX = thetaX * s ; dqY = thetaY * s ; dqZ = thetaZ * s ; /* Pre - multiplication */
dest . set ( dqW * x + dqX * w + dqY * z - dqZ * y , dqW * y - dqX * z + dqY * w + dqZ * x , dqW * z + dqX * y - dqY * x + dqZ * w , dqW * w - dqX * x - dqY * y - dqZ * z ) ; return dest ; |
public class PortletCachingHeaderUtils { /** * Set the Last - Modified , CacheControl , and Expires headers based on the maxAge , publicScope and
* lastModified .
* @ param maxAge Maximum age for the content , follows the portlet rules ( - 1 cache forever , 0
* cache never , N cache seconds )
* @ param publicScope If the content is public
* @ param lastModified The last modification timestamp of the content
* @ param portletResourceOutputHandler The handler to write the headers to */
public static void setCachingHeaders ( int maxAge , boolean publicScope , long lastModified , PortletResourceOutputHandler portletResourceOutputHandler ) { } } | if ( maxAge != 0 ) { portletResourceOutputHandler . setDateHeader ( "Last-Modified" , lastModified ) ; if ( publicScope ) { portletResourceOutputHandler . setHeader ( "CacheControl" , "public" ) ; } else { portletResourceOutputHandler . setHeader ( "CacheControl" , "private" ) ; } if ( maxAge < 0 ) { // If caching " forever " set expires and max - age to 1 year
maxAge = YEAR_OF_SECONDS ; } portletResourceOutputHandler . setDateHeader ( "Expires" , System . currentTimeMillis ( ) + TimeUnit . SECONDS . toMillis ( maxAge ) ) ; portletResourceOutputHandler . addHeader ( "CacheControl" , "max-age=" + maxAge ) ; } |
public class Traverson { /** * Iterates over pages by following ' next ' links . For every page , a { @ code Traverson } is created and provided as a
* parameter to the callback function .
* < pre >
* next
* . . . - - & gt ; HalRepresentation - - & gt ; HalRepresentation
* v N item v N item
* & lt ; embedded type & gt ; & lt ; embedded type & gt ;
* < / pre >
* The { @ code Traverson } is backed by a { @ link HalRepresentation } with { @ link EmbeddedTypeInfo } . This way it
* is possible to access items embedded into the page resources as specific subtypes of HalRepresentation .
* Iteration stops if the callback returns { @ code false } , or if the last page is processed .
* @ param embeddedTypeInfo type information about possibly embedded items .
* @ param pageHandler the callback used to process pages of items .
* @ throws IOException if a low - level I / O problem ( unexpected end - of - input , network error ) occurs .
* @ throws JsonParseException if the json document can not be parsed by Jackson ' s ObjectMapper
* @ throws JsonMappingException if the input JSON structure can not be mapped to the specified HalRepresentation type
* @ since 1.0.0 */
public void paginateNext ( final EmbeddedTypeInfo embeddedTypeInfo , final PageHandler pageHandler ) throws IOException { } } | paginateAs ( "next" , HalRepresentation . class , embeddedTypeInfo , pageHandler ) ; |
public class WaveTypeBase { /** * Return the required method parameter list to handle this WaveType .
* @ return the parameter list ( Type1 arg1 , Type2 arg2 . . . ) */
public String getItems ( ) { } } | final StringBuilder sb = new StringBuilder ( ) ; boolean first = true ; for ( final WaveItem < ? > waveItem : items ( ) ) { if ( first ) { first = false ; } else { sb . append ( ", " ) ; } String fullName = waveItem . type ( ) instanceof ParameterizedType ? ( ( ParameterizedType ) waveItem . type ( ) ) . toString ( ) : ( ( Class < ? > ) waveItem . type ( ) ) . getName ( ) ; sb . append ( fullName ) . append ( " " ) ; fullName = fullName . replaceAll ( "[<>]" , "" ) ; if ( waveItem . name ( ) == null || waveItem . name ( ) . isEmpty ( ) ) { sb . append ( ObjectUtility . lowerFirstChar ( fullName . substring ( fullName . lastIndexOf ( '.' ) + 1 ) ) ) ; } else { sb . append ( waveItem . name ( ) ) ; } } return sb . toString ( ) ; |
public class JsonBuiltin { /** * - - - IMPLEMENTED PARSER METHODS - - - */
@ Override public Object parse ( String source ) throws Exception { } } | Source s = sources . poll ( ) ; final char [ ] chars = source . toCharArray ( ) ; if ( s == null ) { s = new Source ( chars ) ; } else { s . set ( chars ) ; } final Object result = parseNext ( s ) ; if ( sources . size ( ) > Config . POOL_SIZE ) { return result ; } sources . add ( s ) ; return result ; |
public class HomographyInducedStereoLinePt { /** * Specify the fundamental matrix and the camera 2 epipole .
* @ param F Fundamental matrix .
* @ param e2 Epipole for camera 2 . If null it will be computed internally . */
public void setFundamental ( DMatrixRMaj F , Point3D_F64 e2 ) { } } | this . F = F ; if ( e2 != null ) this . e2 . set ( e2 ) ; else { MultiViewOps . extractEpipoles ( F , new Point3D_F64 ( ) , this . e2 ) ; } |
public class ChainWriter { /** * @ see TextInXhtmlEncoder
* @ param value the value to be encoded */
public ChainWriter encodeXhtml ( Object value ) throws IOException { } } | if ( value != null ) { if ( value instanceof Writable && ! ( ( Writable ) value ) . isFastToString ( ) ) { // Avoid unnecessary toString calls
Coercion . write ( value , textInXhtmlEncoder , out ) ; } else { String str = Coercion . toString ( value ) ; BundleLookupMarkup lookupMarkup ; BundleLookupThreadContext threadContext = BundleLookupThreadContext . getThreadContext ( false ) ; if ( threadContext != null ) { lookupMarkup = threadContext . getLookupMarkup ( str ) ; } else { lookupMarkup = null ; } if ( lookupMarkup != null ) lookupMarkup . appendPrefixTo ( MarkupType . XHTML , out ) ; textInXhtmlEncoder . write ( str , out ) ; if ( lookupMarkup != null ) lookupMarkup . appendSuffixTo ( MarkupType . XHTML , out ) ; } } return this ; |
public class AzureAsyncOperationPollStrategy { /** * Try to create a new AzureAsyncOperationPollStrategy object that will poll the provided
* operation resource URL . If the provided HttpResponse doesn ' t have an Azure - AsyncOperation
* header or if the header is empty , then null will be returned .
* @ param restProxy The proxy object that is attempting to create a PollStrategy .
* @ param methodParser The method parser that describes the service interface method that
* initiated the long running operation .
* @ param originalHttpRequest The original HTTP request that initiated the long running
* operation .
* @ param httpResponse The HTTP response that the required header values for this pollStrategy
* will be read from .
* @ param delayInMilliseconds The delay ( in milliseconds ) that the resulting pollStrategy will
* use when polling . */
static PollStrategy tryToCreate ( RestProxy restProxy , SwaggerMethodParser methodParser , HttpRequest originalHttpRequest , HttpResponse httpResponse , long delayInMilliseconds ) { } } | String urlHeader = getHeader ( httpResponse ) ; URL azureAsyncOperationUrl = null ; if ( urlHeader != null ) { try { azureAsyncOperationUrl = new URL ( urlHeader ) ; } catch ( MalformedURLException ignored ) { } } urlHeader = httpResponse . headerValue ( "Location" ) ; URL locationUrl = null ; if ( urlHeader != null ) { try { locationUrl = new URL ( urlHeader ) ; } catch ( MalformedURLException ignored ) { } } return azureAsyncOperationUrl != null ? new AzureAsyncOperationPollStrategy ( new AzureAsyncOperationPollStrategyData ( restProxy , methodParser , azureAsyncOperationUrl , originalHttpRequest . url ( ) , locationUrl , originalHttpRequest . httpMethod ( ) , delayInMilliseconds ) ) : null ; |
public class Matrix3D { /** * Invert this matrix .
* @ return true if successful */
@ Override public boolean invert ( ) { } } | double determinant = determinant ( ) ; if ( determinant == 0 ) { return false ; } // first row
double t00 = determinant3x3 ( m11 , m12 , m13 , m21 , m22 , m23 , m31 , m32 , m33 ) ; double t01 = - determinant3x3 ( m10 , m12 , m13 , m20 , m22 , m23 , m30 , m32 , m33 ) ; double t02 = determinant3x3 ( m10 , m11 , m13 , m20 , m21 , m23 , m30 , m31 , m33 ) ; double t03 = - determinant3x3 ( m10 , m11 , m12 , m20 , m21 , m22 , m30 , m31 , m32 ) ; // second row
double t10 = - determinant3x3 ( m01 , m02 , m03 , m21 , m22 , m23 , m31 , m32 , m33 ) ; double t11 = determinant3x3 ( m00 , m02 , m03 , m20 , m22 , m23 , m30 , m32 , m33 ) ; double t12 = - determinant3x3 ( m00 , m01 , m03 , m20 , m21 , m23 , m30 , m31 , m33 ) ; double t13 = determinant3x3 ( m00 , m01 , m02 , m20 , m21 , m22 , m30 , m31 , m32 ) ; // third row
double t20 = determinant3x3 ( m01 , m02 , m03 , m11 , m12 , m13 , m31 , m32 , m33 ) ; double t21 = - determinant3x3 ( m00 , m02 , m03 , m10 , m12 , m13 , m30 , m32 , m33 ) ; double t22 = determinant3x3 ( m00 , m01 , m03 , m10 , m11 , m13 , m30 , m31 , m33 ) ; double t23 = - determinant3x3 ( m00 , m01 , m02 , m10 , m11 , m12 , m30 , m31 , m32 ) ; // fourth row
double t30 = - determinant3x3 ( m01 , m02 , m03 , m11 , m12 , m13 , m21 , m22 , m23 ) ; double t31 = determinant3x3 ( m00 , m02 , m03 , m10 , m12 , m13 , m20 , m22 , m23 ) ; double t32 = - determinant3x3 ( m00 , m01 , m03 , m10 , m11 , m13 , m20 , m21 , m23 ) ; double t33 = determinant3x3 ( m00 , m01 , m02 , m10 , m11 , m12 , m20 , m21 , m22 ) ; // transpose and divide by the determinant
m00 = t00 / determinant ; m01 = t10 / determinant ; m02 = t20 / determinant ; m03 = t30 / determinant ; m10 = t01 / determinant ; m11 = t11 / determinant ; m12 = t21 / determinant ; m13 = t31 / determinant ; m20 = t02 / determinant ; m21 = t12 / determinant ; m22 = t22 / determinant ; m23 = t32 / determinant ; m30 = t03 / determinant ; m31 = t13 / determinant ; m32 = t23 / determinant ; m33 = t33 / determinant ; return true ; |
public class MetadataStore { /** * convert Object to String depending on key .
* StoreRepository takes only StorageEngine < ByteArray , byte [ ] > and for
* persistence on disk we need to convert them to String . < br >
* @ param key
* @ param value
* @ return */
private Versioned < Object > convertStringToObject ( String key , Versioned < String > value ) { } } | Object valueObject = null ; if ( CLUSTER_KEY . equals ( key ) ) { valueObject = clusterMapper . readCluster ( new StringReader ( value . getValue ( ) ) ) ; } else if ( STORES_KEY . equals ( key ) ) { valueObject = storeMapper . readStoreList ( new StringReader ( value . getValue ( ) ) ) ; } else if ( SERVER_STATE_KEY . equals ( key ) ) { valueObject = VoldemortState . valueOf ( value . getValue ( ) ) ; } else if ( NODE_ID_KEY . equals ( key ) ) { valueObject = Integer . parseInt ( value . getValue ( ) ) ; } else if ( SLOP_STREAMING_ENABLED_KEY . equals ( key ) || PARTITION_STREAMING_ENABLED_KEY . equals ( key ) || READONLY_FETCH_ENABLED_KEY . equals ( key ) || QUOTA_ENFORCEMENT_ENABLED_KEY . equals ( key ) ) { valueObject = Boolean . parseBoolean ( value . getValue ( ) ) ; } else if ( REBALANCING_STEAL_INFO . equals ( key ) ) { String valueString = value . getValue ( ) ; if ( valueString . startsWith ( "[" ) ) { valueObject = RebalancerState . create ( valueString ) ; } else { valueObject = new RebalancerState ( Arrays . asList ( RebalanceTaskInfo . create ( valueString ) ) ) ; } } else if ( REBALANCING_SOURCE_CLUSTER_XML . equals ( key ) ) { if ( value . getValue ( ) != null && value . getValue ( ) . length ( ) > 0 ) { valueObject = clusterMapper . readCluster ( new StringReader ( value . getValue ( ) ) ) ; } } else if ( REBALANCING_SOURCE_STORES_XML . equals ( key ) ) { if ( value . getValue ( ) != null && value . getValue ( ) . length ( ) > 0 ) { valueObject = storeMapper . readStoreList ( new StringReader ( value . getValue ( ) ) ) ; } } else { throw new VoldemortException ( "Unhandled key:'" + key + "' for String to Object serialization." ) ; } return new Versioned < Object > ( valueObject , value . getVersion ( ) ) ; |
public class MapUtils { /** * Gets the union of the { @ code keySet ( ) } s of all provided { @ link Maps } s . */
public static < K , V > ImmutableSet < K > allKeys ( final Iterable < ? extends Map < K , V > > maps ) { } } | final ImmutableSet . Builder < K > builder = ImmutableSet . builder ( ) ; for ( final Map < K , V > map : maps ) { builder . addAll ( map . keySet ( ) ) ; } return builder . build ( ) ; |
public class HookedXADataSource { @ Override public XAConnection getXAConnection ( ) throws SQLException { } } | final XAConnection xaconn = super . getXAConnection ( ) ; if ( newbornConnectionHook != null ) { newbornConnectionHook . hook ( xaconn ) ; } return xaconn ; |
public class VcfUtils { /** * Create a VCFHeader to use with VariantContextWriter .
* @ param cohortNames List of cohort names
* @ param annotations List of annotations to include in the header
* @ param formatFields List of formats
* @ param formatFieldsType List of format types
* @ param formatFieldsDescr List of format descriptions
* @ param sampleNames List of sample names
* @ param converter Function to convert sample names
* @ return The VCF header
* @ deprecated use org . opencb . biodata . tools . variant . converters . avro . VariantStudyMetadataToVCFHeaderConverter */
@ Deprecated public static VCFHeader createVCFHeader ( List < String > cohortNames , List < String > annotations , List < String > formatFields , List < String > formatFieldsType , List < String > formatFieldsDescr , List < String > sampleNames , Function < String , String > converter ) { } } | LinkedHashSet < VCFHeaderLine > meta = new LinkedHashSet < > ( ) ; // sample name management
AtomicReference < Function < String , String > > sampleNameConverter = new AtomicReference < > ( s -> s ) ; if ( converter != null ) { sampleNameConverter . set ( converter ) ; } Map < String , String > sampleNameMapping = new ConcurrentHashMap < > ( ) ; // sampleNameMapping . putAll ( sampleNames . stream ( ) . collect (
// Collectors . toMap ( s - > s , s - > sampleNameConverter . get ( ) . apply ( s ) ) ) ) ;
// List < String > names = sampleNames . stream ( ) . map ( s - > sampleNameMapping . get ( s ) ) . collect ( Collectors . toList ( ) ) ;
List < String > names = sampleNames ; /* FILTER */
meta . add ( new VCFFilterHeaderLine ( "PASS" , "Valid variant" ) ) ; meta . add ( new VCFFilterHeaderLine ( "." , "No FILTER info" ) ) ; /* INFO */
// cohort info
for ( String cohortName : cohortNames ) { if ( cohortName . toUpperCase ( ) . equals ( "ALL" ) ) { // meta . add ( new VCFInfoHeaderLine ( VCFConstants . ALLELE _ COUNT _ KEY , VCFHeaderLineCount . A ,
// VCFHeaderLineType . Integer , " Total number of alternate alleles in called genotypes , "
// + " for each ALT allele , in the same order as listed " ) ) ;
meta . add ( new VCFInfoHeaderLine ( VCFConstants . ALLELE_FREQUENCY_KEY , VCFHeaderLineCount . A , VCFHeaderLineType . Float , "Allele Frequency, for each ALT allele, calculated from AC and AN, in the range (0,1)," + " in the same order as listed" ) ) ; // meta . add ( new VCFInfoHeaderLine ( VCFConstants . ALLELE _ NUMBER _ KEY , 1,
// VCFHeaderLineType . Integer , " Total number of alleles in called genotypes " ) ) ;
} else { meta . add ( new VCFInfoHeaderLine ( cohortName + "_" + VCFConstants . ALLELE_FREQUENCY_KEY , VCFHeaderLineCount . A , VCFHeaderLineType . Float , "Allele frequency in the " + cohortName + " cohort calculated from AC and AN, in the range (0,1)," + " in the same order as listed" ) ) ; } } // annotations
if ( annotations != null && annotations . size ( ) > 0 ) { meta . add ( new VCFInfoHeaderLine ( STATS_INFO_KEY , 1 , VCFHeaderLineType . String , "Allele frequency " + " for cohorts (separated by |), e.g.: ALL:0.0564705|MXL:0.0886758" ) ) ; meta . add ( new VCFInfoHeaderLine ( ANNOTATION_INFO_KEY , 1 , VCFHeaderLineType . String , "Consequence annotations (separated " + " by &) from CellBase. Format: " + String . join ( "|" , ANNOTATION_INFO_VALUES ) ) ) ; meta . add ( new VCFInfoHeaderLine ( POPFREQ_INFO_KEY , 1 , VCFHeaderLineType . String , "Alternate allele frequencies " + " for study and population (separated by |), e.g.: 1kG_phase3_IBS:0.06542056|1kG_phase3_CEU:0.08585858" ) ) ; } /* FORMAT */
// meta . add ( new VCFFormatHeaderLine ( " GT " , 1 , VCFHeaderLineType . String , " Genotype " ) ) ;
// meta . add ( new VCFFormatHeaderLine ( " PF " , 1 , VCFHeaderLineType . Integer ,
// " Variant was PASS ( 1 ) filter in original vcf " ) ) ;
if ( formatFields != null ) { for ( int i = 0 ; i < formatFields . size ( ) ; i ++ ) { switch ( formatFields . get ( i ) ) { case "GT" : meta . add ( new VCFFormatHeaderLine ( formatFields . get ( i ) , 1 , VCFHeaderLineType . String , "Genotype" ) ) ; break ; case "AD" : meta . add ( new VCFFormatHeaderLine ( formatFields . get ( i ) , 1 , VCFHeaderLineType . String , "Allelic depth" ) ) ; break ; case "DP" : meta . add ( new VCFFormatHeaderLine ( formatFields . get ( i ) , 1 , VCFHeaderLineType . Integer , "Read depth" ) ) ; break ; case "GQ" : meta . add ( new VCFFormatHeaderLine ( formatFields . get ( i ) , 1 , VCFHeaderLineType . Integer , "Genotype quality" ) ) ; break ; case "PL" : meta . add ( new VCFFormatHeaderLine ( formatFields . get ( i ) , 1 , VCFHeaderLineType . Integer , "Normalized, Phred-scaled likelihoods for genotypes as defined in the VCF specification" ) ) ; break ; default : meta . add ( new VCFFormatHeaderLine ( formatFields . get ( i ) , 1 , VCFHeaderLineType . valueOf ( formatFieldsType . get ( i ) ) , formatFieldsDescr . get ( i ) ) ) ; break ; } } } return new VCFHeader ( meta , names ) ; |
public class HivePurgerQueryTemplate { /** * If staging table doesn ' t exist , it will create a staging table . */
public static String getCreateTableQuery ( String completeNewTableName , String likeTableDbName , String likeTableName , String location ) { } } | return getCreateTableQuery ( completeNewTableName , likeTableDbName , likeTableName ) + " LOCATION " + PartitionUtils . getQuotedString ( location ) ; |
public class JobsImpl { /** * Adds a job to the specified account .
* The Batch service supports two ways to control the work done as part of a job . In the first approach , the user specifies a Job Manager task . The Batch service launches this task when it is ready to start the job . The Job Manager task controls all other tasks that run under this job , by using the Task APIs . In the second approach , the user directly controls the execution of tasks under an active job , by using the Task APIs . Also note : when naming jobs , avoid including sensitive information such as user names or secret project names . This information may appear in telemetry logs accessible to Microsoft Support engineers .
* @ param job The job to be added .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < Void > addAsync ( JobAddParameter job , final ServiceCallback < Void > serviceCallback ) { } } | return ServiceFuture . fromHeaderResponse ( addWithServiceResponseAsync ( job ) , serviceCallback ) ; |
public class DeleteTransitGatewayRouteTableRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < DeleteTransitGatewayRouteTableRequest > getDryRunRequest ( ) { } } | Request < DeleteTransitGatewayRouteTableRequest > request = new DeleteTransitGatewayRouteTableRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ; |
public class OffsetTime { /** * Obtains an instance of { @ code OffsetTime } from a temporal object .
* This obtains an offset time based on the specified temporal .
* A { @ code TemporalAccessor } represents an arbitrary set of date and time information ,
* which this factory converts to an instance of { @ code OffsetTime } .
* The conversion extracts and combines the { @ code ZoneOffset } and the
* { @ code LocalTime } from the temporal object .
* Implementations are permitted to perform optimizations such as accessing
* those fields that are equivalent to the relevant objects .
* This method matches the signature of the functional interface { @ link TemporalQuery }
* allowing it to be used as a query via method reference , { @ code OffsetTime : : from } .
* @ param temporal the temporal object to convert , not null
* @ return the offset time , not null
* @ throws DateTimeException if unable to convert to an { @ code OffsetTime } */
public static OffsetTime from ( TemporalAccessor temporal ) { } } | if ( temporal instanceof OffsetTime ) { return ( OffsetTime ) temporal ; } try { LocalTime time = LocalTime . from ( temporal ) ; ZoneOffset offset = ZoneOffset . from ( temporal ) ; return new OffsetTime ( time , offset ) ; } catch ( DateTimeException ex ) { throw new DateTimeException ( "Unable to obtain OffsetTime from TemporalAccessor: " + temporal + " of type " + temporal . getClass ( ) . getName ( ) , ex ) ; } |
public class AsyncWork { /** * Call onready on success , or forward error / cancellation to onErrorAndCancel . */
public final void listenInline ( Listener < T > onready , ISynchronizationPoint < TError > onErrorAndCancel ) { } } | listenInline ( new AsyncWorkListener < T , TError > ( ) { @ Override public void ready ( T result ) { onready . fire ( result ) ; } @ Override public void error ( TError error ) { onErrorAndCancel . error ( error ) ; } @ Override public void cancelled ( CancelException event ) { onErrorAndCancel . cancel ( event ) ; } @ Override public String toString ( ) { return "AsyncWork.listenInline: " + onready ; } } ) ; |
public class HTMLReporter { /** * Create the index file that sets up the frameset .
* @ param outputDirectory The target directory for the generated file ( s ) . */
private void createFrameset ( File outputDirectory ) throws Exception { } } | VelocityContext context = createContext ( ) ; generateFile ( new File ( outputDirectory , INDEX_FILE ) , INDEX_FILE + TEMPLATE_EXTENSION , context ) ; |
public class LongArray { /** * Sets the value at position { @ code index } . */
public void set ( int index , long value ) { } } | assert index >= 0 : "index (" + index + ") should >= 0" ; assert index < length : "index (" + index + ") should < length (" + length + ")" ; Platform . putLong ( baseObj , baseOffset + index * WIDTH , value ) ; |
public class IndexedSet { /** * Gets a subset of objects with the specified field value . If there is no object with the
* specified field value , a newly created empty set is returned .
* @ param indexDefinition the field index definition
* @ param value the field value to be satisfied
* @ param < V > the field type
* @ return the set of objects or an empty set if no such object exists */
public < V > Set < T > getByField ( IndexDefinition < T , V > indexDefinition , V value ) { } } | FieldIndex < T , V > index = ( FieldIndex < T , V > ) mIndices . get ( indexDefinition ) ; if ( index == null ) { throw new IllegalStateException ( "the given index isn't defined for this IndexedSet" ) ; } return index . getByField ( value ) ; |
public class TransformerHandlerImpl { /** * Filter a start element event .
* @ param uri The element ' s Namespace URI , or the empty string .
* @ param localName The element ' s local name , or the empty string .
* @ param qName The element ' s qualified ( prefixed ) name , or the empty
* string .
* @ param atts The element ' s attributes .
* @ throws SAXException The client may throw
* an exception during processing .
* @ see org . xml . sax . ContentHandler # startElement */
public void startElement ( String uri , String localName , String qName , Attributes atts ) throws SAXException { } } | if ( DEBUG ) System . out . println ( "TransformerHandlerImpl#startElement: " + qName ) ; if ( m_contentHandler != null ) { m_contentHandler . startElement ( uri , localName , qName , atts ) ; } |
public class ConnectorProperties { /** * override the Vector add method to not add duplicate entries . That is , entries with the same name . */
@ Override public boolean add ( Object o ) { } } | ConnectorProperty connectorPropertyToAdd = ( ConnectorProperty ) o ; String nameToAdd = connectorPropertyToAdd . getName ( ) ; ConnectorProperty connectorProperty = null ; String name = null ; Enumeration < Object > e = this . elements ( ) ; while ( e . hasMoreElements ( ) ) { connectorProperty = ( ConnectorProperty ) e . nextElement ( ) ; name = connectorProperty . getName ( ) ; if ( name . equals ( nameToAdd ) ) { if ( tc . isDebugEnabled ( ) ) { String value = ( String ) connectorPropertyToAdd . getValue ( ) ; if ( ! value . equals ( "" ) ) { if ( name . equals ( "UserName" ) || name . equals ( "Password" ) ) { Tr . debug ( tc , "DUPLICATE_USERNAME_PASSWORD_CONNECTOR_PROPERTY_J2CA0103" , new Object [ ] { ( ConnectorProperty ) o } ) ; } else { Tr . warning ( tc , "DUPLICATE_CONNECTOR_PROPERTY_J2CA0308" , new Object [ ] { ( ConnectorProperty ) o } ) ; } } } return true ; } } return super . add ( o ) ; |
public class LightWeightLinkedSet { /** * Remove and return first element on the linked list of all elements .
* @ return first element */
public T pollFirst ( ) { } } | if ( head == null ) { return null ; } T first = head . element ; this . remove ( first ) ; return first ; |
public class Portfolio { /** * This method returns the value random variable of the product within the specified model , evaluated at a given evalutationTime .
* Note : For a lattice this is often the value conditional to evalutationTime , for a Monte - Carlo simulation this is the ( sum of ) value discounted to evaluation time .
* Cashflows prior evaluationTime are not considered .
* @ TODO The conversion between different currencies is currently not performed .
* @ param evaluationTime The time on which this products value should be observed .
* @ param model The model used to price the product .
* @ return The random variable representing the value of the product discounted to evaluation time
* @ throws net . finmath . exception . CalculationException Thrown if the valuation fails , specific cause may be available via the < code > cause ( ) < / code > method . */
@ Override public RandomVariable getValue ( double evaluationTime , LIBORModelMonteCarloSimulationModel model ) throws CalculationException { } } | RandomVariable values = new RandomVariableFromDoubleArray ( 0.0 ) ; for ( int productIndex = 0 ; productIndex < products . length ; productIndex ++ ) { RandomVariable valueOfProduct = products [ productIndex ] . getValue ( evaluationTime , model ) ; double weightOfProduct = weights [ productIndex ] ; values = values . addProduct ( valueOfProduct , weightOfProduct ) ; } return values ; |
public class MessageRetriever { /** * Get and format message string from resource
* @ param key selects message from resource
* @ param args arguments to be replaced in the message .
* @ throws MissingResourceException when the key does not
* exist in the properties file . */
public String getText ( String key , Object ... args ) throws MissingResourceException { } } | ResourceBundle bundle = initRB ( ) ; String message = bundle . getString ( key ) ; return MessageFormat . format ( message , args ) ; |
public class DeleteFleetRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteFleetRequest deleteFleetRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( deleteFleetRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteFleetRequest . getFleetId ( ) , FLEETID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class CachingXmlDataStore { /** * Tries to read the content of specified cache file and returns them as fallback data store . If the cache file
* contains unexpected data the given fallback data store will be returned instead .
* @ param reader
* data reader to read the given { @ code dataUrl }
* @ param cacheFile
* file with cached < em > UAS data < / em > in XML format or empty file
* @ param versionUrl
* URL to version information about the given < em > UAS data < / em >
* @ param charset
* the character set in which the data should be read
* @ param fallback
* < em > UAS data < / em > as fallback in case the data on the specified resource can not be read correctly
* @ return a fallback data store */
private static DataStore readCacheFileAsFallback ( @ Nonnull final DataReader reader , @ Nonnull final File cacheFile , @ Nonnull final Charset charset , @ Nonnull final DataStore fallback ) { } } | DataStore fallbackDataStore ; if ( ! isEmpty ( cacheFile , charset ) ) { final URL cacheFileUrl = UrlUtil . toUrl ( cacheFile ) ; try { fallbackDataStore = new CacheFileDataStore ( reader . read ( cacheFileUrl , charset ) , reader , cacheFileUrl , charset ) ; LOG . debug ( MSG_CACHE_FILE_IS_FILLED ) ; } catch ( final RuntimeException e ) { fallbackDataStore = fallback ; deleteCacheFile ( cacheFile ) ; } } else { fallbackDataStore = fallback ; LOG . debug ( MSG_CACHE_FILE_IS_EMPTY ) ; } return fallbackDataStore ; |
public class SCM { /** * Obtains a fresh workspace of the module ( s ) into the specified directory
* of the specified machine .
* The " update " operation can be performed instead of a fresh checkout if
* feasible .
* This operation should also capture the information necessary to tag the workspace later .
* @ param launcher
* Abstracts away the machine that the files will be checked out .
* @ param workspace
* a directory to check out the source code . May contain left - over
* from the previous build .
* @ param changelogFile
* Upon a successful return , this file should capture the changelog .
* When there ' s no change , this file should contain an empty entry .
* See { @ link # createEmptyChangeLog ( File , TaskListener , String ) } .
* May be null , in which case no changelog was requested .
* @ param baseline version from the previous build to use for changelog creation , if requested and available
* @ throws InterruptedException
* interruption is usually caused by the user aborting the build .
* this exception will cause the build to be aborted .
* @ throws AbortException in case of a routine failure
* @ since 1.568 */
public void checkout ( @ Nonnull Run < ? , ? > build , @ Nonnull Launcher launcher , @ Nonnull FilePath workspace , @ Nonnull TaskListener listener , @ CheckForNull File changelogFile , @ CheckForNull SCMRevisionState baseline ) throws IOException , InterruptedException { } } | if ( build instanceof AbstractBuild && listener instanceof BuildListener && Util . isOverridden ( SCM . class , getClass ( ) , "checkout" , AbstractBuild . class , Launcher . class , FilePath . class , BuildListener . class , File . class ) ) { if ( changelogFile == null ) { changelogFile = File . createTempFile ( "changelog" , ".xml" ) ; try { if ( ! checkout ( ( AbstractBuild ) build , launcher , workspace , ( BuildListener ) listener , changelogFile ) ) { throw new AbortException ( ) ; } } finally { Util . deleteFile ( changelogFile ) ; } } else { if ( ! checkout ( ( AbstractBuild ) build , launcher , workspace , ( BuildListener ) listener , changelogFile ) ) { throw new AbortException ( ) ; } } } else { throw new AbstractMethodError ( "you must override the new overload of checkout" ) ; } |
public class Matth { /** * Returns the difference of { @ code a } and { @ code b } , provided it does not overflow .
* @ throws ArithmeticException if { @ code a - b } overflows in signed { @ code long } arithmetic */
public static long subtractExact ( long a , long b ) { } } | long result = a - b ; checkNoOverflow ( ( a ^ b ) >= 0 | ( a ^ result ) >= 0 ) ; return result ; |
public class MySqlCommunication { /** * Restores table content .
* @ param tableName table to be restored
* @ param tempTableName temporary table name
* @ throws SQLException for any issue */
public void restoreTable ( String tableName , String tempTableName ) throws SQLException { } } | LOG . debug ( "Restore table {} from {}" , tableName , tempTableName ) ; try { this . setForeignKeyCheckEnabled ( false ) ; this . truncateTable ( tableName ) ; final String sql = "INSERT INTO " + tableName + " SELECT * FROM " + tempTableName + ";" ; this . executeUpdate ( sql ) ; } finally { this . setForeignKeyCheckEnabled ( true ) ; } |
public class TaskManagerService { /** * and ( 2 ) enough time has passed since its last execution that it ' s time to run . */
private boolean taskShouldExecute ( Task task , TaskRecord taskRecord ) { } } | String taskID = taskRecord . getTaskID ( ) ; if ( taskRecord . getStatus ( ) == TaskStatus . NEVER_EXECUTED ) { m_logger . debug ( "Task '{}' has never executed" , taskID ) ; return true ; } if ( taskRecord . getStatus ( ) == TaskStatus . IN_PROGRESS ) { m_logger . debug ( "Task '{}' is already being executed" , taskID ) ; return false ; } Calendar startTime = taskRecord . getTime ( TaskRecord . PROP_START_TIME ) ; long startTimeMillis = startTime == null ? 0 : startTime . getTimeInMillis ( ) ; long taskPeriodMillis = task . getTaskFreq ( ) . getValueInMinutes ( ) * 60 * 1000 ; long nowMillis = System . currentTimeMillis ( ) ; boolean bShouldStart = startTimeMillis + taskPeriodMillis <= nowMillis ; m_logger . debug ( "Considering task {}: Last started at {}; periodicity in millis: {}; current time: {}; next start: {}; should start: {}" , new Object [ ] { task . getTaskID ( ) , Utils . formatDateUTC ( startTimeMillis , Calendar . MILLISECOND ) , taskPeriodMillis , Utils . formatDateUTC ( nowMillis , Calendar . MILLISECOND ) , Utils . formatDateUTC ( startTimeMillis + taskPeriodMillis , Calendar . MILLISECOND ) , bShouldStart } ) ; return bShouldStart ; |
public class SrvI18n { /** * < p > Add message bundles by given language / country names . < / p >
* @ param pLangCountries array of language / country names ,
* e . g . [ " en " , " US " , " ru " , " RU " ] */
public final void add ( final String [ ] pLangCountries ) { } } | if ( pLangCountries != null && pLangCountries . length % 2 == 0 ) { for ( int i = 0 ; i < pLangCountries . length / 2 ; i ++ ) { Locale locale = new Locale ( pLangCountries [ i * 2 ] , pLangCountries [ i * 2 + 1 ] ) ; if ( ! ( messages != null && locale . equals ( Locale . getDefault ( ) ) ) ) { ResourceBundle msgs = null ; try { msgs = ResourceBundle . getBundle ( "Messages" , locale ) ; } catch ( Exception e ) { this . logger . error ( null , SrvI18n . class , " when loading msg for locale " + locale , e ) ; } if ( msgs != null ) { this . messagesMap . put ( pLangCountries [ i * 2 ] , msgs ) ; this . logger . info ( null , SrvI18n . class , "Added messages for lang/country: " + pLangCountries [ i * 2 ] + "/" + pLangCountries [ i * 2 + 1 ] ) ; if ( this . messages == null ) { this . messages = msgs ; } } else { // If there is no MessagesBundle [ current - locale ] . properties
this . logger . error ( null , SrvI18n . class , "There is no messages for lang/country: " + pLangCountries [ i * 2 ] + "/" + pLangCountries [ i * 2 + 1 ] ) ; } } else { this . logger . info ( null , SrvI18n . class , "Messages already added as default for lang/country: " + pLangCountries [ i * 2 ] + "/" + pLangCountries [ i * 2 + 1 ] ) ; } } } else { String msg = null ; if ( pLangCountries == null ) { msg = "is null!" ; } else { for ( String str : pLangCountries ) { if ( msg == null ) { msg = str ; } else { msg += "/" + str ; } } } this . logger . error ( null , SrvI18n . class , "Parameters language error, pLangCountries: " + msg ) ; } |
public class XmRequestContextInterceptor { /** * { @ inheritDoc } */
@ Override public void afterCompletion ( HttpServletRequest request , HttpServletResponse response , Object handler , Exception ex ) { } } | LOGGER . debug ( "Destroy XM request context" ) ; getXmPrivilegedRequestContext ( ) . destroyCurrentContext ( ) ; |
public class HFCACertificateRequest { /** * Get certificates that have been revoked before this date
* @ param revokedEnd Revoked before date
* @ throws InvalidArgumentException Date can ' t be null */
public void setRevokedEnd ( Date revokedEnd ) throws InvalidArgumentException { } } | if ( revokedEnd == null ) { throw new InvalidArgumentException ( "Date can't be null" ) ; } queryParms . put ( "revoked_end" , Util . dateToString ( revokedEnd ) ) ; |
public class CoreService { /** * Start server . */
private void startServer ( ) { } } | if ( mServer . isRunning ( ) ) { String hostAddress = mServer . getInetAddress ( ) . getHostAddress ( ) ; ServerManager . onServerStart ( CoreService . this , hostAddress ) ; } else { mServer . startup ( ) ; } |
public class Assembly { /** * Adds a file to your assembly . If the field name specified already exists , it will override the content of the
* existing name .
* @ param file { @ link File } the file to be uploaded .
* @ param name { @ link String } the field name of the file when submitted Transloadit . */
public void addFile ( File file , String name ) { } } | files . put ( name , file ) ; // remove duplicate key
if ( fileStreams . containsKey ( name ) ) { fileStreams . remove ( name ) ; } |
public class GradientEditor { /** * Add a new control point */
private void addPoint ( ) { } } | ControlPoint point = new ControlPoint ( Color . white , 0.5f ) ; for ( int i = 0 ; i < list . size ( ) - 1 ; i ++ ) { ControlPoint now = ( ControlPoint ) list . get ( i ) ; ControlPoint next = ( ControlPoint ) list . get ( i + 1 ) ; if ( ( now . pos <= 0.5f ) && ( next . pos >= 0.5f ) ) { list . add ( i + 1 , point ) ; break ; } } selected = point ; sortPoints ( ) ; repaint ( 0 ) ; fireUpdate ( ) ; |
public class ReviewsImpl { /** * The reviews created would show up for Reviewers on your team . As Reviewers complete reviewing , results of the Review would be POSTED ( i . e . HTTP POST ) on the specified CallBackEndpoint .
* & lt ; h3 & gt ; CallBack Schemas & lt ; / h3 & gt ;
* & lt ; h4 & gt ; Review Completion CallBack Sample & lt ; / h4 & gt ;
* & lt ; p & gt ;
* { & lt ; br / & gt ;
* " ReviewId " : " & lt ; Review Id & gt ; " , & lt ; br / & gt ;
* " ModifiedOn " : " 2016-10-11T22:36:32.9934851Z " , & lt ; br / & gt ;
* " ModifiedBy " : " & lt ; Name of the Reviewer & gt ; " , & lt ; br / & gt ;
* " CallBackType " : " Review " , & lt ; br / & gt ;
* " ContentId " : " & lt ; The ContentId that was specified input & gt ; " , & lt ; br / & gt ;
* " Metadata " : { & lt ; br / & gt ;
* " adultscore " : " 0 . xxx " , & lt ; br / & gt ;
* " a " : " False " , & lt ; br / & gt ;
* " racyscore " : " 0 . xxx " , & lt ; br / & gt ;
* " r " : " True " & lt ; br / & gt ;
* } , & lt ; br / & gt ;
* " ReviewerResultTags " : { & lt ; br / & gt ;
* " a " : " False " , & lt ; br / & gt ;
* " r " : " True " & lt ; br / & gt ;
* } & lt ; br / & gt ;
* } & lt ; br / & gt ;
* & lt ; / p & gt ; .
* @ param teamName Your team name .
* @ param contentType The content type .
* @ param createVideoReviewsBody Body for create reviews API
* @ param createVideoReviewsOptionalParameter the object representing the optional parameters to be set before calling this API
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the List & lt ; String & gt ; object */
public Observable < List < String > > createVideoReviewsAsync ( String teamName , String contentType , List < CreateVideoReviewsBodyItem > createVideoReviewsBody , CreateVideoReviewsOptionalParameter createVideoReviewsOptionalParameter ) { } } | return createVideoReviewsWithServiceResponseAsync ( teamName , contentType , createVideoReviewsBody , createVideoReviewsOptionalParameter ) . map ( new Func1 < ServiceResponse < List < String > > , List < String > > ( ) { @ Override public List < String > call ( ServiceResponse < List < String > > response ) { return response . body ( ) ; } } ) ; |
public class CPRulePersistenceImpl { /** * Returns a range of all the cp rules where groupId = & # 63 ; .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CPRuleModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param groupId the group ID
* @ param start the lower bound of the range of cp rules
* @ param end the upper bound of the range of cp rules ( not inclusive )
* @ return the range of matching cp rules */
@ Override public List < CPRule > findByGroupId ( long groupId , int start , int end ) { } } | return findByGroupId ( groupId , start , end , null ) ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link AttributesReferenceType } { @ code > } } */
@ XmlElementDecl ( namespace = "urn:oasis:names:tc:xacml:3.0:core:schema:wd-17" , name = "AttributesReference" ) public JAXBElement < AttributesReferenceType > createAttributesReference ( AttributesReferenceType value ) { } } | return new JAXBElement < AttributesReferenceType > ( _AttributesReference_QNAME , AttributesReferenceType . class , null , value ) ; |
public class RegExp { /** * Builds a regular expression equivalent to the union of several
* ( possibly more than 2 ) terms . In the normal case , this method
* applies the binary < code > Union < / code > constructor repeatedly .
* If the set of terms contains a single distinct term , the
* method returns that term ( so , the resulting
* < code > RegExp < / code > is not always an instance of
* < code > Union < / code > ) . If the set of terms is empty , the method
* returns < code > None < / code > . */
public static < A > RegExp < A > buildUnion ( Set < RegExp < A > > unionTerms ) { } } | if ( unionTerms . isEmpty ( ) ) return new None < A > ( ) ; Iterator < RegExp < A > > it = unionTerms . iterator ( ) ; RegExp < A > re = it . next ( ) ; while ( it . hasNext ( ) ) { re = new Union < A > ( re , it . next ( ) ) ; } return re ; |
public class BaseRecordOwner { /** * Get the main record for this screen .
* @ return The main record ( or null if none ) . */
public Record getMainRecord ( ) { } } | Record record = null ; if ( m_vRecordList != null ) record = m_vRecordList . getMainRecord ( ) ; if ( record == null ) if ( m_taskParent instanceof RecordOwner ) record = ( Record ) ( ( RecordOwner ) m_taskParent ) . getMainRecord ( ) ; // Look thru the parent window now
return record ; |
public class Scope { /** * 设置全局变量
* 全局作用域是指本次请求的整个 template */
public void setGlobal ( Object key , Object value ) { } } | for ( Scope cur = this ; true ; cur = cur . parent ) { if ( cur . parent == null ) { cur . data . put ( key , value ) ; return ; } } |
public class OptionsParamCheckForUpdates { /** * Sets whether or not the " check for updates on start up " is enabled .
* @ param checkOnStart { @ code true } if the " check for updates on start up " should be enabled , { @ code false } otherwise . */
public void setCheckOnStart ( boolean checkOnStart ) { } } | this . checkOnStart = checkOnStart ; getConfig ( ) . setProperty ( CHECK_ON_START , checkOnStart ) ; if ( dayLastChecked . length ( ) == 0 ) { dayLastChecked = "Never" ; getConfig ( ) . setProperty ( DAY_LAST_CHECKED , dayLastChecked ) ; } |
public class ResourceLoader { /** * Loads a resource as { @ link BufferedReader } .
* @ param resource
* The resource to be loaded .
* @ param encoding
* The encoding to use
* @ return The reader */
public static BufferedReader getBufferedReader ( final File resource , final String encoding ) throws IOException { } } | return new BufferedReader ( getInputStreamReader ( resource , encoding ) ) ; |
public class Solo { /** * Scrolls a ListView matching the specified index to the bottom .
* @ param index the index of the { @ link ListView } to scroll . { @ code 0 } if only one list is available
* @ return { @ code true } if more scrolling can be performed */
public boolean scrollListToBottom ( int index ) { } } | if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "scrollListToBottom(" + index + ")" ) ; } return scroller . scrollList ( waiter . waitForAndGetView ( index , ListView . class ) , Scroller . DOWN , true ) ; |
public class PathDescriptorVariableConstraint { /** * Factory method . Tries to split the string of the form < code > x [ = y ] < / code >
* where " x " is the constraint type and " y " is the constraint value . All
* possible constraint types are located in
* { @ link EPathDescriptorVariableConstraintType } . If the constraint type
* requires no value the " y " part may be omitted .
* @ param sConstraint
* Constraint to be parsed .
* @ return < code > null < / code > if the passed constraint string could not be
* parsed . */
@ Nullable public static PathDescriptorVariableConstraint createOrNull ( @ Nonnull final String sConstraint ) { } } | final String sRealValue = StringHelper . trim ( sConstraint ) ; if ( StringHelper . hasNoText ( sRealValue ) ) { LOGGER . warn ( "Empty path descriptor variable constraint is ignored!" ) ; return null ; } // Split in type and value
final ICommonsList < String > aParts = StringHelper . getExploded ( '=' , sConstraint , 2 ) ; // Mandatory type
final String sConstraintType = aParts . getAtIndex ( 0 ) ; final EPathDescriptorVariableConstraintType eConstraintType = EPathDescriptorVariableConstraintType . getFromIDOrNull ( sConstraintType ) ; if ( eConstraintType == null ) { LOGGER . error ( "Unsupported variable constraint type '" + sConstraintType + "' used!" ) ; return null ; } // Optional value
final String sConstraintValue = aParts . getAtIndex ( 1 ) ; if ( eConstraintType . isRequiresValue ( ) && StringHelper . hasNoText ( sConstraintValue ) ) { LOGGER . error ( "Variable constraint type '" + sConstraintType + "' requires a value but no value provided! Separate type and value with a '=' character." ) ; return null ; } return new PathDescriptorVariableConstraint ( eConstraintType , sConstraintValue ) ; |
public class LocalDoTask { /** * The main method .
* @ param args
* the arguments */
public static void main ( String [ ] args ) { } } | // ArrayList < String > tmpArgs = new ArrayList < String > ( Arrays . asList ( args ) ) ;
// args = tmpArgs . toArray ( new String [ 0 ] ) ;
FlagOption suppressStatusOutOpt = new FlagOption ( "suppressStatusOut" , 'S' , SUPPRESS_STATUS_OUT_MSG ) ; FlagOption suppressResultOutOpt = new FlagOption ( "suppressResultOut" , 'R' , SUPPRESS_RESULT_OUT_MSG ) ; IntOption statusUpdateFreqOpt = new IntOption ( "statusUpdateFrequency" , 'F' , STATUS_UPDATE_FREQ_MSG , 1000 , 0 , Integer . MAX_VALUE ) ; Option [ ] extraOptions = new Option [ ] { suppressStatusOutOpt , suppressResultOutOpt , statusUpdateFreqOpt } ; StringBuilder cliString = new StringBuilder ( ) ; for ( String arg : args ) { cliString . append ( " " ) . append ( arg ) ; } logger . debug ( "Command line string = {}" , cliString . toString ( ) ) ; System . out . println ( "Command line string = " + cliString . toString ( ) ) ; Task task ; try { task = ClassOption . cliStringToObject ( cliString . toString ( ) , Task . class , extraOptions ) ; logger . info ( "Successfully instantiating {}" , task . getClass ( ) . getCanonicalName ( ) ) ; } catch ( Exception e ) { logger . error ( "Fail to initialize the task" , e ) ; System . out . println ( "Fail to initialize the task" + e ) ; return ; } task . setFactory ( new SimpleComponentFactory ( ) ) ; task . init ( ) ; SimpleEngine . submitTopology ( task . getTopology ( ) ) ; |
public class S3RestServiceHandler { /** * TODO ( cc ) : Support automatic abortion after a timeout . */
private void abortMultipartUpload ( String bucket , String object , long uploadId ) throws S3Exception { } } | String bucketPath = parseBucketPath ( AlluxioURI . SEPARATOR + bucket ) ; checkBucketIsAlluxioDirectory ( bucketPath ) ; String objectPath = bucketPath + AlluxioURI . SEPARATOR + object ; AlluxioURI multipartTemporaryDir = new AlluxioURI ( S3RestUtils . getMultipartTemporaryDirForObject ( bucketPath , object ) ) ; checkUploadId ( multipartTemporaryDir , uploadId ) ; try { mFileSystem . delete ( multipartTemporaryDir , DeletePOptions . newBuilder ( ) . setRecursive ( true ) . build ( ) ) ; } catch ( Exception e ) { throw toObjectS3Exception ( e , objectPath ) ; } |
public class XMPPTCPConnection { /** * Add a new Stanza ID acknowledged listener for the given ID .
* The listener will be invoked if the stanza with the given ID was acknowledged by the server . It will
* automatically be removed after the listener was run .
* @ param id the stanza ID .
* @ param listener the listener to invoke .
* @ return the previous listener for this stanza ID or null .
* @ throws StreamManagementNotEnabledException if Stream Management is not enabled . */
@ SuppressWarnings ( "FutureReturnValueIgnored" ) public StanzaListener addStanzaIdAcknowledgedListener ( final String id , StanzaListener listener ) throws StreamManagementNotEnabledException { } } | // Prevent users from adding callbacks that will never get removed
if ( ! smWasEnabledAtLeastOnce ) { throw new StreamManagementException . StreamManagementNotEnabledException ( ) ; } // Remove the listener after max . 3 hours
final int removeAfterSeconds = Math . min ( getMaxSmResumptionTime ( ) , 3 * 60 * 60 ) ; schedule ( new Runnable ( ) { @ Override public void run ( ) { stanzaIdAcknowledgedListeners . remove ( id ) ; } } , removeAfterSeconds , TimeUnit . SECONDS ) ; return stanzaIdAcknowledgedListeners . put ( id , listener ) ; |
public class RippleDrawableICS { /** * Attempts to start an exit animation for the active hotspot . Fails if there is no active
* hotspot . */
private void tryRippleExit ( ) { } } | if ( mRipple != null ) { if ( mExitingRipples == null ) { mExitingRipples = new RippleForeground [ MAX_RIPPLES ] ; } mExitingRipples [ mExitingRipplesCount ++ ] = mRipple ; mRipple . exit ( ) ; mRipple = null ; } |
public class Matrix4x3f { /** * Pre - multiply a rotation around the Z axis to this matrix by rotating the given amount of radians
* about the Z axis and store the result in < code > dest < / code > .
* When used with a right - handed coordinate system , the produced rotation will rotate a vector
* counter - clockwise around the rotation axis , when viewing along the negative axis direction towards the origin .
* When used with a left - handed coordinate system , the rotation is clockwise .
* If < code > M < / code > is < code > this < / code > matrix and < code > R < / code > the rotation matrix ,
* then the new matrix will be < code > R * M < / code > . So when transforming a
* vector < code > v < / code > with the new matrix by using < code > R * M * v < / code > , the
* rotation will be applied last !
* In order to set the matrix to a rotation matrix without pre - multiplying the rotation
* transformation , use { @ link # rotationZ ( float ) rotationZ ( ) } .
* Reference : < a href = " http : / / en . wikipedia . org / wiki / Rotation _ matrix # Rotation _ matrix _ from _ axis _ and _ angle " > http : / / en . wikipedia . org < / a >
* @ see # rotationZ ( float )
* @ param ang
* the angle in radians to rotate about the Z axis
* @ param dest
* will hold the result
* @ return dest */
public Matrix4x3f rotateLocalZ ( float ang , Matrix4x3f dest ) { } } | float sin = ( float ) Math . sin ( ang ) ; float cos = ( float ) Math . cosFromSin ( sin , ang ) ; float nm00 = cos * m00 - sin * m01 ; float nm01 = sin * m00 + cos * m01 ; float nm10 = cos * m10 - sin * m11 ; float nm11 = sin * m10 + cos * m11 ; float nm20 = cos * m20 - sin * m21 ; float nm21 = sin * m20 + cos * m21 ; float nm30 = cos * m30 - sin * m31 ; float nm31 = sin * m30 + cos * m31 ; dest . _m00 ( nm00 ) ; dest . _m01 ( nm01 ) ; dest . _m02 ( m02 ) ; dest . _m10 ( nm10 ) ; dest . _m11 ( nm11 ) ; dest . _m12 ( m12 ) ; dest . _m20 ( nm20 ) ; dest . _m21 ( nm21 ) ; dest . _m22 ( m22 ) ; dest . _m30 ( nm30 ) ; dest . _m31 ( nm31 ) ; dest . _m32 ( m32 ) ; dest . properties = properties & ~ ( PROPERTY_IDENTITY | PROPERTY_TRANSLATION ) ; return dest ; |
public class RDBMServices { /** * Releases database connection . Unlike the underlying connection . close ( ) , this method does not
* throw SQLException or any other exception . It will fail silently from the perspective of
* calling code , logging errors using Commons Logging .
* @ param con a database Connection object
* @ deprecated Where possible code should be injected with a { @ link DataSource } object via the
* Spring application context */
@ Deprecated public static void releaseConnection ( final Connection con ) { } } | // If we had failed allocating the connection , insure we don ' t try to count it or close it .
// UP - 4446
if ( con != null ) { try { activeConnections . decrementAndGet ( ) ; con . close ( ) ; } catch ( Exception e ) { if ( LOG . isWarnEnabled ( ) ) LOG . warn ( "Error closing Connection: " + con , e ) ; } } |
public class CloudFormationSample { /** * Convert a stream into a single , newline separated string */
public static String convertStreamToString ( InputStream in ) throws Exception { } } | BufferedReader reader = new BufferedReader ( new InputStreamReader ( in ) ) ; StringBuilder stringbuilder = new StringBuilder ( ) ; String line = null ; while ( ( line = reader . readLine ( ) ) != null ) { stringbuilder . append ( line + "\n" ) ; } in . close ( ) ; return stringbuilder . toString ( ) ; |
public class AbstractEvaluatorToPartitionStrategy { /** * Get an input split to be assigned to this evaluator .
* Allocates one if its not already allocated
* @ param evaluatorId
* @ return a numberedSplit
* @ throws RuntimeException
* if couldn ' t find any split */
@ Override public NumberedSplit < InputSplit > getInputSplit ( final NodeDescriptor nodeDescriptor , final String evaluatorId ) { } } | synchronized ( evaluatorToSplits ) { if ( evaluatorToSplits . containsKey ( evaluatorId ) ) { LOG . log ( Level . FINE , "Found an already allocated split, {0}" , evaluatorToSplits . toString ( ) ) ; return evaluatorToSplits . get ( evaluatorId ) ; } } // always first try to allocate based on the hostName
final String hostName = nodeDescriptor . getName ( ) ; LOG . log ( Level . FINE , "Allocated split not found, trying on {0}" , hostName ) ; if ( locationToSplits . containsKey ( hostName ) ) { LOG . log ( Level . FINE , "Found splits possibly hosted for {0} at {1}" , new Object [ ] { evaluatorId , hostName } ) ; final NumberedSplit < InputSplit > split = allocateSplit ( evaluatorId , locationToSplits . get ( hostName ) ) ; if ( split != null ) { return split ; } } LOG . log ( Level . FINE , "{0} does not host any splits or someone else took splits hosted here. Picking other ones" , hostName ) ; final NumberedSplit < InputSplit > split = tryAllocate ( nodeDescriptor , evaluatorId ) ; if ( split == null ) { throw new RuntimeException ( "Unable to find an input split to evaluator " + evaluatorId ) ; } else { LOG . log ( Level . FINE , evaluatorToSplits . toString ( ) ) ; } return split ; |
public class MultiException { /** * Throw a MultiException .
* If this multi exception is empty then no action is taken . If it
* contains a single exception that is thrown , otherwise the this
* multi exception is thrown .
* @ exception Exception the Error or Exception if nested is 1 , or the
* MultiException itself if nested is more than 1. */
public void ifExceptionThrow ( ) throws Exception { } } | if ( nested == null || nested . isEmpty ( ) ) { return ; } if ( nested . size ( ) == 1 ) { Throwable th = nested . get ( 0 ) ; if ( th instanceof Error ) { throw ( Error ) th ; } if ( th instanceof Exception ) { throw ( Exception ) th ; } } throw this ; |
public class PrettyTime { /** * / * [ deutsch ]
* < p > Formatiert die gesamte angegebene Dauer . < / p >
* < p > Eine lokalisierte Ausgabe ist nur f & uuml ; r die Zeiteinheiten
* { @ link CalendarUnit # YEARS } , { @ link CalendarUnit # MONTHS } ,
* { @ link CalendarUnit # WEEKS } , { @ link CalendarUnit # DAYS } und
* alle { @ link ClockUnit } - Instanzen vorhanden . Bei Bedarf werden
* andere Einheiten zu diesen normalisiert . < / p >
* @ param duration object representing a duration which might contain
* several units and quantities
* @ param width text width ( ABBREVIATED as synonym for SHORT )
* @ return formatted list output
* @ since 1.2 */
public String print ( Duration < ? > duration , TextWidth width ) { } } | return this . print ( duration , width , false , Integer . MAX_VALUE ) ; |
public class DockerUtils { /** * Builds a container name .
* @ param scopedInstancePath a scoped instance path
* @ param applicationName an application name
* @ return a non - null string */
public static String buildContainerNameFrom ( String scopedInstancePath , String applicationName ) { } } | String containerName = scopedInstancePath + "_from_" + applicationName ; containerName = containerName . replaceFirst ( "^/" , "" ) . replace ( "/" , "-" ) . replaceAll ( "\\s+" , "_" ) ; // Prevent container names from being too long ( see # 480)
if ( containerName . length ( ) > 61 ) containerName = containerName . substring ( 0 , 61 ) ; return containerName ; |
public class ZoomerCompat { /** * Computes the current zoom level , returning true if the zoom is still active and false if the zoom has finished .
* @ see android . widget . Scroller # computeScrollOffset ( ) */
public boolean computeZoom ( ) { } } | if ( mFinished ) { return false ; } long tRTC = SystemClock . elapsedRealtime ( ) - mStartRTC ; if ( tRTC >= mAnimationDurationMillis ) { mFinished = true ; mCurrentZoom = mEndZoom ; return false ; } float t = tRTC * 1f / mAnimationDurationMillis ; mCurrentZoom = mEndZoom * mInterpolator . getInterpolation ( t ) ; return true ; |
public class CmsListManager { /** * Unlocks the current resource in case it has been locked by previous actions . < p > */
void unlockCurrent ( ) { } } | if ( m_currentResource != null ) { if ( ( m_lockAction != null ) && m_lockAction . getChange ( ) . equals ( LockChange . locked ) ) { CmsLockUtil . tryUnlock ( A_CmsUI . getCmsObject ( ) , m_currentResource ) ; } } m_lockAction = null ; |
public class TapConnectionProvider { /** * Add a connection observer .
* If connections are already established , your observer will be called with
* the address and - 1.
* @ param obs the ConnectionObserver you wish to add
* @ return true if the observer was added . */
public boolean addObserver ( ConnectionObserver obs ) { } } | boolean rv = conn . addObserver ( obs ) ; if ( rv ) { for ( MemcachedNode node : conn . getLocator ( ) . getAll ( ) ) { if ( node . isActive ( ) ) { obs . connectionEstablished ( node . getSocketAddress ( ) , - 1 ) ; } } } return rv ; |
public class MutableLong { /** * Use the supplied function to perform a lazy transform operation when getValue is called
* < pre >
* { @ code
* MutableLong mutable = MutableLong . fromExternal ( ( ) - > ! this . value , val - > ! this . value ) ;
* Mutable < Long > withOverride = mutable . mapInputToObj ( b - > {
* if ( override )
* return 10.0;
* return b ;
* < / pre >
* @ param fn Map function to be applied to the input when set is called
* @ return Mutable that lazily applies the provided function when set is called to the input value */
public < T1 > Mutable < T1 > mapInputToObj ( final Function < T1 , Long > fn ) { } } | final MutableLong host = this ; return new Mutable < T1 > ( ) { @ Override public Mutable < T1 > set ( final T1 value ) { host . set ( fn . apply ( value ) ) ; return this ; } } ; |
public class ByteBufPayload { /** * Static factory method for a text payload . Mainly looks better than " new ByteBufPayload ( data ,
* metadata ) "
* @ param data the data of the payload .
* @ param metadata the metadata for the payload .
* @ return a payload . */
public static Payload create ( String data , @ Nullable String metadata ) { } } | return create ( ByteBufUtil . writeUtf8 ( ByteBufAllocator . DEFAULT , data ) , metadata == null ? null : ByteBufUtil . writeUtf8 ( ByteBufAllocator . DEFAULT , metadata ) ) ; |
public class ModelsEngine { /** * Calculate the map of netnumbering .
* @ param flowGC the map of flowdirection .
* @ param netGC the map of network .
* @ param tcaGC the optional map of tca .
* @ param tcaThreshold the threshold on the tca .
* @ param pointsFC optional feature collection of points in which to split the net .
* @ param pm the monitor .
* @ return the raster of netnumbering .
* @ throws Exception */
public static WritableRaster netNumbering ( GridCoverage2D flowGC , GridCoverage2D netGC , GridCoverage2D tcaGC , int tcaThreshold , SimpleFeatureCollection pointsFC , IHMProgressMonitor pm ) throws Exception { } } | RegionMap regionMap = CoverageUtilities . getRegionParamsFromGridCoverage ( flowGC ) ; int cols = regionMap . getCols ( ) ; int rows = regionMap . getRows ( ) ; WritableRaster netnumWR = CoverageUtilities . createWritableRaster ( cols , rows , Integer . class , null , null ) ; WritableRandomIter netnumIter = RandomIterFactory . createWritable ( netnumWR , null ) ; RandomIter flowIter = CoverageUtilities . getRandomIterator ( flowGC ) ; RandomIter netIter = CoverageUtilities . getRandomIterator ( netGC ) ; RandomIter tcaIter = null ; if ( tcaGC != null ) tcaIter = CoverageUtilities . getRandomIterator ( tcaGC ) ; /* * split nodes are points that create new numbering :
* - first points upstream on net
* - confluences
* - supplied points */
List < FlowNode > splitNodes = new ArrayList < FlowNode > ( ) ; List < Boolean > splitNodesIsNetStart = new ArrayList < Boolean > ( ) ; // SUPPLIED POINTS
if ( pointsFC != null ) { Envelope envelope = regionMap . toEnvelope ( ) ; GridGeometry2D gridGeometry = flowGC . getGridGeometry ( ) ; SimpleFeatureIterator pointsIter = pointsFC . features ( ) ; // snap points on net if necessary
while ( pointsIter . hasNext ( ) ) { SimpleFeature pointFeature = pointsIter . next ( ) ; Coordinate pointCoordinate = ( ( Geometry ) pointFeature . getDefaultGeometry ( ) ) . getCoordinate ( ) ; if ( envelope . contains ( pointCoordinate ) ) { GridCoordinates2D gridCoordinate = gridGeometry . worldToGrid ( new DirectPosition2D ( pointCoordinate . x , pointCoordinate . y ) ) ; GridNode netNode = new GridNode ( netIter , cols , rows , - 1 , - 1 , gridCoordinate . x , gridCoordinate . y ) ; FlowNode flowNode = new FlowNode ( flowIter , cols , rows , gridCoordinate . x , gridCoordinate . y ) ; while ( ! netNode . isValid ( ) ) { flowNode = flowNode . goDownstream ( ) ; if ( flowNode == null ) break ; netNode = new GridNode ( netIter , cols , rows , - 1 , - 1 , flowNode . col , flowNode . row ) ; } if ( flowNode != null ) { splitNodes . add ( flowNode ) ; splitNodesIsNetStart . add ( false ) ; } } } pointsIter . close ( ) ; } // FIND CONFLUENCES AND NETWORK STARTING POINTS ( MOST UPSTREAM )
pm . beginTask ( "Find confluences..." , rows ) ; for ( int r = 0 ; r < rows ; r ++ ) { for ( int c = 0 ; c < cols ; c ++ ) { GridNode netNode = new GridNode ( netIter , cols , rows , - 1 , - 1 , c , r ) ; if ( netNode . isValid ( ) ) { List < GridNode > validSurroundingNodes = netNode . getValidSurroundingNodes ( ) ; FlowNode currentflowNode = new FlowNode ( flowIter , cols , rows , c , r ) ; int enteringCount = 0 ; for ( GridNode gridNode : validSurroundingNodes ) { FlowNode tmpNode = new FlowNode ( flowIter , cols , rows , gridNode . col , gridNode . row ) ; List < FlowNode > enteringNodes = currentflowNode . getEnteringNodes ( ) ; if ( enteringNodes . contains ( tmpNode ) ) { enteringCount ++ ; } } if ( enteringCount != 1 ) { // starting ( = = 0 ) + confluences ( > 1)
splitNodes . add ( currentflowNode ) ; if ( enteringCount == 0 ) { splitNodesIsNetStart . add ( true ) ; } else { splitNodesIsNetStart . add ( false ) ; } } } } pm . worked ( 1 ) ; } pm . done ( ) ; pm . message ( "Found split points: " + splitNodes . size ( ) ) ; int channel = 1 ; pm . beginTask ( "Numbering network..." , splitNodes . size ( ) ) ; for ( int i = 0 ; i < splitNodes . size ( ) ; i ++ ) { FlowNode splitNode = splitNodes . get ( i ) ; boolean isNetStart = splitNodesIsNetStart . get ( i ) ; // we simply go down to the next split with one number
splitNode . setIntValueInMap ( netnumIter , channel ) ; // if it is a net start , check the tca if it exists
if ( isNetStart ) { int netStartTca = splitNode . getIntValueFromMap ( tcaIter ) ; if ( ! isNovalue ( netStartTca ) && netStartTca > tcaThreshold ) { channel ++ ; } } FlowNode nextNode = splitNode . goDownstream ( ) ; int startTca = intNovalue ; if ( nextNode != null ) startTca = nextNode . getIntValueFromMap ( tcaIter ) ; while ( nextNode != null && ! splitNodes . contains ( nextNode ) ) { nextNode . setIntValueInMap ( netnumIter , channel ) ; nextNode = nextNode . goDownstream ( ) ; int endTca = intNovalue ; if ( nextNode != null ) endTca = nextNode . getIntValueFromMap ( tcaIter ) ; if ( ! isNovalue ( startTca ) && ! isNovalue ( endTca ) ) { int diffTca = endTca - startTca ; if ( diffTca > tcaThreshold ) { startTca = endTca ; channel ++ ; } } } channel ++ ; pm . worked ( 1 ) ; } pm . done ( ) ; return netnumWR ; |
public class MTable { /** * Get the record count .
* Just returns the count of the default key area .
* @ return The record count ( or - 1 if unknown ) . */
public int getRecordCount ( FieldTable table ) { } } | int iKeyOrder = table . getRecord ( ) . getDefaultOrder ( ) ; if ( iKeyOrder == - 1 ) iKeyOrder = Constants . MAIN_KEY_AREA ; return this . getPKeyArea ( iKeyOrder ) . getRecordCount ( ) ; |
public class UserTransactionRegistryImpl { /** * { @ inheritDoc } */
public void addProvider ( UserTransactionProvider provider ) { } } | if ( provider != null ) { provider . setUserTransactionRegistry ( this ) ; providers . add ( provider ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.