signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class KeyEncoder { /** * Encodes the given optional BigInteger into a variable amount of bytes
* for descending order . If the BigInteger is null , exactly 1 byte is
* written . Otherwise , the amount written can be determined by calling
* calculateEncodedLength .
* @ param value BigInteger value to encode , may be null
* @ param dst destination for encoded bytes
* @ param dstOffset offset into destination array
* @ return amount of bytes written
* @ since 1.2 */
public static int encodeDesc ( BigInteger value , byte [ ] dst , int dstOffset ) { } }
|
/* Encoding of first byte :
0x00 : null high ( unused )
0x01 : positive signum ; four bytes follow for value length
0x02 . . 0x7f : positive signum ; value length 7e range , 1 . . 126
0x80 . . 0xfd : negative signum ; value length 7e range , 1 . . 126
0xfe : negative signum ; four bytes follow for value length
0xff : null low */
if ( value == null ) { dst [ dstOffset ] = NULL_BYTE_LOW ; return 1 ; } byte [ ] bytes = value . toByteArray ( ) ; // Always at least one .
int bytesLength = bytes . length ; int headerSize ; if ( bytesLength < 0x7f ) { if ( value . signum ( ) < 0 ) { dst [ dstOffset ] = ( byte ) ( bytesLength + 0x7f ) ; } else { dst [ dstOffset ] = ( byte ) ( 0x80 - bytesLength ) ; } headerSize = 1 ; } else { dst [ dstOffset ] = ( byte ) ( value . signum ( ) < 0 ? 0xfe : 1 ) ; int encodedLen = value . signum ( ) < 0 ? bytesLength : - bytesLength ; DataEncoder . encode ( encodedLen , dst , dstOffset + 1 ) ; headerSize = 5 ; } dstOffset += headerSize ; for ( int i = 0 ; i < bytesLength ; i ++ ) { dst [ dstOffset + i ] = ( byte ) ~ bytes [ i ] ; } return headerSize + bytesLength ;
|
public class FrameworkUtils { /** * find work object specified by name , create and attach it if not exists */
static Object workObject ( Map < String , Object > workList , String name , boolean isArray ) { } }
|
logger . trace ( "get working object for {}" , name ) ; if ( workList . get ( name ) != null ) return workList . get ( name ) ; else { String [ ] parts = splitName ( name ) ; // parts : ( parent , name , isArray )
Map < String , Object > parentObj = ( Map < String , Object > ) workObject ( workList , parts [ 0 ] , false ) ; Object theObj = isArray ? new ArrayList < String > ( ) : new HashMap < String , Object > ( ) ; parentObj . put ( parts [ 1 ] , theObj ) ; workList . put ( name , theObj ) ; return theObj ; }
|
public class SimpleASCIITableImpl { /** * Each string item rendering requires the border and a space on both sides .
* 12 3 12 3 12 34
* abc venkat last
* @ param colCount
* @ param colMaxLenList
* @ param data
* @ return */
private String getRowLineBuf ( int colCount , List < Integer > colMaxLenList , String [ ] [ ] data ) { } }
|
StringBuilder rowBuilder = new StringBuilder ( ) ; int colWidth = 0 ; for ( int i = 0 ; i < colCount ; i ++ ) { colWidth = colMaxLenList . get ( i ) + 3 ; for ( int j = 0 ; j < colWidth ; j ++ ) { if ( j == 0 ) { rowBuilder . append ( "+" ) ; } else if ( ( i + 1 == colCount && j + 1 == colWidth ) ) { // for last column close the border
rowBuilder . append ( "-+" ) ; } else { rowBuilder . append ( "-" ) ; } } } return rowBuilder . append ( "\n" ) . toString ( ) ;
|
public class Cache { /** * List all objects in the cache .
* @ return the list */
@ Override public List < ApiType > list ( ) { } }
|
lock . lock ( ) ; try { List < ApiType > itemList = new ArrayList < > ( this . items . size ( ) ) ; for ( Map . Entry < String , ApiType > entry : this . items . entrySet ( ) ) { itemList . add ( entry . getValue ( ) ) ; } return itemList ; } finally { lock . unlock ( ) ; }
|
public class ErrorPageFilter { /** * Return the description for the given request . By default this method will return a
* description based on the request { @ code servletPath } and { @ code pathInfo } .
* @ param request the source request
* @ return the description
* @ since 1.5.0 */
protected String getDescription ( HttpServletRequest request ) { } }
|
String pathInfo = ( request . getPathInfo ( ) != null ) ? request . getPathInfo ( ) : "" ; return "[" + request . getServletPath ( ) + pathInfo + "]" ;
|
public class Http2ClientChannel { /** * Adds a in - flight message .
* @ param streamId stream id
* @ param inFlightMessage { @ link OutboundMsgHolder } which holds the in - flight message */
public void putInFlightMessage ( int streamId , OutboundMsgHolder inFlightMessage ) { } }
|
if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "In flight message added to channel: {} with stream id: {} " , this , streamId ) ; } inFlightMessages . put ( streamId , inFlightMessage ) ;
|
public class ApiUtilDAODefaultImpl { public int pending_asynch_call ( final DeviceProxy dev , final int reply_model ) { } }
|
int cnt = 0 ; final Enumeration _enum = async_request_table . keys ( ) ; while ( _enum . hasMoreElements ( ) ) { int n = ( Integer ) _enum . nextElement ( ) ; final AsyncCallObject aco = async_request_table . get ( n ) ; if ( aco . dev == dev && ( reply_model == ApiDefs . ALL_ASYNCH || aco . reply_model == reply_model ) ) { cnt ++ ; } } return cnt ;
|
public class X509Factory { /** * Returns a ( possibly empty ) collection view of X . 509 CRLs read
* from the given input stream < code > is < / code > .
* @ param is the input stream with the CRLs .
* @ return a ( possibly empty ) collection view of X . 509 CRL objects
* initialized with the data from the input stream .
* @ exception CRLException on parsing errors . */
public Collection < ? extends java . security . cert . CRL > engineGenerateCRLs ( InputStream is ) throws CRLException { } }
|
if ( is == null ) { throw new CRLException ( "Missing input stream" ) ; } try { return parseX509orPKCS7CRL ( is ) ; } catch ( IOException ioe ) { throw new CRLException ( ioe . getMessage ( ) ) ; }
|
public class DescribeRepositoriesRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DescribeRepositoriesRequest describeRepositoriesRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( describeRepositoriesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeRepositoriesRequest . getRegistryId ( ) , REGISTRYID_BINDING ) ; protocolMarshaller . marshall ( describeRepositoriesRequest . getRepositoryNames ( ) , REPOSITORYNAMES_BINDING ) ; protocolMarshaller . marshall ( describeRepositoriesRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( describeRepositoriesRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class Runner { /** * Write the following job details as a JSON encoded file : runtime environment
* job ID , runtime , parameters , and accumulators .
* @ param env the execution environment
* @ param jobDetailsPath filesystem path to write job details
* @ throws IOException on error writing to jobDetailsPath */
private static void writeJobDetails ( ExecutionEnvironment env , String jobDetailsPath ) throws IOException { } }
|
JobExecutionResult result = env . getLastJobExecutionResult ( ) ; File jsonFile = new File ( jobDetailsPath ) ; try ( JsonGenerator json = new JsonFactory ( ) . createGenerator ( jsonFile , JsonEncoding . UTF8 ) ) { json . writeStartObject ( ) ; json . writeObjectFieldStart ( "Apache Flink" ) ; json . writeStringField ( "version" , EnvironmentInformation . getVersion ( ) ) ; json . writeStringField ( "commit ID" , EnvironmentInformation . getRevisionInformation ( ) . commitId ) ; json . writeStringField ( "commit date" , EnvironmentInformation . getRevisionInformation ( ) . commitDate ) ; json . writeEndObject ( ) ; json . writeStringField ( "job_id" , result . getJobID ( ) . toString ( ) ) ; json . writeNumberField ( "runtime_ms" , result . getNetRuntime ( ) ) ; json . writeObjectFieldStart ( "parameters" ) ; for ( Map . Entry < String , String > entry : env . getConfig ( ) . getGlobalJobParameters ( ) . toMap ( ) . entrySet ( ) ) { json . writeStringField ( entry . getKey ( ) , entry . getValue ( ) ) ; } json . writeEndObject ( ) ; json . writeObjectFieldStart ( "accumulators" ) ; for ( Map . Entry < String , Object > entry : result . getAllAccumulatorResults ( ) . entrySet ( ) ) { json . writeStringField ( entry . getKey ( ) , entry . getValue ( ) . toString ( ) ) ; } json . writeEndObject ( ) ; json . writeEndObject ( ) ; }
|
public class PermissionAwareCrudService { /** * This method returns a { @ link Map } that maps { @ link PersistentObject } s
* to PermissionCollections for the passed { @ link User } . I . e . the keySet
* of the map is the collection of all { @ link PersistentObject } s where the
* user has at least one permission and the corresponding value contains
* the { @ link PermissionCollection } for the passed user on the entity .
* @ param user
* @ return */
@ PreAuthorize ( "hasRole(@configHolder.getSuperAdminRoleName()) or hasPermission(#user, 'READ')" ) @ Transactional ( readOnly = true ) public Map < PersistentObject , PermissionCollection > findAllUserPermissionsOfUser ( User user ) { } }
|
return dao . findAllUserPermissionsOfUser ( user ) ;
|
public class AppIdNamespace { /** * Converts an encoded appId / namespace to { @ link AppIdNamespace } .
* < p > Only one form of an appId / namespace pair will be allowed . i . e . " app ! "
* is an illegal form and must be encoded as " app " .
* < p > An appId / namespace pair may contain at most one " ! " character .
* @ param encodedAppIdNamespace The encoded application Id / namespace string . */
public static AppIdNamespace parseEncodedAppIdNamespace ( String encodedAppIdNamespace ) { } }
|
if ( encodedAppIdNamespace == null ) { throw new IllegalArgumentException ( "appIdNamespaceString may not be null" ) ; } int index = encodedAppIdNamespace . indexOf ( NamespaceResources . NAMESPACE_SEPARATOR ) ; if ( index == - 1 ) { return new AppIdNamespace ( encodedAppIdNamespace , "" ) ; } String appId = encodedAppIdNamespace . substring ( 0 , index ) ; String namespace = encodedAppIdNamespace . substring ( index + 1 ) ; if ( namespace . length ( ) == 0 ) { throw new IllegalArgumentException ( "encodedAppIdNamespace with empty namespace may not contain a '" + NamespaceResources . NAMESPACE_SEPARATOR + "'" ) ; } return new AppIdNamespace ( appId , namespace ) ;
|
public class GetLifecyclePoliciesResult { /** * Summary information about the lifecycle policies .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setPolicies ( java . util . Collection ) } or { @ link # withPolicies ( java . util . Collection ) } if you want to override
* the existing values .
* @ param policies
* Summary information about the lifecycle policies .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetLifecyclePoliciesResult withPolicies ( LifecyclePolicySummary ... policies ) { } }
|
if ( this . policies == null ) { setPolicies ( new java . util . ArrayList < LifecyclePolicySummary > ( policies . length ) ) ; } for ( LifecyclePolicySummary ele : policies ) { this . policies . add ( ele ) ; } return this ;
|
public class LdaGibbsSampler { /** * Main method : Select initial state ? Repeat a large number of times : 1.
* Select an element 2 . Update conditional on other elements . If
* appropriate , output summary for each run .
* @ param K
* number of topics
* @ param alpha
* symmetric prior parameter on document - - topic associations
* @ param beta
* symmetric prior parameter on topic - - term associations */
private void gibbs ( int K , float alpha , float beta ) { } }
|
this . K = K ; this . alpha = alpha ; this . beta = beta ; // init sampler statistics
if ( SAMPLE_LAG > 0 ) { thetasum = new float [ documents . length ] [ K ] ; phisum = new float [ K ] [ V ] ; numstats = 0 ; } // initial state of the Markov chain :
initialState ( K ) ; System . out . println ( "Sampling " + ITERATIONS + " iterations with burn-in of " + BURN_IN + " (B/S=" + THIN_INTERVAL + ")." ) ; for ( int i = 0 ; i < ITERATIONS ; i ++ ) { // for all z _ i
for ( int m = 0 ; m < z . length ; m ++ ) { for ( int n = 0 ; n < z [ m ] . length ; n ++ ) { // ( z _ i = z [ m ] [ n ] )
// sample from p ( z _ i | z _ - i , w )
int topic = sampleFullConditional ( m , n ) ; z [ m ] [ n ] = topic ; } } if ( ( i < BURN_IN ) && ( i % THIN_INTERVAL == 0 ) ) { System . out . print ( "B" ) ; dispcol ++ ; } // display progress
if ( ( i > BURN_IN ) && ( i % THIN_INTERVAL == 0 ) ) { System . out . print ( "S" ) ; dispcol ++ ; } // get statistics after burn - in
if ( ( i > BURN_IN ) && ( SAMPLE_LAG > 0 ) && ( i % SAMPLE_LAG == 0 ) ) { updateParams ( ) ; System . out . print ( "|" ) ; if ( i % THIN_INTERVAL != 0 ) dispcol ++ ; } if ( dispcol >= 100 ) { System . out . println ( ) ; dispcol = 0 ; } }
|
public class Config { /** * Returns the { @ code channel } as a { @ link ConfigurableChannel } .
* @ throws IllegalArgumentException if { @ code channel } was not created by Lyra */
public static ConfigurableChannel of ( Channel channel ) { } }
|
Assert . isTrue ( channel instanceof ConfigurableChannel , "The channel {} was not created by Lyra" , channel ) ; return ( ConfigurableChannel ) channel ;
|
public class LogWriter { /** * Initializes the log writer on basis of the given log format . < br >
* It ensures the proper creation of the output file and writes the file header .
* @ param logFormat
* @ throws IOException if output file creation or header writing cause an exception .
* @ throws CompatibilityException if the charset of the log writer is not supported by the log format .
* @ throws PerspectiveException if the log format does not support the writers ' log perspective . */
protected final void initialize ( AbstractLogFormat logFormat ) throws PerspectiveException , IOException , CompatibilityException { } }
|
setLogFormat ( logFormat ) ; try { setEOLString ( EOLType . LF ) ; } catch ( ParameterException e ) { // Is only thrown if setEOLString ( ) is called with a null - parameter .
// Cannot happen , since EOLType . LF is not null
throw new RuntimeException ( e ) ; }
|
public class EventsHelper { /** * Bind a function to the unload event of each matched element .
* @ param jsScope
* Scope to use
* @ return the jQuery code */
public static ChainableStatement unload ( JsScope jsScope ) { } }
|
return new DefaultChainableStatement ( StateEvent . UNLOAD . getEventLabel ( ) , jsScope . render ( ) ) ;
|
public class FixedLengthRecordSorter { @ Override public int compare ( int i , int j ) { } }
|
final int segmentNumberI = i / this . recordsPerSegment ; final int segmentOffsetI = ( i % this . recordsPerSegment ) * this . recordSize ; final int segmentNumberJ = j / this . recordsPerSegment ; final int segmentOffsetJ = ( j % this . recordsPerSegment ) * this . recordSize ; return compare ( segmentNumberI , segmentOffsetI , segmentNumberJ , segmentOffsetJ ) ;
|
public class SelfAssignment { /** * We expect that the lhs is a field and the rhs is an identifier , specifically a parameter to the
* method . We base our suggested fixes on this expectation .
* < p > Case 1 : If lhs is a field and rhs is an identifier , find a method parameter of the same type
* and similar name and suggest it as the rhs . ( Guess that they have misspelled the identifier . )
* < p > Case 2 : If lhs is a field and rhs is not an identifier , find a method parameter of the same
* type and similar name and suggest it as the rhs .
* < p > Case 3 : If lhs is not a field and rhs is an identifier , find a class field of the same type
* and similar name and suggest it as the lhs .
* < p > Case 4 : Otherwise suggest deleting the assignment . */
public Description describeForAssignment ( AssignmentTree assignmentTree , VisitorState state ) { } }
|
// the statement that is the parent of the self - assignment expression
Tree parent = state . getPath ( ) . getParentPath ( ) . getLeaf ( ) ; // default fix is to delete assignment
Fix fix = SuggestedFix . delete ( parent ) ; ExpressionTree lhs = assignmentTree . getVariable ( ) ; ExpressionTree rhs = assignmentTree . getExpression ( ) ; // if this is a method invocation , they must be calling checkNotNull ( )
if ( assignmentTree . getExpression ( ) . getKind ( ) == METHOD_INVOCATION ) { // change the default fix to be " checkNotNull ( x ) " instead of " x = checkNotNull ( x ) "
fix = SuggestedFix . replace ( assignmentTree , state . getSourceForNode ( rhs ) ) ; // new rhs is first argument to checkNotNull ( )
rhs = stripNullCheck ( rhs , state ) ; } rhs = skipCast ( rhs ) ; ImmutableList < Fix > exploratoryFieldFixes = ImmutableList . of ( ) ; if ( lhs . getKind ( ) == MEMBER_SELECT ) { // find a method parameter of the same type and similar name and suggest it
// as the rhs
// rhs should be either identifier or field access
Preconditions . checkState ( rhs . getKind ( ) == IDENTIFIER || rhs . getKind ( ) == MEMBER_SELECT ) ; Type rhsType = ASTHelpers . getType ( rhs ) ; exploratoryFieldFixes = ReplacementVariableFinder . fixesByReplacingExpressionWithMethodParameter ( rhs , varDecl -> ASTHelpers . isSameType ( rhsType , varDecl . type , state ) , state ) ; } else if ( rhs . getKind ( ) == IDENTIFIER ) { // find a field of the same type and similar name and suggest it as the lhs
// lhs should be identifier
Preconditions . checkState ( lhs . getKind ( ) == IDENTIFIER ) ; Type lhsType = ASTHelpers . getType ( lhs ) ; exploratoryFieldFixes = ReplacementVariableFinder . fixesByReplacingExpressionWithLocallyDeclaredField ( lhs , var -> ! Flags . isStatic ( var . sym ) && ( var . sym . flags ( ) & Flags . FINAL ) == 0 && ASTHelpers . isSameType ( lhsType , var . type , state ) , state ) ; } if ( exploratoryFieldFixes . isEmpty ( ) ) { return describeMatch ( assignmentTree , fix ) ; } return buildDescription ( assignmentTree ) . addAllFixes ( exploratoryFieldFixes ) . build ( ) ;
|
public class AWSElasticBeanstalkClient { /** * Checks if the specified CNAME is available .
* @ param checkDNSAvailabilityRequest
* Results message indicating whether a CNAME is available .
* @ return Result of the CheckDNSAvailability operation returned by the service .
* @ sample AWSElasticBeanstalk . CheckDNSAvailability
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticbeanstalk - 2010-12-01 / CheckDNSAvailability "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public CheckDNSAvailabilityResult checkDNSAvailability ( CheckDNSAvailabilityRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeCheckDNSAvailability ( request ) ;
|
public class InjectorConfiguration { /** * Defines that instead of the class / interface passed in abstractDefinition , the class specified in
* implementationDefinition should be used . The specified replacement class must be defined as injectable .
* @ param abstractDefinition the abstract class or interface to replace .
* @ param implementationDefinition the implementation class
* @ param < TAbstract > type of the abstract class / interface
* @ param < TImplementation > type if the implementation
* @ return a modified copy of this injection configuration . */
public < TAbstract , TImplementation extends TAbstract > InjectorConfiguration withScopedAlias ( Class scope , Class < TAbstract > abstractDefinition , Class < TImplementation > implementationDefinition ) { } }
|
if ( abstractDefinition . equals ( Injector . class ) ) { throw new DependencyInjectionFailedException ( "Cowardly refusing to define a global alias for Injector since that would lead to a Service Locator pattern. If you need the injector, please define it on a per-class or per-method basis." ) ; } // noinspection unchecked
return new InjectorConfiguration ( scopes , definedClasses , factories , factoryClasses , sharedClasses , sharedInstances , aliases . withModified ( scope , ( value ) -> value . with ( abstractDefinition , implementationDefinition ) ) , collectedAliases , namedParameterValues ) ;
|
public class UnderFileSystemConfiguration { /** * Creates a new instance from the current configuration and adds in new properties .
* @ param mountConf the mount specific configuration map
* @ return the updated configuration object */
public UnderFileSystemConfiguration createMountSpecificConf ( Map < String , String > mountConf ) { } }
|
UnderFileSystemConfiguration ufsConf = new UnderFileSystemConfiguration ( mProperties . copy ( ) ) ; ufsConf . mProperties . merge ( mountConf , Source . MOUNT_OPTION ) ; ufsConf . mReadOnly = mReadOnly ; ufsConf . mShared = mShared ; return ufsConf ;
|
public class UIInput { /** * < p > Remove a { @ link Validator } instance from the set associated with
* this { @ link UIInput } , if it was previously associated .
* Otherwise , do nothing . < / p >
* @ param validator The { @ link Validator } to remove */
public void removeValidator ( Validator validator ) { } }
|
if ( validator == null ) { return ; } if ( validators != null ) { validators . remove ( validator ) ; }
|
public class PartitionLevelWatermarker { /** * Sets the actual high watermark by reading the expected high watermark
* { @ inheritDoc }
* @ see org . apache . gobblin . data . management . conversion . hive . watermarker . HiveSourceWatermarker # setActualHighWatermark ( org . apache . gobblin . configuration . WorkUnitState ) */
@ Override public void setActualHighWatermark ( WorkUnitState wus ) { } }
|
if ( Boolean . valueOf ( wus . getPropAsBoolean ( IS_WATERMARK_WORKUNIT_KEY ) ) ) { wus . setActualHighWatermark ( wus . getWorkunit ( ) . getExpectedHighWatermark ( MultiKeyValueLongWatermark . class ) ) ; } else { wus . setActualHighWatermark ( wus . getWorkunit ( ) . getExpectedHighWatermark ( LongWatermark . class ) ) ; }
|
public class Utils { /** * Access the specified class as a resource accessible through the specified loader and return the bytes . The
* classname should be ' dot ' separated ( eg . com . foo . Bar ) and not suffixed . class
* @ param loader the classloader against which getResourceAsStream ( ) will be invoked
* @ param slashedclassname the dot separated classname without . class suffix
* @ return the byte data defining that class */
public static byte [ ] loadSlashedClassAsBytes ( ClassLoader loader , String slashedclassname ) { } }
|
if ( GlobalConfiguration . assertsMode ) { if ( slashedclassname . endsWith ( ".class" ) ) { throw new IllegalStateException ( ".class suffixed name should not be passed:" + slashedclassname ) ; } if ( slashedclassname . indexOf ( '.' ) != - 1 ) { throw new IllegalStateException ( "Should be a slashed name, no dots:" + slashedclassname ) ; } } InputStream is = loader . getResourceAsStream ( slashedclassname + ".class" ) ; if ( is == null ) { throw new UnableToLoadClassException ( slashedclassname ) ; } return Utils . loadBytesFromStream ( is ) ;
|
public class CompareExpression { /** * { @ inheritDoc } */
@ Override public Condition build ( ) { } }
|
switch ( type ) { case EQUAL : return new Equal ( trigger , value ) ; case NOT_EQUAL : return new NotEqual ( trigger , value ) ; case LESS_THAN : return new LessThan ( trigger , value ) ; case LESS_THAN_OR_EQUAL : return new LessThanOrEqual ( trigger , value ) ; case GREATER_THAN : return new GreaterThan ( trigger , value ) ; case GREATER_THAN_OR_EQUAL : return new GreaterThanOrEqual ( trigger , value ) ; case MATCH : return new Match ( trigger , ( String ) value ) ; default : throw new IllegalStateException ( "Unknown compare type " + type ) ; }
|
public class LuceneIndex { /** * Deletes all the { @ link Document } s satisfying the specified { @ link Query } .
* @ param query The { @ link Query } to identify the documents to be deleted . */
public void delete ( Query query ) { } }
|
Log . debug ( "Deleting by query %s" , query ) ; try { indexWriter . deleteDocuments ( query ) ; } catch ( IOException e ) { Log . error ( e , "Error while deleting by query %s" , query ) ; throw new RuntimeException ( e ) ; }
|
public class AppServiceEnvironmentsInner { /** * Resume an App Service Environment .
* Resume an App Service Environment .
* @ param resourceGroupName Name of the resource group to which the resource belongs .
* @ param name Name of the App Service Environment .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void syncVirtualNetworkInfo ( String resourceGroupName , String name ) { } }
|
syncVirtualNetworkInfoWithServiceResponseAsync ( resourceGroupName , name ) . toBlocking ( ) . single ( ) . body ( ) ;
|
public class AWSGreengrassClient { /** * Retrieves the role associated with a particular group .
* @ param getAssociatedRoleRequest
* @ return Result of the GetAssociatedRole operation returned by the service .
* @ throws BadRequestException
* invalid request
* @ throws InternalServerErrorException
* server error
* @ sample AWSGreengrass . GetAssociatedRole
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / greengrass - 2017-06-07 / GetAssociatedRole " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public GetAssociatedRoleResult getAssociatedRole ( GetAssociatedRoleRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeGetAssociatedRole ( request ) ;
|
public class CPDefinitionGroupedEntryPersistenceImpl { /** * Returns all the cp definition grouped entries .
* @ return the cp definition grouped entries */
@ Override public List < CPDefinitionGroupedEntry > findAll ( ) { } }
|
return findAll ( QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ;
|
public class LogicalZipFile { /** * Extract a value from the manifest , and return the value as a string , along with the index after the
* terminating newline . Manifest files support three different line terminator types , and entries can be split
* across lines with a line terminator followed by a space .
* @ param manifest
* the manifest bytes
* @ param startIdx
* the start index of the manifest value
* @ return the manifest value */
private static Entry < String , Integer > getManifestValue ( final byte [ ] manifest , final int startIdx ) { } }
|
// See if manifest entry is split across multiple lines
int curr = startIdx ; final int len = manifest . length ; while ( curr < len && manifest [ curr ] == ( byte ) ' ' ) { // Skip initial spaces
curr ++ ; } final int firstNonSpaceIdx = curr ; boolean isMultiLine = false ; for ( ; curr < len && ! isMultiLine ; curr ++ ) { final byte b = manifest [ curr ] ; if ( b == ( byte ) '\r' && curr < len - 1 && manifest [ curr + 1 ] == ( byte ) '\n' ) { if ( curr < len - 2 && manifest [ curr + 2 ] == ( byte ) ' ' ) { isMultiLine = true ; } break ; } else if ( b == ( byte ) '\r' || b == ( byte ) '\n' ) { if ( curr < len - 1 && manifest [ curr + 1 ] == ( byte ) ' ' ) { isMultiLine = true ; } break ; } } String val ; if ( ! isMultiLine ) { // Fast path for single - line value
val = new String ( manifest , firstNonSpaceIdx , curr - firstNonSpaceIdx , StandardCharsets . UTF_8 ) ; } else { // Skip ( newline + space ) sequences in multi - line values
final ByteArrayOutputStream buf = new ByteArrayOutputStream ( ) ; curr = firstNonSpaceIdx ; for ( ; curr < len ; curr ++ ) { final byte b = manifest [ curr ] ; boolean isLineEnd ; if ( b == ( byte ) '\r' && curr < len - 1 && manifest [ curr + 1 ] == ( byte ) '\n' ) { // CRLF
curr += 2 ; isLineEnd = true ; } else if ( b == '\r' || b == '\n' ) { // CR or LF
curr += 1 ; isLineEnd = true ; } else { buf . write ( b ) ; isLineEnd = false ; } if ( isLineEnd && curr < len && manifest [ curr ] != ( byte ) ' ' ) { // Value ends if line break is not followed by a space
break ; } // If line break was followed by a space , then the curr + + in the for loop header will skip it
} try { val = buf . toString ( "UTF-8" ) ; } catch ( final UnsupportedEncodingException e ) { // Should not happen
throw ClassGraphException . newClassGraphException ( "UTF-8 encoding unsupported" , e ) ; } } return new SimpleEntry < > ( val . endsWith ( " " ) ? val . trim ( ) : val , curr ) ;
|
public class CommentImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
|
switch ( featureID ) { case AfplibPackage . COMMENT__COMMENT : return getComment ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
|
public class SocketIOServer { /** * Starts Socket . IO server with current configuration settings .
* @ throws IllegalStateException
* if server already started */
public synchronized void start ( ) { } }
|
if ( isStarted ( ) ) { throw new IllegalStateException ( "Failed to start Socket.IO server: server already started" ) ; } log . info ( "Socket.IO server starting" ) ; // Configure heartbeat scheduler
timer = new HashedWheelTimer ( ) ; timer . start ( ) ; SocketIOHeartbeatScheduler . setHashedWheelTimer ( timer ) ; SocketIOHeartbeatScheduler . setHeartbeatInterval ( configuration . getHeartbeatInterval ( ) ) ; SocketIOHeartbeatScheduler . setHeartbeatTimeout ( configuration . getHeartbeatTimeout ( ) ) ; // Configure and bind server
ServerBootstrapFactory bootstrapFactory = serverBootstrapFactory != null ? serverBootstrapFactory : new DefaultServerBootstrapFactory ( configuration ) ; bootstrap = bootstrapFactory . createServerBootstrap ( ) ; bootstrap . childHandler ( new SocketIOChannelInitializer ( configuration , listener , pipelineModifier ) ) ; bootstrap . bind ( configuration . getPort ( ) ) . syncUninterruptibly ( ) ; state = State . STARTED ; log . info ( "Socket.IO server started: {}" , configuration ) ;
|
public class NikeFS2LazyRandomAccessStorageImpl { /** * ( non - Javadoc )
* @ see
* org . processmining . framework . log . rfb . fsio . FS2RandomAccessStorage # write
* ( byte [ ] , int , int ) */
@ Override public synchronized void write ( byte [ ] b , int off , int len ) throws IOException { } }
|
consolidateSoftCopy ( ) ; alertSoftCopies ( ) ; super . write ( b , off , len ) ;
|
public class Utils { /** * Compare two docs fast if they are the same , excluding exclusions
* @ return true if documents are different */
public static boolean fastCompareDocs ( JsonNode sourceDocument , JsonNode destinationDocument , List < String > exclusionPaths , boolean ignoreTimestampMSDiffs ) { } }
|
try { JsonDiff diff = new JsonDiff ( ) ; diff . setOption ( JsonDiff . Option . ARRAY_ORDER_INSIGNIFICANT ) ; diff . setOption ( JsonDiff . Option . RETURN_LEAVES_ONLY ) ; diff . setFilter ( new AbstractFieldFilter ( ) { public boolean includeField ( List < String > fieldName ) { return ! fieldName . get ( fieldName . size ( ) - 1 ) . endsWith ( "#" ) ; } } ) ; List < JsonDelta > list = diff . computeDiff ( sourceDocument , destinationDocument ) ; for ( JsonDelta x : list ) { String field = x . getField ( ) ; if ( ! isExcluded ( exclusionPaths , field ) ) { if ( reallyDifferent ( x . getNode1 ( ) , x . getNode2 ( ) , ignoreTimestampMSDiffs ) ) { return true ; } } } } catch ( Exception e ) { LOGGER . error ( "Cannot compare docs:{}" , e , e ) ; } return false ;
|
public class NamespaceBlock { /** * Creates a new instance of { @ link NamespaceBlock } using
* { @ link Properties block properties } .
* @ param resourceLocation the { @ link String resource location } of the
* namespace used for exception reporting ; which cannot be { @ code null }
* @ param blockProperties the { @ link Properties block properties } to
* populate the { @ link NamespaceBlock namespace block } with , which
* cannot be { @ code null }
* @ return a new { @ link NamespaceBlock namespace block } instance , which
* will not be { @ code null }
* @ throws BELDataMissingPropertyException Thrown if a property was
* required but not provided
* @ throws BELDataConversionException Thrown if a property value could not
* be converted to the right type
* @ throws InvalidArgument Thrown if { @ code ResourceLocation } or
* { @ code blockProperties } is { @ code null } */
public static NamespaceBlock create ( final String resourceLocation , final Properties blockProperties ) throws BELDataMissingPropertyException , BELDataConversionException { } }
|
if ( resourceLocation == null ) { throw new InvalidArgument ( "resourceLocation" , resourceLocation ) ; } if ( blockProperties == null ) { throw new InvalidArgument ( "blockProperties" , blockProperties ) ; } // handle blank keyword
String keyword = blockProperties . getProperty ( PROPERTY_KEYWORD ) ; if ( StringUtils . isBlank ( keyword ) ) { throw new BELDataMissingPropertyException ( resourceLocation , BLOCK_NAME , PROPERTY_KEYWORD ) ; } // handle blank name
String name = blockProperties . getProperty ( PROPERTY_NAME ) ; if ( StringUtils . isBlank ( name ) ) { throw new BELDataMissingPropertyException ( resourceLocation , BLOCK_NAME , PROPERTY_NAME ) ; } // handle blank domain
String domain = blockProperties . getProperty ( PROPERTY_DOMAIN ) ; if ( StringUtils . isBlank ( domain ) ) { throw new BELDataMissingPropertyException ( resourceLocation , BLOCK_NAME , PROPERTY_DOMAIN ) ; } // handle blank created date time
String createdDateTime = blockProperties . getProperty ( PROPERTY_CREATED_DATE_TIME ) ; if ( StringUtils . isBlank ( createdDateTime ) ) { throw new BELDataMissingPropertyException ( resourceLocation , BLOCK_NAME , PROPERTY_CREATED_DATE_TIME ) ; } // retrieve optional properties
String species = blockProperties . getProperty ( PROPERTY_SPECIES ) ; String description = blockProperties . getProperty ( PROPERTY_DESCRIPTION ) ; String version = blockProperties . getProperty ( PROPERTY_VERSION ) ; String queryValueURL = blockProperties . getProperty ( PROPERTY_QUERY_VALUE_URL ) ; // convert to proper type
return new NamespaceBlock ( typeConvert ( resourceLocation , PROPERTY_KEYWORD , keyword , String . class ) , typeConvert ( resourceLocation , PROPERTY_NAME , name , String . class ) , typeConvert ( resourceLocation , PROPERTY_DOMAIN , domain , String . class ) , typeConvert ( resourceLocation , PROPERTY_CREATED_DATE_TIME , createdDateTime , Date . class ) , typeConvert ( resourceLocation , PROPERTY_DESCRIPTION , description , String . class ) , typeConvert ( resourceLocation , PROPERTY_SPECIES , species , String . class ) , typeConvert ( resourceLocation , PROPERTY_VERSION , version , String . class ) , typeConvert ( resourceLocation , PROPERTY_QUERY_VALUE_URL , queryValueURL , URL . class ) ) ;
|
public class Serializer {
    /**
     * Serializes output to a core data type object.
     * <p>
     * Convenience overload that delegates to the five-argument
     * {@code serialize} with three {@code null} context arguments.
     *
     * @param out Output writer
     * @param any Object to serialize
     */
    public static void serialize(Output out, Object any) {
        // The three nulls fill the middle context parameters of the full overload
        // — presumably field/getter/parent metadata; confirm against its signature.
        Serializer.serialize(out, null, null, null, any);
    }
}
|
public class AbstractReferencedValueMap { /** * Reallocates the array being used within toArray when the iterator
* returned more elements than expected , and finishes filling it from
* the iterator .
* @ param < T > the type of the elements in the array .
* @ param array the array , replete with previously stored elements
* @ param it the in - progress iterator over this collection
* @ return array containing the elements in the given array , plus any
* further elements returned by the iterator , trimmed to size */
@ SuppressWarnings ( "unchecked" ) static < T > T [ ] finishToArray ( T [ ] array , Iterator < ? > it ) { } }
|
T [ ] rp = array ; int i = rp . length ; while ( it . hasNext ( ) ) { final int cap = rp . length ; if ( i == cap ) { int newCap = ( ( cap / 2 ) + 1 ) * 3 ; if ( newCap <= cap ) { // integer overflow
if ( cap == Integer . MAX_VALUE ) { throw new OutOfMemoryError ( ) ; } newCap = Integer . MAX_VALUE ; } rp = Arrays . copyOf ( rp , newCap ) ; } rp [ ++ i ] = ( T ) it . next ( ) ; } // trim if overallocated
return ( i == rp . length ) ? rp : Arrays . copyOf ( rp , i ) ;
|
public class BurpSuiteTab { /** * Highlights the tab using Burp ' s color scheme . The highlight disappears
* after 3 seconds . */
public void highlight ( ) { } }
|
final JTabbedPane parent = ( JTabbedPane ) this . getUiComponent ( ) . getParent ( ) ; // search through tabs until we find this one
for ( int i = 0 ; i < parent . getTabCount ( ) ; i ++ ) { String title = parent . getTitleAt ( i ) ; if ( getTabCaption ( ) . equals ( title ) ) { // found this tab
// create new colored label and set it into the tab
final JLabel label = new JLabel ( getTabCaption ( ) ) ; label . setForeground ( new Color ( 0xff6633 ) ) ; parent . setTabComponentAt ( i , label ) ; // schedule a task to change back to original color
Timer timer = new Timer ( ) ; TimerTask task = new TimerTask ( ) { @ Override public void run ( ) { label . setForeground ( Color . black ) ; } } ; timer . schedule ( task , 3000 ) ; break ; } }
|
public class TypeAnnotationPosition {
    /**
     * Create a {@code TypeAnnotationPosition} for a throws clause.
     *
     * @param location The type path.
     * @param onLambda The lambda for this variable.
     * @param type_index The index of the exception.
     * @param pos The position from the associated tree node.
     */
    public static TypeAnnotationPosition methodThrows(final List<TypePathEntry> location, final JCLambda onLambda, final int type_index, final int pos) {
        // Integer.MIN_VALUE fills the two positional slots that do not apply to a
        // THROWS target — presumably "unset" sentinels; confirm against the
        // TypeAnnotationPosition constructor's parameter list.
        return new TypeAnnotationPosition(TargetType.THROWS, pos, Integer.MIN_VALUE, onLambda, type_index, Integer.MIN_VALUE, location);
    }
}
|
public class PhotosLicensesApi { /** * Sets the license for a photo .
* < br >
* This method requires authentication with ' write ' permission .
* @ param photoId ( Required ) The photo to update the license for .
* @ param licenseId ( Required ) The license to apply , or 0 ( zero ) to remove the current license .
* Note : as of this writing the " no known copyright restrictions " license ( 7 ) is not a valid argument .
* @ return object with the status of the requested operation .
* @ throws JinxException if required parameters are missing , or if there are any errors .
* @ see < a href = " https : / / www . flickr . com / services / api / flickr . photos . licenses . setLicense . html " > flickr . photos . licenses . setLicense < / a > */
public Response setLicense ( String photoId , Integer licenseId ) throws JinxException { } }
|
JinxUtils . validateParams ( photoId , licenseId ) ; Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.photos.licenses.setLicense" ) ; params . put ( "photo_id" , photoId ) ; params . put ( "license_id" , licenseId . toString ( ) ) ; return jinx . flickrPost ( params , Response . class ) ;
|
public class FSNamesystem { /** * Persist all metadata about this file .
* @ param src The string representation of the path
* @ param clientName The string representation of the client
* @ throws IOException if path does not exist */
void fsync ( String src , String clientName ) throws IOException { } }
|
NameNode . stateChangeLog . info ( "BLOCK* NameSystem.fsync: file " + src + " for " + clientName ) ; writeLock ( ) ; try { if ( isInSafeMode ( ) ) { throw new SafeModeException ( "Cannot fsync file " + src , safeMode ) ; } INodeFileUnderConstruction pendingFile = checkLease ( src , clientName ) ; // If the block has a length of zero , set it to size 1 so
// that lease recovery will not discard it .
Block last = pendingFile . getLastBlock ( ) ; if ( last . getNumBytes ( ) == 0 ) { last . setNumBytes ( 1 ) ; } dir . persistBlocks ( src , pendingFile ) ; } finally { writeUnlock ( ) ; } getEditLog ( ) . logSync ( ) ;
|
public class Origination { /** * The call distribution properties defined for your SIP hosts . Valid range : Minimum value of 1 . Maximum value of
* 20.
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setRoutes ( java . util . Collection ) } or { @ link # withRoutes ( java . util . Collection ) } if you want to override the
* existing values .
* @ param routes
* The call distribution properties defined for your SIP hosts . Valid range : Minimum value of 1 . Maximum
* value of 20.
* @ return Returns a reference to this object so that method calls can be chained together . */
public Origination withRoutes ( OriginationRoute ... routes ) { } }
|
if ( this . routes == null ) { setRoutes ( new java . util . ArrayList < OriginationRoute > ( routes . length ) ) ; } for ( OriginationRoute ele : routes ) { this . routes . add ( ele ) ; } return this ;
|
public class BeanMappingExecutor {
    /**
     * Returns the {@code BatchExecutor} used for batched set operations on the
     * mapping target, building and caching it on {@code config} the first time.
     * <p>
     * Returns {@code null} — and in most cases permanently disables batching on
     * the config — whenever any mapped field cannot participate in a batch
     * (missing name/type, a different locator class, a non-batchable behavior,
     * or a non-property set executor).
     *
     * @param param  the current mapping invocation (supplies the target instance)
     * @param config the mapping configuration that caches the executor
     * @return the cached or newly built batch executor, or {@code null} if batching is not possible
     */
    private static BatchExecutor getSetBatchExecutor(BeanMappingParam param, BeanMappingObject config) {
        BatchExecutor executor = config.getSetBatchExecutor();
        if (executor != null) { // already built — return the cached executor directly
            return executor;
        }
        if (canBatch(config.getBehavior()) == false) {
            config.setBatch(false);
            return null;
        }
        // Collect the target-side field names and argument types
        List<String> targetFields = new ArrayList<String>();
        List<Class> targetArgs = new ArrayList<Class>();
        Class locatorClass = param.getTargetRef().getClass(); // the locator class to validate each field against
        for (BeanMappingField beanField : config.getBeanFields()) {
            String targetField = beanField.getTargetField().getName();
            Class targetArg = beanField.getTargetField().getClazz();
            if (StringUtils.isEmpty(targetField) || targetArg == null) {
                return null; // cannot be handled at all
            }
            Class selfLocatorClass = beanField.getTargetField().getLocatorClass();
            if (selfLocatorClass != null && selfLocatorClass != locatorClass) {
                config.setBatch(false); // disable batching: fields are located on different classes
                return null;
            }
            if (canBatch(beanField.getBehavior()) == false) {
                config.setBatch(false);
                return null;
            }
            SetExecutor set = beanField.getSetExecutor();
            // the batch optimization only applies to property-style setters
            if (set != null && (set instanceof FastPropertySetExecutor || set instanceof PropertySetExecutor) == false) {
                config.setBatch(false);
                return null;
            }
            // record this field's name and type
            targetFields.add(targetField);
            targetArgs.add(targetArg);
        }
        // build the batch executor for the target class
        executor = Uberspector.getInstance().getBatchExecutor(locatorClass, targetFields.toArray(new String[targetFields.size()]), targetArgs.toArray(new Class[targetArgs.size()]));
        if (config.getBehavior().isDebug() && logger.isDebugEnabled()) {
            logger.debug("TargetClass[" + param.getTargetRef().getClass() + "]SetBatchExecutor is init");
        }
        config.setSetBatchExecutor(executor);
        return executor;
    }
}
|
public class CloseDownServiceImp { /** * ( non - Javadoc )
* @ see com . popbill . api . CloseDownService # CheckCorpNum */
@ Override public CorpState [ ] CheckCorpNum ( String MemberCorpNum , String [ ] CorpNumList ) throws PopbillException { } }
|
String PostData = toJsonString ( CorpNumList ) ; return httppost ( "/CloseDown" , MemberCorpNum , PostData , null , CorpState [ ] . class ) ;
|
public class RowKey { /** * Get the value of the specified column .
* @ param columnName the name of the column
* @ return the corresponding value of the column , { @ code null } if the column does not exist in the row key */
public Object getColumnValue ( String columnName ) { } }
|
for ( int j = 0 ; j < columnNames . length ; j ++ ) { if ( columnNames [ j ] . equals ( columnName ) ) { return columnValues [ j ] ; } } return null ;
|
public class AWSKafkaClient { /** * Returns a list of the broker nodes in the cluster .
* @ param listNodesRequest
* @ return Result of the ListNodes operation returned by the service .
* @ throws NotFoundException
* The resource could not be found due to incorrect input . Correct your request and then retry it .
* @ throws BadRequestException
* Bad request due to incorrect input . Correct your request and then retry it .
* @ throws InternalServerErrorException
* There was an unexpected internal server error . Retrying your request might resolve the issue .
* @ throws ForbiddenException
* Access forbidden . Check your credentials and then retry your request .
* @ sample AWSKafka . ListNodes
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / kafka - 2018-11-14 / ListNodes " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ListNodesResult listNodes ( ListNodesRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeListNodes ( request ) ;
|
public class JmsSessionImpl { /** * Indicates if the session is managed . < p >
* ( i . e . running in the application server ) . */
boolean isManaged ( ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "isManaged" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "isManaged" , isManaged ) ; return isManaged ;
|
public class Buffer {
    /**
     * Copy {@code byteCount} bytes from this, starting at {@code offset}, to
     * {@code out}.
     *
     * @param out       destination stream; must not be null
     * @param offset    byte position in this buffer to start copying from
     * @param byteCount number of bytes to copy
     * @return this buffer, for call chaining
     * @throws IOException if writing to {@code out} fails
     */
    public Buffer copyTo(OutputStream out, long offset, long byteCount) throws IOException {
        if (out == null) throw new IllegalArgumentException("out == null");
        checkOffsetAndCount(size, offset, byteCount);
        if (byteCount == 0) return this;
        // Skip segments that we aren't copying from.
        Segment s = head;
        for (; offset >= (s.limit - s.pos); s = s.next) {
            // Make offset relative to the next segment's start.
            offset -= (s.limit - s.pos);
        }
        // Copy from one segment at a time.
        for (; byteCount > 0; s = s.next) {
            int pos = (int) (s.pos + offset);
            // Copy whatever remains in this segment, capped by byteCount.
            int toCopy = (int) Math.min(s.limit - pos, byteCount);
            out.write(s.data, pos, toCopy);
            byteCount -= toCopy;
            // Only the first copied segment starts mid-segment.
            offset = 0;
        }
        return this;
    }
}
|
public class JavaParser { /** * Parses the Java code contained in a { @ link File } and returns a
* { @ link CompilationUnit } that represents it .
* @ param file
* { @ link File } containing Java source code
* @ param encoding
* encoding of the source code
* @ return CompilationUnit representing the Java source code
* @ throws ParseException
* if the source code has parser errors
* @ throws IOException */
public static CompilationUnit parse ( File file , String encoding ) throws ParseException , IOException { } }
|
FileInputStream in = new FileInputStream ( file ) ; try { return parse ( in , encoding ) ; } finally { in . close ( ) ; }
|
public class LibUtils {
    /**
     * Obtain an input stream to the resource with the given name, and write
     * it to the specified file (which may not be <code>null</code>, and
     * may not exist yet).
     *
     * @param resourceName The name of the resource
     * @param file The file to write to
     * @throws NullPointerException If the given file is <code>null</code>
     * @throws IllegalArgumentException If the given file already exists
     * @throws IOException If an IO error occurs
     */
    private static void writeResourceToFile(String resourceName, File file) throws IOException {
        if (file == null) {
            throw new NullPointerException("Target file may not be null");
        }
        if (file.exists()) {
            throw new IllegalArgumentException("Target file already exists: " + file);
        }
        InputStream inputStream = LibUtils.class.getResourceAsStream(resourceName);
        if (inputStream == null) {
            throw new IOException("No resource found with name '" + resourceName + "'");
        }
        OutputStream outputStream = null;
        try {
            outputStream = new FileOutputStream(file);
            // Copy the resource in 32 KB chunks until EOF.
            byte[] buffer = new byte[32768];
            while (true) {
                int read = inputStream.read(buffer);
                if (read < 0) {
                    break;
                }
                outputStream.write(buffer, 0, read);
            }
            outputStream.flush();
        } finally {
            // close() failures are logged rather than thrown so they never mask
            // an IOException raised during the copy itself.
            if (outputStream != null) {
                try {
                    outputStream.close();
                } catch (IOException e) {
                    logger.log(Level.SEVERE, e.getMessage(), e);
                }
            }
            try {
                inputStream.close();
            } catch (IOException e) {
                logger.log(Level.SEVERE, e.getMessage(), e);
            }
        }
    }
}
|
public class ObjectOperation {
    /**
     * Returns a new instance of MapperConstructor.
     *
     * @param dName destination name
     * @return the mapper
     */
    private MapperConstructor getMapper(String dName) {
        // dName is passed twice — presumably once as the destination name and once
        // as a derived identifier; confirm against MapperConstructor's signature.
        return new MapperConstructor(destinationType(), sourceType(), dName, dName, getSName(), configChosen, xml, methodsToGenerate);
    }
}
|
public class Strings { /** * Compare two strings .
* Both or one of them may be null .
* @ param me
* @ param you
* @ return true if object equals or intern = = , else false . */
public static boolean compare ( final String me , final String you ) { } }
|
// If both null or intern equals
if ( me == you ) return true ; // if me null and you are not
if ( me == null && you != null ) return false ; // me will not be null , test for equality
return me . equals ( you ) ;
|
public class GetResourceMetricsRequest { /** * An array of one or more queries to perform . Each query must specify a Performance Insights metric , and can
* optionally specify aggregation and filtering criteria .
* @ param metricQueries
* An array of one or more queries to perform . Each query must specify a Performance Insights metric , and can
* optionally specify aggregation and filtering criteria . */
public void setMetricQueries ( java . util . Collection < MetricQuery > metricQueries ) { } }
|
if ( metricQueries == null ) { this . metricQueries = null ; return ; } this . metricQueries = new java . util . ArrayList < MetricQuery > ( metricQueries ) ;
|
public class CPRulePersistenceImpl { /** * Returns the cp rule with the primary key or throws a { @ link com . liferay . portal . kernel . exception . NoSuchModelException } if it could not be found .
* @ param primaryKey the primary key of the cp rule
* @ return the cp rule
* @ throws NoSuchCPRuleException if a cp rule with the primary key could not be found */
@ Override public CPRule findByPrimaryKey ( Serializable primaryKey ) throws NoSuchCPRuleException { } }
|
CPRule cpRule = fetchByPrimaryKey ( primaryKey ) ; if ( cpRule == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchCPRuleException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return cpRule ;
|
public class ClusterNodeInfo {
    /**
     * Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise.
     *
     * @param field the field descriptor to test; must not be null
     * @return whether the given field has been assigned a value
     * @throws IllegalArgumentException if {@code field} is null
     * @throws IllegalStateException if the field is not a known member (unreachable for a complete enum switch)
     */
    public boolean isSet(_Fields field) {
        if (field == null) {
            throw new IllegalArgumentException();
        }
        // Dispatch to the per-field isSet accessor.
        switch (field) {
            case NAME:
                return isSetName();
            case ADDRESS:
                return isSetAddress();
            case TOTAL:
                return isSetTotal();
            case FREE:
                return isSetFree();
            case RESOURCE_INFOS:
                return isSetResourceInfos();
        }
        throw new IllegalStateException();
    }
}
|
public class StockholmFileParser {
    /**
     * Handles a {@code #=GS <seqname> <feature> <value>} line
     * (Generic per-Sequence annotation, free text), dispatching the value to
     * the matching setter on the Stockholm structure.
     *
     * @param seqName     the sequence the annotation belongs to
     * @param featureName the GS feature tag; unknown tags are logged and ignored
     * @param value       the free-text annotation value
     */
    private void handleSequenceAnnotation(String seqName, String featureName, String value) {
        if (featureName.equals(GS_ACCESSION_NUMBER)) {
            stockholmStructure.addGSAccessionNumber(seqName, value);
        } else if (featureName.equals(GS_DESCRIPTION)) {
            stockholmStructure.addGSDescription(seqName, value);
        } else if (featureName.equals(GS_DATABASE_REFERENCE)) {
            stockholmStructure.addGSdbReference(seqName, value);
        } else if (featureName.equals(GS_ORGANISM_SPECIES)) {
            stockholmStructure.addGSOrganismSpecies(seqName, value);
        } else if (featureName.equals(GS_ORGANISM_CLASSIFICATION)) {
            stockholmStructure.addGSOrganismClassification(seqName, value);
        } else if (featureName.equals(GS_LOOK)) {
            stockholmStructure.addGSLook(seqName, value);
        } else { // unknown feature — warn but keep parsing
            logger.warn("Unknown Sequence Feature [{}].\nPlease contact the Biojava team.", featureName);
        }
    }
}
|
public class ReplayRelay { /** * Creates a size - bounded replay relay .
* In this setting , the { @ code ReplayRelay } holds at most { @ code size } items in its internal buffer and
* discards the oldest item .
* When observers subscribe to a terminated { @ code ReplayRelay } , they are guaranteed to see at most
* { @ code size } { @ code onNext } events followed by a termination event .
* If an observer subscribes while the { @ code ReplayRelay } is active , it will observe all items in the
* buffer at that point in time and each item observed afterwards , even if the buffer evicts items due to
* the size constraint in the mean time . In other words , once an Observer subscribes , it will receive items
* without gaps in the sequence .
* @ param maxSize
* the maximum number of buffered items */
@ CheckReturnValue @ NonNull public static < T > ReplayRelay < T > createWithSize ( int maxSize ) { } }
|
return new ReplayRelay < T > ( new SizeBoundReplayBuffer < T > ( maxSize ) ) ;
|
public class TwitterExample {
    /**
     * Flink streaming word-count over tweets: reads from the Twitter source when
     * credentials are supplied, otherwise from bundled example data; tokenizes
     * English tweets, counts words, and prints or writes the result.
     *
     * @param args command-line flags: optional {@code --output} path and the four
     *             {@code --twitter-source.*} credential parameters
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // Checking input parameters
        final ParameterTool params = ParameterTool.fromArgs(args);
        System.out.println("Usage: TwitterExample [--output <path>] " + "[--twitter-source.consumerKey <key> --twitter-source.consumerSecret <secret> --twitter-source.token <token> --twitter-source.tokenSecret <tokenSecret>]");
        // set up the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // make parameters available in the web interface
        env.getConfig().setGlobalJobParameters(params);
        env.setParallelism(params.getInt("parallelism", 1));
        // get input data: live Twitter stream only when all four credentials are present
        DataStream<String> streamSource;
        if (params.has(TwitterSource.CONSUMER_KEY) && params.has(TwitterSource.CONSUMER_SECRET) && params.has(TwitterSource.TOKEN) && params.has(TwitterSource.TOKEN_SECRET)) {
            streamSource = env.addSource(new TwitterSource(params.getProperties()));
        } else {
            System.out.println("Executing TwitterStream example with default props.");
            System.out.println("Use --twitter-source.consumerKey <key> --twitter-source.consumerSecret <secret> " + "--twitter-source.token <token> --twitter-source.tokenSecret <tokenSecret> specify the authentication info.");
            // get default test text data
            streamSource = env.fromElements(TwitterExampleData.TEXTS);
        }
        DataStream<Tuple2<String, Integer>> tweets = streamSource
                // selecting English tweets and splitting to (word, 1)
                .flatMap(new SelectEnglishAndTokenizeFlatMap())
                // group by words and sum their occurrences
                .keyBy(0).sum(1);
        // emit result
        if (params.has("output")) {
            tweets.writeAsText(params.get("output"));
        } else {
            System.out.println("Printing result to stdout. Use --output to specify output path.");
            tweets.print();
        }
        // execute program
        env.execute("Twitter Streaming Example");
    }
}
|
public class StatementServiceImp {
    /**
     * Updates the e-mail notification flag for the given e-mail type.
     *
     * @see com.popbill.api.StatementService#updateEmailConfig(java.lang.String, java.lang.String, java.lang.Boolean)
     */
    @Override
    public Response updateEmailConfig(String CorpNum, String EmailType, Boolean SendYN) throws PopbillException {
        // Delegate to the four-argument overload with a null final argument —
        // presumably an optional user id; confirm against that overload.
        return updateEmailConfig(CorpNum, EmailType, SendYN, null);
    }
}
|
public class SherlockListActivity {
    /**
     * Adds an additional content view to the activity, routing through the
     * ActionBarSherlock delegate rather than the framework directly.
     *
     * @param view   the view to add
     * @param params layout parameters for the view
     */
    @Override
    public void addContentView(View view, LayoutParams params) {
        getSherlock().addContentView(view, params);
    }
}
|
public class DeltaIterator { /** * converts utf - 8 encoded hex to int by building it digit by digit . */
private int getNumBlocks ( R delta ) { } }
|
ByteBuffer content = getValue ( delta ) ; int numBlocks = 0 ; // build numBlocks by adding together each hex digit
for ( int i = 0 ; i < _prefixLength ; i ++ ) { byte b = content . get ( i ) ; numBlocks = numBlocks << 4 | ( b <= '9' ? b - '0' : b - 'A' + 10 ) ; } return numBlocks ;
|
public class BeangleFreemarkerManager { /** * The default template loader is a MultiTemplateLoader which includes
* BeangleClassTemplateLoader ( classpath : ) and a WebappTemplateLoader
* ( webapp : ) and FileTemplateLoader ( file : ) . All template path described
* in init parameter templatePath or TemplatePlath
* The ClassTemplateLoader will resolve fully qualified template includes that begin with a slash .
* for example / com / company / template / common . ftl
* The WebappTemplateLoader attempts to resolve templates relative to the web root folder */
@ Override protected TemplateLoader createTemplateLoader ( ServletContext servletContext , String templatePath ) { } }
|
// construct a FileTemplateLoader for the init - param ' TemplatePath '
String [ ] paths = split ( templatePath , "," ) ; List < TemplateLoader > loaders = CollectUtils . newArrayList ( ) ; for ( String path : paths ) { if ( path . startsWith ( "class://" ) ) { loaders . add ( new BeangleClassTemplateLoader ( substringAfter ( path , "class://" ) ) ) ; } else if ( path . startsWith ( "file://" ) ) { try { loaders . add ( new FileTemplateLoader ( new File ( substringAfter ( path , "file://" ) ) ) ) ; } catch ( IOException e ) { throw new RuntimeException ( "templatePath: " + path + " cannot be accessed" , e ) ; } } else if ( path . startsWith ( "webapp://" ) ) { loaders . add ( new WebappTemplateLoader ( servletContext , substringAfter ( path , "webapp://" ) ) ) ; } else { throw new RuntimeException ( "templatePath: " + path + " is not well-formed. Use [class://|file://|webapp://] seperated with ," ) ; } } return new MultiTemplateLoader ( loaders . toArray ( new TemplateLoader [ loaders . size ( ) ] ) ) ;
|
public class GraphViz { /** * Writes the graph ' s image in a file .
* @ param img
* A byte array containing the image of the graph .
* @ param file
* Name of the file to where we want to write .
* @ return Success : 1 , Failure : - 1 */
public int writeGraphToFile ( byte [ ] img , String file ) { } }
|
File to = new File ( file ) ; return writeGraphToFile ( img , to ) ;
|
public class DisassemblyTool {
    /**
     * Disassembles a class file, sending the results to standard out.
     * <pre>
     * DisassemblyTool [-f &lt;format style&gt;] &lt;file or class name&gt;
     * </pre>
     * The format style may be "assembly" (the default) or "builder".
     */
    public static void main(String[] args) throws Exception {
        if (args.length == 0) {
            // No arguments: print usage and exit.
            System.out.println("DisassemblyTool [-f <format style>] <file or class name>");
            System.out.println();
            System.out.println("The format style may be \"assembly\" (the default) or \"builder\"");
            return;
        }
        // Parse the optional -f flag; the remaining argument is the target.
        String style;
        String name;
        if ("-f".equals(args[0])) {
            style = args[1];
            name = args[2];
        } else {
            style = "assembly";
            name = args[0];
        }
        // First try the argument as a file on disk; on FileNotFoundException
        // fall back to resolving it as a class name on the classpath.
        ClassFileDataLoader loader;
        InputStream in;
        try {
            final File file = new File(name);
            in = new FileInputStream(file);
            // Referenced classes (e.g. inner classes) are looked up as sibling
            // .class files in the same directory as the given file.
            loader = new ClassFileDataLoader() {
                public InputStream getClassData(String name) throws IOException {
                    name = name.substring(name.lastIndexOf('.') + 1);
                    File f = new File(file.getParentFile(), name + ".class");
                    if (f.exists()) {
                        return new FileInputStream(f);
                    }
                    return null;
                }
            };
        } catch (FileNotFoundException e) {
            if (name.endsWith(".class")) {
                // An explicit .class path that doesn't exist is a hard error.
                System.err.println(e);
                return;
            }
            loader = new ResourceClassFileDataLoader();
            in = loader.getClassData(name);
            if (in == null) {
                System.err.println(e);
                return;
            }
        }
        in = new BufferedInputStream(in);
        ClassFile cf = ClassFile.readFrom(in, loader, null);
        PrintWriter out = new PrintWriter(System.out);
        // Select the printer implementation for the requested style.
        Printer p;
        if (style == null || style.equals("assembly")) {
            p = new AssemblyStylePrinter();
        } else if (style.equals("builder")) {
            p = new BuilderStylePrinter();
        } else {
            System.err.println("Unknown format style: " + style);
            return;
        }
        p.disassemble(cf, out);
        out.flush();
    }
}
|
public class CmsAppHierarchyBuilder {
    /**
     * Builds the tree of categories and apps.<p>
     * This tree will only include those categories which are reachable by following the parent chain of
     * an available app configuration up to the root category (null).
     *
     * @return the root node of the tree
     */
    public CmsAppCategoryNode buildHierarchy() {
        // STEP 0: Initialize everything and sort categories by priority
        Collections.sort(m_appCategoryList, new Comparator<I_CmsAppCategory>() {
            public int compare(I_CmsAppCategory cat1, I_CmsAppCategory cat2) {
                return ComparisonChain.start().compare(cat1.getPriority(), cat2.getPriority()).result();
            }
        });
        m_rootNode = new CmsAppCategoryNode();
        m_nodes.clear();
        // The null key represents the root category.
        m_nodes.put(null, m_rootNode);
        // STEP 1: Create a node for each category
        for (I_CmsAppCategory category : m_appCategoryList) {
            m_nodes.put(category.getId(), new CmsAppCategoryNode(category));
        }
        // STEP 2: Assign category nodes to nodes for their parent category
        for (CmsAppCategoryNode node : m_nodes.values()) {
            if (node != m_rootNode) {
                addNodeToItsParent(node);
            }
        }
        // STEP 3: Assign app configs to category nodes
        for (I_CmsWorkplaceAppConfiguration appConfig : m_appConfigs) {
            addAppConfigToCategory(appConfig);
        }
        // STEP 4: Validate whether there are unused categories / apps (log only)
        Set<String> usedNodes = findReachableNodes(m_rootNode, new HashSet<String>());
        if (usedNodes.size() < m_nodes.size()) {
            LOG.warn("Unused app categories: " + Sets.difference(m_nodes.keySet(), usedNodes));
        }
        Set<String> unusedApps = Sets.newHashSet();
        for (I_CmsWorkplaceAppConfiguration appConfig : m_appConfigs) {
            if (!usedNodes.contains(appConfig.getAppCategory())) {
                unusedApps.add(appConfig.getId());
            }
        }
        if (unusedApps.size() > 0) {
            LOG.warn("Unused apps: " + unusedApps);
        }
        // STEP 5: Remove parts of the hierarchy which don't contain any apps
        m_rootNode.removeApplessSubtrees();
        // STEP 6: Sort all categories and app configurations for each node
        m_rootNode.sortRecursively();
        return m_rootNode;
    }
}
|
public class MySQLPacketPayload { /** * Read lenenc integer from byte buffers .
* @ see < a href = " https : / / dev . mysql . com / doc / internals / en / integer . html # packet - Protocol : : LengthEncodedInteger " > LengthEncodedInteger < / a >
* @ return lenenc integer */
public long readIntLenenc ( ) { } }
|
int firstByte = readInt1 ( ) ; if ( firstByte < 0xfb ) { return firstByte ; } if ( 0xfb == firstByte ) { return 0 ; } if ( 0xfc == firstByte ) { return byteBuf . readShortLE ( ) ; } if ( 0xfd == firstByte ) { return byteBuf . readMediumLE ( ) ; } return byteBuf . readLongLE ( ) ;
|
public class AbstractDataDistributionType {
    /**
     * {@inheritDoc}
     */
    public Node getOrCreateDataNode(Node rootNode, String dataId, String nodeType) throws RepositoryException {
        // Delegate to the four-argument overload with a null final argument.
        return getOrCreateDataNode(rootNode, dataId, nodeType, null);
    }
}
|
public class ControlBeanContext { /** * / * package */
String getControlID ( ) { } }
|
if ( _controlID != null || _bean == null ) return _controlID ; // Initially set to the local beans relative ID
String id = _bean . getLocalID ( ) ; // If there is a parent context , prepend its ID and the ID separator
BeanContext bc = getBeanContext ( ) ; if ( bc != null && bc instanceof ControlBeanContext ) { String parentID = ( ( ControlBeanContext ) bc ) . getControlID ( ) ; if ( parentID != null ) { id = parentID + org . apache . beehive . controls . api . bean . ControlBean . IDSeparator + id ; } } // Cache the computed value
_controlID = id ; return id ;
|
public class Strman { /** * Count the number of times substr appears in value
* @ param value input
* @ param subStr search string
* @ param caseSensitive whether search should be case sensitive
* @ param allowOverlapping boolean to take into account overlapping
* @ return count of times substring exists */
public static long countSubstr ( final String value , final String subStr , final boolean caseSensitive , boolean allowOverlapping ) { } }
|
validate ( value , NULL_STRING_PREDICATE , NULL_STRING_MSG_SUPPLIER ) ; return countSubstr ( caseSensitive ? value : value . toLowerCase ( ) , caseSensitive ? subStr : subStr . toLowerCase ( ) , allowOverlapping , 0L ) ;
|
public class DeviceUpdate { /** * Get the raw data bytes of the device update packet .
* @ return the data sent by the device to update its status */
public byte [ ] getPacketBytes ( ) { } }
|
byte [ ] result = new byte [ packetBytes . length ] ; System . arraycopy ( packetBytes , 0 , result , 0 , packetBytes . length ) ; return result ;
|
public class SecurityFunctions { /** * Applies an AES decryption on the byte array { @ code cipherBytes } with the given key . The key has to be the same
 * key used during encryption , else null is returned
 * @ param cipherBytes the byte array to decrypt
 * @ param key the key to use during the decryption
 * @ return the decrypted string if everything was successful , else null */
public String decryptAES ( byte [ ] cipherBytes , SecretKey key ) { } }
|
// NOTE(review): the bare "AES" transformation leaves mode/padding to the provider default
// (typically ECB with PKCS5 padding for BC) -- confirm this matches the encryption side.
// NOTE(review): the BouncyCastle provider is (re-)registered on every call; Security.addProvider
// is a no-op when already installed, but the registration could be hoisted to static init.
// Any failure (bad key, bad padding, wrong block size, missing provider) is logged and
// results in a null return, per the documented contract.
String plainText = null ; Security . addProvider ( new org . bouncycastle . jce . provider . BouncyCastleProvider ( ) ) ; try { Cipher cipher = Cipher . getInstance ( "AES" , "BC" ) ; cipher . init ( Cipher . DECRYPT_MODE , key ) ; byte [ ] bytePlainText = cipher . doFinal ( cipherBytes ) ; plainText = new String ( bytePlainText , "UTF-8" ) ; } catch ( IllegalBlockSizeException | InvalidKeyException | NoSuchAlgorithmException | BadPaddingException | NoSuchPaddingException | UnsupportedEncodingException | NoSuchProviderException e ) { logger . error ( "Unable to apply AES decryption" , e ) ; } return plainText ;
|
public class InternalXbaseWithAnnotationsParser { /** * InternalXbaseWithAnnotations . g : 2817:1 : ruleXSetLiteral returns [ EObject current = null ] : ( ( ) otherlv _ 1 = ' # ' otherlv _ 2 = ' { ' ( ( ( lv _ elements _ 3_0 = ruleXExpression ) ) ( otherlv _ 4 = ' , ' ( ( lv _ elements _ 5_0 = ruleXExpression ) ) ) * ) ? otherlv _ 6 = ' } ' ) ; */
public final EObject ruleXSetLiteral ( ) throws RecognitionException { } }
|
// Generated by ANTLR from InternalXbaseWithAnnotations.g -- do not edit by hand.
// Parses a set literal of the form #{ expr (, expr)* } : token 18 is '#', 55 is '{',
// 15 is ',', 56 is '}'. Model elements are only built when not backtracking.
EObject current = null ; Token otherlv_1 = null ; Token otherlv_2 = null ; Token otherlv_4 = null ; Token otherlv_6 = null ; EObject lv_elements_3_0 = null ; EObject lv_elements_5_0 = null ; enterRule ( ) ; try { // InternalXbaseWithAnnotations . g : 2823:2 : ( ( ( ) otherlv _ 1 = ' # ' otherlv _ 2 = ' { ' ( ( ( lv _ elements _ 3_0 = ruleXExpression ) ) ( otherlv _ 4 = ' , ' ( ( lv _ elements _ 5_0 = ruleXExpression ) ) ) * ) ? otherlv _ 6 = ' } ' ) )
// InternalXbaseWithAnnotations . g : 2824:2 : ( ( ) otherlv _ 1 = ' # ' otherlv _ 2 = ' { ' ( ( ( lv _ elements _ 3_0 = ruleXExpression ) ) ( otherlv _ 4 = ' , ' ( ( lv _ elements _ 5_0 = ruleXExpression ) ) ) * ) ? otherlv _ 6 = ' } ' )
{ // InternalXbaseWithAnnotations . g : 2824:2 : ( ( ) otherlv _ 1 = ' # ' otherlv _ 2 = ' { ' ( ( ( lv _ elements _ 3_0 = ruleXExpression ) ) ( otherlv _ 4 = ' , ' ( ( lv _ elements _ 5_0 = ruleXExpression ) ) ) * ) ? otherlv _ 6 = ' } ' )
// InternalXbaseWithAnnotations . g : 2825:3 : ( ) otherlv _ 1 = ' # ' otherlv _ 2 = ' { ' ( ( ( lv _ elements _ 3_0 = ruleXExpression ) ) ( otherlv _ 4 = ' , ' ( ( lv _ elements _ 5_0 = ruleXExpression ) ) ) * ) ? otherlv _ 6 = ' } '
{ // InternalXbaseWithAnnotations . g : 2825:3 : ( )
// InternalXbaseWithAnnotations . g : 2826:4:
{ if ( state . backtracking == 0 ) { current = forceCreateModelElement ( grammarAccess . getXSetLiteralAccess ( ) . getXSetLiteralAction_0 ( ) , current ) ; } } otherlv_1 = ( Token ) match ( input , 18 , FOLLOW_39 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_1 , grammarAccess . getXSetLiteralAccess ( ) . getNumberSignKeyword_1 ( ) ) ; } otherlv_2 = ( Token ) match ( input , 55 , FOLLOW_40 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_2 , grammarAccess . getXSetLiteralAccess ( ) . getLeftCurlyBracketKeyword_2 ( ) ) ; } // InternalXbaseWithAnnotations . g : 2840:3 : ( ( ( lv _ elements _ 3_0 = ruleXExpression ) ) ( otherlv _ 4 = ' , ' ( ( lv _ elements _ 5_0 = ruleXExpression ) ) ) * ) ?
int alt49 = 2 ; int LA49_0 = input . LA ( 1 ) ; if ( ( ( LA49_0 >= RULE_STRING && LA49_0 <= RULE_ID ) || LA49_0 == 14 || ( LA49_0 >= 18 && LA49_0 <= 19 ) || LA49_0 == 26 || ( LA49_0 >= 42 && LA49_0 <= 43 ) || LA49_0 == 48 || LA49_0 == 55 || LA49_0 == 59 || LA49_0 == 61 || ( LA49_0 >= 65 && LA49_0 <= 67 ) || ( LA49_0 >= 70 && LA49_0 <= 82 ) || LA49_0 == 84 ) ) { alt49 = 1 ; } switch ( alt49 ) { case 1 : // InternalXbaseWithAnnotations . g : 2841:4 : ( ( lv _ elements _ 3_0 = ruleXExpression ) ) ( otherlv _ 4 = ' , ' ( ( lv _ elements _ 5_0 = ruleXExpression ) ) ) *
{ // InternalXbaseWithAnnotations . g : 2841:4 : ( ( lv _ elements _ 3_0 = ruleXExpression ) )
// InternalXbaseWithAnnotations . g : 2842:5 : ( lv _ elements _ 3_0 = ruleXExpression )
{ // InternalXbaseWithAnnotations . g : 2842:5 : ( lv _ elements _ 3_0 = ruleXExpression )
// InternalXbaseWithAnnotations . g : 2843:6 : lv _ elements _ 3_0 = ruleXExpression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXSetLiteralAccess ( ) . getElementsXExpressionParserRuleCall_3_0_0 ( ) ) ; } pushFollow ( FOLLOW_41 ) ; lv_elements_3_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXSetLiteralRule ( ) ) ; } add ( current , "elements" , lv_elements_3_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalXbaseWithAnnotations . g : 2860:4 : ( otherlv _ 4 = ' , ' ( ( lv _ elements _ 5_0 = ruleXExpression ) ) ) *
loop48 : do { int alt48 = 2 ; int LA48_0 = input . LA ( 1 ) ; if ( ( LA48_0 == 15 ) ) { alt48 = 1 ; } switch ( alt48 ) { case 1 : // InternalXbaseWithAnnotations . g : 2861:5 : otherlv _ 4 = ' , ' ( ( lv _ elements _ 5_0 = ruleXExpression ) )
{ otherlv_4 = ( Token ) match ( input , 15 , FOLLOW_9 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_4 , grammarAccess . getXSetLiteralAccess ( ) . getCommaKeyword_3_1_0 ( ) ) ; } // InternalXbaseWithAnnotations . g : 2865:5 : ( ( lv _ elements _ 5_0 = ruleXExpression ) )
// InternalXbaseWithAnnotations . g : 2866:6 : ( lv _ elements _ 5_0 = ruleXExpression )
{ // InternalXbaseWithAnnotations . g : 2866:6 : ( lv _ elements _ 5_0 = ruleXExpression )
// InternalXbaseWithAnnotations . g : 2867:7 : lv _ elements _ 5_0 = ruleXExpression
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXSetLiteralAccess ( ) . getElementsXExpressionParserRuleCall_3_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_41 ) ; lv_elements_5_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXSetLiteralRule ( ) ) ; } add ( current , "elements" , lv_elements_5_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop48 ; } } while ( true ) ; } break ; } otherlv_6 = ( Token ) match ( input , 56 , FOLLOW_2 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_6 , grammarAccess . getXSetLiteralAccess ( ) . getRightCurlyBracketKeyword_4 ( ) ) ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
|
public class ServiceCatalog { /** * < p > Setter for item . < / p >
* @ param pItem reference */
@ Override public final void setItem ( final ServiceToSale pItem ) { } }
|
this . item = pItem ; if ( getItsId ( ) == null ) { setItsId ( new ServiceCatalogId ( ) ) ; } getItsId ( ) . setItem ( this . item ) ;
|
public class ExpressionFilter { /** * Determines if event matches the filter .
* @ param event logging event ;
* @ return { @ link Filter # NEUTRAL } is there is no string match . */
public int decide ( final LoggingEvent event ) { } }
|
if ( expressionRule . evaluate ( event , null ) ) { if ( acceptOnMatch ) { return Filter . ACCEPT ; } else { return Filter . DENY ; } } return Filter . NEUTRAL ;
|
public class GaliosFieldTableOps { /** * Performs polynomial division using a synthetic division algorithm .
 * < p > Coefficients for largest powers are first , e . g . 2 * x * * 3 + 8 * x * * 2 + 1 = [ 2,8,0,1 ] < / p >
 * @ param dividend ( Input ) Polynomial dividend
 * @ param divisor ( Input ) Polynomial divisor
 * @ param quotient ( Output ) Division ' s quotient
 * @ param remainder ( Output ) Division ' s remainder */
public void polyDivide ( GrowQueue_I8 dividend , GrowQueue_I8 divisor , GrowQueue_I8 quotient , GrowQueue_I8 remainder ) { } }
|
// handle special case
// If the divisor's degree exceeds the dividend's, the quotient is empty and the
// remainder is the dividend itself; otherwise work in-place on a copy of the dividend.
if ( divisor . size > dividend . size ) { remainder . setTo ( dividend ) ; quotient . resize ( 0 ) ; return ; } else { remainder . resize ( divisor . size - 1 ) ; quotient . setTo ( dividend ) ; } int normalizer = divisor . data [ 0 ] & 0xFF ; int N = dividend . size - divisor . size + 1 ; for ( int i = 0 ; i < N ; i ++ ) { quotient . data [ i ] = ( byte ) divide ( quotient . data [ i ] & 0xFF , normalizer ) ; int coef = quotient . data [ i ] & 0xFF ; if ( coef != 0 ) { // division by zero is undefined .
for ( int j = 1 ; j < divisor . size ; j ++ ) { // skip the first coefficient in synthetic division
int div_j = divisor . data [ j ] & 0xFF ; if ( div_j != 0 ) { // log ( 0 ) is undefined .
// Subtraction and addition are both XOR in this Galois field.
quotient . data [ i + j ] ^= multiply ( div_j , coef ) ; } } } } // quotient currently contains the quotient and remainder . Copy remainder into its own polynomial
System . arraycopy ( quotient . data , quotient . size - remainder . size , remainder . data , 0 , remainder . size ) ; quotient . size -= remainder . size ;
|
public class PhysicalEntityWrapper { /** * Get all related Conversions of the given Interaction set .
* @ param inters Interactions to query
* @ return Related Conversions */
private Set < Conversion > getRelatedConversions ( Collection < Interaction > inters ) { } }
|
Set < Conversion > set = new HashSet < Conversion > ( ) ; for ( Interaction inter : inters ) { if ( inter instanceof Conversion ) { set . add ( ( Conversion ) inter ) ; } else if ( inter instanceof Control ) { getRelatedConversions ( ( Control ) inter , set ) ; } } return set ;
|
public class CertUtil { /** Loads the sensitive-information encryption certificate from the path configured
 * in acp_sdk.properties (SDKConfig encryptCert path). Logs a warning and leaves the
 * certificate unset when no path is configured. */
private static void initEncryptCert ( ) { } }
|
// Log the configured path, then load the certificate only when a path is present.
LogUtil . writeLog ( "加载敏感信息加密证书==>" + SDKConfig . getConfig ( ) . getEncryptCertPath ( ) ) ; if ( ! isEmpty ( SDKConfig . getConfig ( ) . getEncryptCertPath ( ) ) ) { encryptCert = initCert ( SDKConfig . getConfig ( ) . getEncryptCertPath ( ) ) ; LogUtil . writeLog ( "Load EncryptCert Successful" ) ; } else { LogUtil . writeLog ( "WARN: acpsdk.encryptCert.path is empty" ) ; }
|
public class BlockBasedDataStoreTools { /** * Delete the filesystem of a store */
public static void delete ( String baseName , File dataFolder , boolean force ) throws DataStoreException { } }
|
File [ ] journalFiles = findJournalFiles ( baseName , dataFolder ) ; if ( journalFiles . length > 0 ) { if ( force ) { for ( int i = 0 ; i < journalFiles . length ; i ++ ) { if ( ! journalFiles [ i ] . delete ( ) ) throw new DataStoreException ( "Cannot delete file : " + journalFiles [ i ] . getAbsolutePath ( ) ) ; } } else throw new DataStoreException ( "Journal file exist : " + journalFiles [ 0 ] . getAbsolutePath ( ) ) ; } File atFile = new File ( dataFolder , baseName + AbstractBlockBasedDataStore . ALLOCATION_TABLE_SUFFIX ) ; if ( atFile . exists ( ) ) if ( ! atFile . delete ( ) ) throw new DataStoreException ( "Cannot delete file : " + atFile . getAbsolutePath ( ) ) ; File dataFile = new File ( dataFolder , baseName + AbstractBlockBasedDataStore . DATA_FILE_SUFFIX ) ; if ( dataFile . exists ( ) ) if ( ! dataFile . delete ( ) ) throw new DataStoreException ( "Cannot delete file : " + dataFile . getAbsolutePath ( ) ) ;
|
public class CProductPersistenceImpl { /** * Removes all the c products where groupId = & # 63 ; from the database .
* @ param groupId the group ID */
@ Override public void removeByGroupId ( long groupId ) { } }
|
for ( CProduct cProduct : findByGroupId ( groupId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( cProduct ) ; }
|
public class PresentationManager { /** * Executes a custom loader . */
private void customLoad ( ) { } }
|
final String s = getCfg ( ) . getProperty ( "custom-loader" ) ; if ( s != null ) { try { final CustomLoader customLoader = ( CustomLoader ) Class . forName ( s ) . newInstance ( ) ; logItem ( "Custom Loader" , s , "*" ) ; customLoader . execute ( this ) ; } catch ( ClassNotFoundException e ) { error = true ; logItem ( "Custom Loader" , s , "?" ) ; } catch ( Exception e ) { error = true ; error ( e ) ; logItem ( "Custom Loader Failed" , s , "!" ) ; } }
|
public class Collections { /** * Returns a set backed by the specified map . The resulting set displays
* the same ordering , concurrency , and performance characteristics as the
* backing map . In essence , this factory method provides a { @ link Set }
* implementation corresponding to any { @ link Map } implementation . There
* is no need to use this method on a { @ link Map } implementation that
* already has a corresponding { @ link Set } implementation ( such as { @ link
* HashMap } or { @ link TreeMap } ) .
* < p > Each method invocation on the set returned by this method results in
* exactly one method invocation on the backing map or its < tt > keySet < / tt >
* view , with one exception . The < tt > addAll < / tt > method is implemented
* as a sequence of < tt > put < / tt > invocations on the backing map .
* < p > The specified map must be empty at the time this method is invoked ,
* and should not be accessed directly after this method returns . These
* conditions are ensured if the map is created empty , passed directly
* to this method , and no reference to the map is retained , as illustrated
* in the following code fragment :
* < pre >
* Set & lt ; Object & gt ; weakHashSet = Collections . newSetFromMap (
* new WeakHashMap & lt ; Object , Boolean & gt ; ( ) ) ;
* < / pre >
* @ param < E > the class of the map keys and of the objects in the
* returned set
* @ param map the backing map
* @ return the set backed by the map
* @ throws IllegalArgumentException if < tt > map < / tt > is not empty
* @ since 1.6 */
public static < E > Set < E > newSetFromMap ( Map < E , Boolean > map ) { } }
|
return new SetFromMap < > ( map ) ;
|
public class ProjectedCentroid { /** * Static Constructor from a relation .
* @ param dims Dimensions to use ( indexed with 0)
* @ param relation Relation to process
* @ param ids IDs to process
* @ return Centroid */
public static ProjectedCentroid make ( long [ ] dims , Relation < ? extends NumberVector > relation , DBIDs ids ) { } }
|
ProjectedCentroid c = new ProjectedCentroid ( dims , RelationUtil . dimensionality ( relation ) ) ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { c . put ( relation . get ( iter ) ) ; } return c ;
|
public class TokenCompleteTextView { /** * Add the TextChangedListeners */
protected void addListeners ( ) { } }
|
Editable text = getText ( ) ; if ( text != null ) { text . setSpan ( spanWatcher , 0 , text . length ( ) , Spanned . SPAN_INCLUSIVE_INCLUSIVE ) ; addTextChangedListener ( textWatcher ) ; }
|
public class HijrahDate { /** * Returns month - of - year . 0 - based .
* @ param dayOfYear day - of - year
* @ param year a year
* @ return month - of - year */
private static int getMonthOfYear ( int dayOfYear , int year ) { } }
|
Integer [ ] newMonths = getAdjustedMonthDays ( year ) ; if ( dayOfYear >= 0 ) { for ( int i = 0 ; i < newMonths . length ; i ++ ) { if ( dayOfYear < newMonths [ i ] . intValue ( ) ) { return i - 1 ; } } return 11 ; } else { dayOfYear = ( isLeapYear ( year ) ? ( dayOfYear + 355 ) : ( dayOfYear + 354 ) ) ; for ( int i = 0 ; i < newMonths . length ; i ++ ) { if ( dayOfYear < newMonths [ i ] . intValue ( ) ) { return i - 1 ; } } return 11 ; }
|
public class cudaResourceViewFormat { /** * Returns the String identifying the given cudaResourceViewFormat
 * @ param m The cudaResourceViewFormat
 * @ return The String identifying the given cudaResourceViewFormat */
public static String stringFor ( int m ) { } }
|
// Exhaustive one-to-one mapping from each cudaResourceViewFormat constant to its
// symbolic name; unrecognized values fall through to the INVALID marker string.
switch ( m ) { case cudaResViewFormatNone : return "cudaResViewFormatNone" ; case cudaResViewFormatUnsignedChar1 : return "cudaResViewFormatUnsignedChar1" ; case cudaResViewFormatUnsignedChar2 : return "cudaResViewFormatUnsignedChar2" ; case cudaResViewFormatUnsignedChar4 : return "cudaResViewFormatUnsignedChar4" ; case cudaResViewFormatSignedChar1 : return "cudaResViewFormatSignedChar1" ; case cudaResViewFormatSignedChar2 : return "cudaResViewFormatSignedChar2" ; case cudaResViewFormatSignedChar4 : return "cudaResViewFormatSignedChar4" ; case cudaResViewFormatUnsignedShort1 : return "cudaResViewFormatUnsignedShort1" ; case cudaResViewFormatUnsignedShort2 : return "cudaResViewFormatUnsignedShort2" ; case cudaResViewFormatUnsignedShort4 : return "cudaResViewFormatUnsignedShort4" ; case cudaResViewFormatSignedShort1 : return "cudaResViewFormatSignedShort1" ; case cudaResViewFormatSignedShort2 : return "cudaResViewFormatSignedShort2" ; case cudaResViewFormatSignedShort4 : return "cudaResViewFormatSignedShort4" ; case cudaResViewFormatUnsignedInt1 : return "cudaResViewFormatUnsignedInt1" ; case cudaResViewFormatUnsignedInt2 : return "cudaResViewFormatUnsignedInt2" ; case cudaResViewFormatUnsignedInt4 : return "cudaResViewFormatUnsignedInt4" ; case cudaResViewFormatSignedInt1 : return "cudaResViewFormatSignedInt1" ; case cudaResViewFormatSignedInt2 : return "cudaResViewFormatSignedInt2" ; case cudaResViewFormatSignedInt4 : return "cudaResViewFormatSignedInt4" ; case cudaResViewFormatHalf1 : return "cudaResViewFormatHalf1" ; case cudaResViewFormatHalf2 : return "cudaResViewFormatHalf2" ; case cudaResViewFormatHalf4 : return "cudaResViewFormatHalf4" ; case cudaResViewFormatFloat1 : return "cudaResViewFormatFloat1" ; case cudaResViewFormatFloat2 : return "cudaResViewFormatFloat2" ; case cudaResViewFormatFloat4 : return "cudaResViewFormatFloat4" ; case cudaResViewFormatUnsignedBlockCompressed1 : return "cudaResViewFormatUnsignedBlockCompressed1" ; case 
cudaResViewFormatUnsignedBlockCompressed2 : return "cudaResViewFormatUnsignedBlockCompressed2" ; case cudaResViewFormatUnsignedBlockCompressed3 : return "cudaResViewFormatUnsignedBlockCompressed3" ; case cudaResViewFormatUnsignedBlockCompressed4 : return "cudaResViewFormatUnsignedBlockCompressed4" ; case cudaResViewFormatSignedBlockCompressed4 : return "cudaResViewFormatSignedBlockCompressed4" ; case cudaResViewFormatUnsignedBlockCompressed5 : return "cudaResViewFormatUnsignedBlockCompressed5" ; case cudaResViewFormatSignedBlockCompressed5 : return "cudaResViewFormatSignedBlockCompressed5" ; case cudaResViewFormatUnsignedBlockCompressed6H : return "cudaResViewFormatUnsignedBlockCompressed6H" ; case cudaResViewFormatSignedBlockCompressed6H : return "cudaResViewFormatSignedBlockCompressed6H" ; case cudaResViewFormatUnsignedBlockCompressed7 : return "cudaResViewFormatUnsignedBlockCompressed7" ; } return "INVALID cudaResourceViewFormat: " + m ;
|
public class ViewQuery { /** * Sets the response in the event of an error .
* See the " OnError " enum for more details on the available options .
* @ param onError The appropriate error handling type .
* @ return the { @ link ViewQuery } object for proper chaining . */
public ViewQuery onError ( final OnError onError ) { } }
|
params [ PARAM_ONERROR_OFFSET ] = "on_error" ; params [ PARAM_ONERROR_OFFSET + 1 ] = onError . identifier ( ) ; return this ;
|
public class RabbitMqUtils { /** * Declares the global exchanges ( those that do not depend on an application ) .
* It includes the DM exchange and the one for inter - application exchanges .
* @ param channel the RabbitMQ channel
* @ throws IOException if an error occurs */
public static void declareGlobalExchanges ( String domain , Channel channel ) throws IOException { } }
|
// " topic " is a keyword for RabbitMQ .
channel . exchangeDeclare ( buildExchangeNameForTheDm ( domain ) , "topic" ) ; channel . exchangeDeclare ( buildExchangeNameForInterApp ( domain ) , "topic" ) ;
|
public class Fingerprint { /** Loads a {@link Fingerprint} from its persisted XML record file (package-private).
 * Returns {@code null} when the file does not exist, is zero-length (treated as disk
 * corruption and deleted), or contains malformed XML (also deleted).
 * @ param file the fingerprint record file
 * @ return the loaded fingerprint , or null when missing or unrecoverable
 * @ throws IOException for read failures other than the recoverable corruption cases */
static @ CheckForNull Fingerprint load ( @ Nonnull File file ) throws IOException { } }
|
// Reads the XML, validates the deserialized type, restores the facet back-pointers,
// and optionally logs load timing at FINE level.
XmlFile configFile = getConfigFile ( file ) ; if ( ! configFile . exists ( ) ) return null ; long start = 0 ; if ( logger . isLoggable ( Level . FINE ) ) start = System . currentTimeMillis ( ) ; try { Object loaded = configFile . read ( ) ; if ( ! ( loaded instanceof Fingerprint ) ) { throw new IOException ( "Unexpected Fingerprint type. Expected " + Fingerprint . class + " or subclass but got " + ( loaded != null ? loaded . getClass ( ) : "null" ) ) ; } Fingerprint f = ( Fingerprint ) loaded ; if ( logger . isLoggable ( Level . FINE ) ) logger . fine ( "Loading fingerprint " + file + " took " + ( System . currentTimeMillis ( ) - start ) + "ms" ) ; if ( f . facets == null ) f . facets = new PersistedList < > ( f ) ; for ( FingerprintFacet facet : f . facets ) facet . _setOwner ( f ) ; return f ; } catch ( IOException e ) { if ( file . exists ( ) && file . length ( ) == 0 ) { // Despite the use of AtomicFile , there are reports indicating that people often see
// empty XML file , presumably either due to file system corruption ( perhaps by sudden
// power loss , etc . ) or abnormal program termination .
// generally we don ' t want to wipe out user data just because we can ' t load it ,
// but if the file size is 0 , which is what ' s reported in HUDSON - 2012 , then it seems
// like recovering it silently by deleting the file is not a bad idea .
logger . log ( Level . WARNING , "Size zero fingerprint. Disk corruption? {0}" , configFile ) ; file . delete ( ) ; return null ; } String parseError = messageOfParseException ( e ) ; if ( parseError != null ) { logger . log ( Level . WARNING , "Malformed XML in {0}: {1}" , new Object [ ] { configFile , parseError } ) ; file . delete ( ) ; return null ; } logger . log ( Level . WARNING , "Failed to load " + configFile , e ) ; throw e ; }
|
public class SarlCapacityImpl { /** * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * @ generated */
@ Override public boolean eIsSet ( int featureID ) { } }
|
// EMF-generated: the EXTENDS feature counts as "set" when the extends list is
// non-null and non-empty; every other feature is delegated to the superclass.
switch ( featureID ) { case SarlPackage . SARL_CAPACITY__EXTENDS : return extends_ != null && ! extends_ . isEmpty ( ) ; } return super . eIsSet ( featureID ) ;
|
public class ServiceBuilder { /** * Attaches the given DurableDataLogFactory creator to this ServiceBuilder . The given Function will only not be invoked
* right away ; it will be called when needed .
* @ param dataLogFactoryCreator The Function to attach .
* @ return This ServiceBuilder . */
public ServiceBuilder withDataLogFactory ( Function < ComponentSetup , DurableDataLogFactory > dataLogFactoryCreator ) { } }
|
Preconditions . checkNotNull ( dataLogFactoryCreator , "dataLogFactoryCreator" ) ; this . dataLogFactoryCreator = dataLogFactoryCreator ; return this ;
|
public class SoyNodeCompiler { /** * Computes a single range argument .
 * @ param varName The variable name to use if this value should be stored in a local
 * @ param expression The expression
 * @ param defaultValue The value to use if there is no expression
 * @ param scope The current variable scope to add variables to
 * @ param initStatements Initializing statements , if any . */
private Expression computeRangeValue ( SyntheticVarName varName , Optional < ExprNode > expression , int defaultValue , Scope scope , final ImmutableList . Builder < Statement > initStatements ) { } }
|
// Fast paths: absent expression -> compile-time default constant; integer literal
// that fits in an int -> constant of that value. Otherwise compile the expression,
// narrow it to int at runtime, and cache non-cheap results in a synthetic local.
if ( ! expression . isPresent ( ) ) { return constant ( defaultValue ) ; } else if ( expression . get ( ) instanceof IntegerNode && ( ( IntegerNode ) expression . get ( ) ) . isInt ( ) ) { int value = Ints . checkedCast ( ( ( IntegerNode ) expression . get ( ) ) . getValue ( ) ) ; return constant ( value ) ; } else { Label startDetachPoint = new Label ( ) ; // Note : If the value of rangeArgs . start ( ) is above 32 bits , Ints . checkedCast ( ) will fail at
// runtime with IllegalArgumentException .
Expression startExpression = MethodRef . INTS_CHECKED_CAST . invoke ( exprCompiler . compile ( expression . get ( ) , startDetachPoint ) . unboxAsLong ( ) ) ; if ( ! startExpression . isCheap ( ) ) { // bounce it into a local variable
// The initializer is labeled with the detach point so re-entry resumes correctly.
Variable startVar = scope . createSynthetic ( varName , startExpression , STORE ) ; initStatements . add ( startVar . initializer ( ) . labelStart ( startDetachPoint ) ) ; startExpression = startVar . local ( ) ; } return startExpression ; }
|
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcMedicalDeviceTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } }
|
return instanceValue == null ? null : instanceValue . toString ( ) ;
|
public class X509CertSelector { /** * Sets the subjectPublicKey criterion . The { @ code X509Certificate }
* must contain the specified subject public key . If { @ code null } ,
* no subjectPublicKey check will be done .
* Because this method allows the public key to be specified as a byte
* array , it may be used for unknown key types .
* If { @ code key } is not { @ code null } , it should contain a
* single DER encoded SubjectPublicKeyInfo structure , as defined in X . 509.
* The ASN . 1 notation for this structure is as follows .
* < pre > { @ code
* SubjectPublicKeyInfo : : = SEQUENCE {
* algorithm AlgorithmIdentifier ,
* subjectPublicKey BIT STRING }
* AlgorithmIdentifier : : = SEQUENCE {
* algorithm OBJECT IDENTIFIER ,
* parameters ANY DEFINED BY algorithm OPTIONAL }
* - - contains a value of the type
* - - registered for use with the
* - - algorithm object identifier value
* } < / pre >
* Note that the byte array supplied here is cloned to protect against
* subsequent modifications .
* @ param key a byte array containing the subject public key in ASN . 1 DER
* form ( or { @ code null } )
* @ throws IOException if an encoding error occurs ( incorrect form for
* subject public key )
* @ see # getSubjectPublicKey */
public void setSubjectPublicKey ( byte [ ] key ) throws IOException { } }
|
if ( key == null ) { subjectPublicKey = null ; subjectPublicKeyBytes = null ; } else { subjectPublicKeyBytes = key . clone ( ) ; subjectPublicKey = X509Key . parse ( new DerValue ( subjectPublicKeyBytes ) ) ; }
|
public class BaseTypeConverterRegistrar { /** * Register all type converters for the 15 base types : < br >
* < ul >
* < li > Boolean < / li >
* < li > Byte < / li >
* < li > Character < / li >
* < li > Double < / li >
* < li > Float < / li >
* < li > Integer < / li >
* < li > Long < / li >
* < li > Short < / li >
* < li > String < / li >
* < li > BigDecimal < / li >
* < li > BigInteger < / li >
* < li > AtomicBoolean < / li >
* < li > AtomicInteger < / li >
* < li > AtomicLong < / li >
* < li > StringBuffer < / li >
* < li > StringBuilder < / li >
* < / ul > */
public void registerTypeConverter ( @ Nonnull final ITypeConverterRegistry aRegistry ) { } }
|
// Converter flavours used below (named after the registry methods):
//   * registerTypeConverter ............................ exact source/destination class pair
//   * ...RuleAssignableSourceFixedDestination .......... any subclass of the source type
//   * ...RuleAnySourceFixedDestination ................. catch-all fallback (usually toString() + parse)
// NOTE(review): semantics inferred from the method names - confirm against ITypeConverterRegistry.
// to Boolean
aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Number . class , Boolean . class , aSource -> Boolean . valueOf ( aSource . intValue ( ) != 0 ) ) ; aRegistry . registerTypeConverter ( Character . class , Boolean . class , aSource -> Boolean . valueOf ( aSource . charValue ( ) != 0 ) ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( Boolean . class , aSource -> StringParser . parseBoolObj ( aSource , ( Boolean ) null ) ) ; // to Byte
aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Number . class , Byte . class , aSource -> Byte . valueOf ( aSource . byteValue ( ) ) ) ; aRegistry . registerTypeConverter ( Boolean . class , Byte . class , aSource -> Byte . valueOf ( aSource . booleanValue ( ) ? ( byte ) 1 : ( byte ) 0 ) ) ; aRegistry . registerTypeConverter ( Character . class , Byte . class , aSource -> Byte . valueOf ( ( byte ) aSource . charValue ( ) ) ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( Byte . class , aSource -> StringParser . parseByteObj ( aSource , ( Byte ) null ) ) ; // to Character
// A String converts to Character only when it is exactly one char long; otherwise null is returned.
aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Number . class , Character . class , aSource -> Character . valueOf ( ( char ) aSource . intValue ( ) ) ) ; aRegistry . registerTypeConverter ( Boolean . class , Character . class , aSource -> Character . valueOf ( aSource . booleanValue ( ) ? ( char ) 1 : ( char ) 0 ) ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( Character . class , aSource -> { final String sSource = aSource . toString ( ) ; return sSource . length ( ) == 1 ? Character . valueOf ( sSource . charAt ( 0 ) ) : null ; } ) ; // to Double
aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Number . class , Double . class , aSource -> Double . valueOf ( aSource . doubleValue ( ) ) ) ; aRegistry . registerTypeConverter ( Boolean . class , Double . class , aSource -> Double . valueOf ( aSource . booleanValue ( ) ? 1d : 0d ) ) ; aRegistry . registerTypeConverter ( Character . class , Double . class , aSource -> Double . valueOf ( aSource . charValue ( ) ) ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( Double . class , aSource -> StringParser . parseDoubleObj ( aSource , ( Double ) null ) ) ; // to Float
aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Number . class , Float . class , aSource -> Float . valueOf ( aSource . floatValue ( ) ) ) ; aRegistry . registerTypeConverter ( Boolean . class , Float . class , aSource -> Float . valueOf ( aSource . booleanValue ( ) ? 1f : 0f ) ) ; aRegistry . registerTypeConverter ( Character . class , Float . class , aSource -> Float . valueOf ( aSource . charValue ( ) ) ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( Float . class , aSource -> StringParser . parseFloatObj ( aSource , ( Float ) null ) ) ; // to Integer
// String -> Integer/Long/Short go through BigDecimal first, so decimal text such as "1.5"
// also converts (truncating towards the integral value via intValue()/longValue()/shortValue()).
aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Number . class , Integer . class , aSource -> Integer . valueOf ( aSource . intValue ( ) ) ) ; aRegistry . registerTypeConverter ( Boolean . class , Integer . class , aSource -> Integer . valueOf ( aSource . booleanValue ( ) ? 1 : 0 ) ) ; aRegistry . registerTypeConverter ( Character . class , Integer . class , aSource -> Integer . valueOf ( aSource . charValue ( ) ) ) ; aRegistry . registerTypeConverter ( String . class , Integer . class , aSource -> { final BigDecimal aBD = StringParser . parseBigDecimal ( aSource , ( BigDecimal ) null ) ; return aBD == null ? null : Integer . valueOf ( aBD . intValue ( ) ) ; } ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( Integer . class , aSource -> StringParser . parseIntObj ( aSource , ( Integer ) null ) ) ; // to Long
aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Number . class , Long . class , aSource -> Long . valueOf ( aSource . longValue ( ) ) ) ; aRegistry . registerTypeConverter ( Boolean . class , Long . class , aSource -> Long . valueOf ( aSource . booleanValue ( ) ? 1L : 0L ) ) ; aRegistry . registerTypeConverter ( Character . class , Long . class , aSource -> Long . valueOf ( aSource . charValue ( ) ) ) ; aRegistry . registerTypeConverter ( String . class , Long . class , aSource -> { final BigDecimal aBD = StringParser . parseBigDecimal ( aSource , ( BigDecimal ) null ) ; return aBD == null ? null : Long . valueOf ( aBD . longValue ( ) ) ; } ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( Long . class , aSource -> StringParser . parseLongObj ( aSource , ( Long ) null ) ) ; // to Short
aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Number . class , Short . class , aSource -> Short . valueOf ( aSource . shortValue ( ) ) ) ; aRegistry . registerTypeConverter ( Boolean . class , Short . class , aSource -> Short . valueOf ( aSource . booleanValue ( ) ? ( short ) 1 : ( short ) 0 ) ) ; aRegistry . registerTypeConverter ( Character . class , Short . class , aSource -> Short . valueOf ( ( short ) aSource . charValue ( ) ) ) ; aRegistry . registerTypeConverter ( String . class , Short . class , aSource -> { final BigDecimal aBD = StringParser . parseBigDecimal ( aSource , ( BigDecimal ) null ) ; return aBD == null ? null : Short . valueOf ( aBD . shortValue ( ) ) ; } ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( Short . class , aSource -> StringParser . parseShortObj ( aSource , ( Short ) null ) ) ; // to String
aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( String . class , Object :: toString ) ; // to BigDecimal
// NOTE(review): Number -> BigDecimal uses BigDecimal.valueOf(double), which can lose precision
// for sources that exceed double range; the dedicated Byte/Integer/Long/Short rules below avoid that.
aRegistry . registerTypeConverter ( BigInteger . class , BigDecimal . class , BigDecimal :: new ) ; aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Number . class , BigDecimal . class , aSource -> BigDecimal . valueOf ( aSource . doubleValue ( ) ) ) ; aRegistry . registerTypeConverter ( Boolean . class , BigDecimal . class , aSource -> aSource . booleanValue ( ) ? BigDecimal . ONE : BigDecimal . ZERO ) ; aRegistry . registerTypeConverter ( Character . class , BigDecimal . class , aSource -> BigDecimal . valueOf ( aSource . charValue ( ) ) ) ; aRegistry . registerTypeConverter ( Byte . class , BigDecimal . class , aSource -> BigDecimal . valueOf ( aSource . intValue ( ) ) ) ; aRegistry . registerTypeConverter ( Integer . class , BigDecimal . class , aSource -> BigDecimal . valueOf ( aSource . intValue ( ) ) ) ; aRegistry . registerTypeConverter ( Long . class , BigDecimal . class , aSource -> BigDecimal . valueOf ( aSource . longValue ( ) ) ) ; aRegistry . registerTypeConverter ( Short . class , BigDecimal . class , aSource -> BigDecimal . valueOf ( aSource . shortValue ( ) ) ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( BigDecimal . class , aSource -> StringParser . parseBigDecimal ( aSource . toString ( ) , ( BigDecimal ) null ) ) ; // to BigInteger
aRegistry . registerTypeConverter ( BigDecimal . class , BigInteger . class , BigDecimal :: toBigInteger ) ; aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Number . class , BigInteger . class , aSource -> BigInteger . valueOf ( aSource . longValue ( ) ) ) ; aRegistry . registerTypeConverter ( Boolean . class , BigInteger . class , aSource -> aSource . booleanValue ( ) ? BigInteger . ONE : BigInteger . ZERO ) ; aRegistry . registerTypeConverter ( Character . class , BigInteger . class , aSource -> BigInteger . valueOf ( aSource . charValue ( ) ) ) ; aRegistry . registerTypeConverter ( String . class , BigInteger . class , aSource -> { final BigDecimal aBD = StringParser . parseBigDecimal ( aSource , ( BigDecimal ) null ) ; return aBD == null ? null : aBD . toBigInteger ( ) ; } ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( BigInteger . class , aSource -> StringParser . parseBigInteger ( aSource . toString ( ) , ( BigInteger ) null ) ) ; // AtomicBoolean
// The atomic wrappers convert in both directions: unwrap to the boxed value, and wrap any
// source by first converting it to the corresponding boxed type via TypeConverter.convert.
aRegistry . registerTypeConverterRuleFixedSourceAnyDestination ( AtomicBoolean . class , aSource -> Boolean . valueOf ( aSource . get ( ) ) ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( AtomicBoolean . class , aSource -> new AtomicBoolean ( TypeConverter . convert ( aSource , Boolean . class ) . booleanValue ( ) ) ) ; // AtomicInteger
aRegistry . registerTypeConverterRuleFixedSourceAnyDestination ( AtomicInteger . class , aSource -> Integer . valueOf ( aSource . get ( ) ) ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( AtomicInteger . class , aSource -> new AtomicInteger ( TypeConverter . convert ( aSource , Integer . class ) . intValue ( ) ) ) ; // AtomicLong
aRegistry . registerTypeConverterRuleFixedSourceAnyDestination ( AtomicLong . class , aSource -> Long . valueOf ( aSource . get ( ) ) ) ; aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( AtomicLong . class , aSource -> new AtomicLong ( TypeConverter . convert ( aSource , Long . class ) . longValue ( ) ) ) ; // to StringBuilder
aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( StringBuilder . class , aSource -> { if ( aSource instanceof CharSequence ) return new StringBuilder ( ( CharSequence ) aSource ) ; return new StringBuilder ( TypeConverter . convert ( aSource , String . class ) ) ; } ) ; // to StringBuffer
aRegistry . registerTypeConverterRuleAnySourceFixedDestination ( StringBuffer . class , aSource -> { if ( aSource instanceof CharSequence ) return new StringBuffer ( ( CharSequence ) aSource ) ; return new StringBuffer ( TypeConverter . convert ( aSource , String . class ) ) ; } ) ; // Enum
/* * We need to append the Enum class name , otherwise we cannot resolve it !
 * Use the colon as it is not allowed in class names . */
aRegistry . registerTypeConverterRuleAssignableSourceFixedDestination ( Enum . class , String . class , aSource -> aSource . getClass ( ) . getName ( ) + ':' + aSource . name ( ) ) ; aRegistry . registerTypeConverterRuleFixedSourceAssignableDestination ( String . class , Enum . class , x -> { /* * Split class name and enum value name */
final ICommonsList < String > aParts = StringHelper . getExploded ( ':' , x , 2 ) ; try { /* * Resolve any enum class . Note : The explicit EChange is just here ,
 * because an explicit enum type is needed . It must of course not only
 * be EChange : ) */
final Class < EChange > aClass = GenericReflection . getClassFromName ( aParts . get ( 0 ) ) ; /* * And look up the element by name */
// An unresolvable class name yields null (conversion failure) rather than an exception.
return Enum . valueOf ( aClass , aParts . get ( 1 ) ) ; } catch ( final ClassNotFoundException ex ) { return null ; } } ) ; // String [ ] to any
// An empty array converts to null; an array with more than one element logs a warning
// and only the first element is used for the conversion.
aRegistry . registerTypeConverterRuleFixedSourceAnyDestination ( String [ ] . class , x -> { if ( x . length == 0 ) return null ; if ( x . length > 1 ) { LoggerFactory . getLogger ( "TypeConverter" ) . warn ( "An array with " + x . length + " items is present; using the first value: " + Arrays . toString ( x ) ) ; } return x [ 0 ] ; } ) ;
|
public class ContainerGroupsInner { /** * Get a list of container groups in the specified subscription and resource group .
* Get a list of container groups in a specified subscription and resource group . This operation returns properties of each container group including containers , image registry credentials , restart policy , IP address type , OS type , state , and volumes .
* @ param resourceGroupName The name of the resource group .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; ContainerGroupInner & gt ; object */
public Observable < Page < ContainerGroupInner > > listByResourceGroupAsync ( final String resourceGroupName ) { } }
|
return listByResourceGroupWithServiceResponseAsync ( resourceGroupName ) . map ( new Func1 < ServiceResponse < Page < ContainerGroupInner > > , Page < ContainerGroupInner > > ( ) { @ Override public Page < ContainerGroupInner > call ( ServiceResponse < Page < ContainerGroupInner > > response ) { return response . body ( ) ; } } ) ;
|
public class CPDefinitionSpecificationOptionValueLocalServiceBaseImpl { /** * Adds the cp definition specification option value to the database . Also notifies the appropriate model listeners .
* @ param cpDefinitionSpecificationOptionValue the cp definition specification option value
* @ return the cp definition specification option value that was added */
@ Indexable ( type = IndexableType . REINDEX ) @ Override public CPDefinitionSpecificationOptionValue addCPDefinitionSpecificationOptionValue ( CPDefinitionSpecificationOptionValue cpDefinitionSpecificationOptionValue ) { } }
|
cpDefinitionSpecificationOptionValue . setNew ( true ) ; return cpDefinitionSpecificationOptionValuePersistence . update ( cpDefinitionSpecificationOptionValue ) ;
|
public class TemporaryZipFile { /** * Creates an entry under the specifeid path with the content from the provided resource .
* @ param pathToFile
* the path to the file in the zip file .
* @ param resource
* the resource providing the content for the file . Must not be null .
* @ throws IOException */
private void addEntry ( final Path pathToFile , final URL resource ) throws IOException { } }
|
final Path parent = pathToFile . getParent ( ) ; if ( parent != null ) { addFolder ( parent ) ; } try ( InputStream inputStream = resource . openStream ( ) ) { Files . copy ( inputStream , pathToFile ) ; }
|
public class JSONArray { /** * Creates a JSONArray . < br >
* Inspects the object type to call the correct JSONArray factory method .
* Accepts JSON formatted strings , arrays and Collections .
* @ param object
* @ throws JSONException if the object can not be converted to a proper
* JSONArray . */
public static JSONArray fromObject ( Object object , JsonConfig jsonConfig ) { } }
|
if ( object instanceof JSONString ) { return _fromJSONString ( ( JSONString ) object , jsonConfig ) ; } else if ( object instanceof JSONArray ) { return _fromJSONArray ( ( JSONArray ) object , jsonConfig ) ; } else if ( object instanceof Collection ) { return _fromCollection ( ( Collection ) object , jsonConfig ) ; } else if ( object instanceof JSONTokener ) { return _fromJSONTokener ( ( JSONTokener ) object , jsonConfig ) ; } else if ( object instanceof String ) { return _fromString ( ( String ) object , jsonConfig ) ; } else if ( object != null && object . getClass ( ) . isArray ( ) ) { Class type = object . getClass ( ) . getComponentType ( ) ; if ( ! type . isPrimitive ( ) ) { return _fromArray ( ( Object [ ] ) object , jsonConfig ) ; } else { if ( type == Boolean . TYPE ) { return _fromArray ( ( boolean [ ] ) object , jsonConfig ) ; } else if ( type == Byte . TYPE ) { return _fromArray ( ( byte [ ] ) object , jsonConfig ) ; } else if ( type == Short . TYPE ) { return _fromArray ( ( short [ ] ) object , jsonConfig ) ; } else if ( type == Integer . TYPE ) { return _fromArray ( ( int [ ] ) object , jsonConfig ) ; } else if ( type == Long . TYPE ) { return _fromArray ( ( long [ ] ) object , jsonConfig ) ; } else if ( type == Float . TYPE ) { return _fromArray ( ( float [ ] ) object , jsonConfig ) ; } else if ( type == Double . TYPE ) { return _fromArray ( ( double [ ] ) object , jsonConfig ) ; } else if ( type == Character . TYPE ) { return _fromArray ( ( char [ ] ) object , jsonConfig ) ; } else { throw new JSONException ( "Unsupported type" ) ; } } } else if ( JSONUtils . isBoolean ( object ) || JSONUtils . isFunction ( object ) || JSONUtils . isNumber ( object ) || JSONUtils . isNull ( object ) || JSONUtils . isString ( object ) || object instanceof JSON ) { fireArrayStartEvent ( jsonConfig ) ; JSONArray jsonArray = new JSONArray ( ) . element ( object , jsonConfig ) ; fireElementAddedEvent ( 0 , jsonArray . 
get ( 0 ) , jsonConfig ) ; fireArrayStartEvent ( jsonConfig ) ; return jsonArray ; } else if ( JSONUtils . isObject ( object ) ) { fireArrayStartEvent ( jsonConfig ) ; JSONArray jsonArray = new JSONArray ( ) . element ( JSONObject . fromObject ( object , jsonConfig ) ) ; fireElementAddedEvent ( 0 , jsonArray . get ( 0 ) , jsonConfig ) ; fireArrayStartEvent ( jsonConfig ) ; return jsonArray ; } else { throw new JSONException ( "Unsupported type" ) ; }
|
public class GettingStarted { /** * Make a model with 4 online nodes , 6 VMs ( 5 running , 1 ready ) .
* Declare 2 resources . */
private Model makeModel ( ) { } }
|
Model model = new DefaultModel ( ) ; Mapping map = model . getMapping ( ) ; // Create 4 online nodes
for ( int i = 0 ; i < 4 ; i ++ ) { Node n = model . newNode ( ) ; nodes . add ( n ) ; map . addOnlineNode ( n ) ; } // Create 6 VMs : vm0 . . vm5
for ( int i = 0 ; i < 6 ; i ++ ) { VM v = model . newVM ( ) ; vms . add ( v ) ; } // vm2 , vm1 , vm0 , vm3 , vm5 are running on the nodes
map . addRunningVM ( vms . get ( 2 ) , nodes . get ( 0 ) ) ; map . addRunningVM ( vms . get ( 1 ) , nodes . get ( 1 ) ) ; map . addRunningVM ( vms . get ( 0 ) , nodes . get ( 2 ) ) ; map . addRunningVM ( vms . get ( 3 ) , nodes . get ( 2 ) ) ; map . addRunningVM ( vms . get ( 5 ) , nodes . get ( 3 ) ) ; // vm4 is ready to be running on a node .
map . addReadyVM ( vms . get ( 4 ) ) ; // Declare a view to specify the " cpu " physical capacity of the nodes
// and the virtual consumption of the VMs .
// By default , nodes have 8 " cpu " resources
ShareableResource rcCPU = new ShareableResource ( "cpu" , 8 , 0 ) ; rcCPU . setConsumption ( vms . get ( 0 ) , 2 ) ; rcCPU . setConsumption ( vms . get ( 1 ) , 3 ) ; rcCPU . setConsumption ( vms . get ( 2 ) , 4 ) ; rcCPU . setConsumption ( vms . get ( 3 ) , 3 ) ; rcCPU . setConsumption ( vms . get ( 5 ) , 5 ) ; // By default , nodes have 7 " mem " resources
ShareableResource rcMem = new ShareableResource ( "mem" , 7 , 0 ) ; rcMem . setConsumption ( vms . get ( 0 ) , 2 ) ; rcMem . setConsumption ( vms . get ( 1 ) , 2 ) ; rcMem . setConsumption ( vms . get ( 2 ) , 4 ) ; rcMem . setConsumption ( vms . get ( 3 ) , 3 ) ; rcMem . setConsumption ( vms . get ( 5 ) , 4 ) ; // Attach the resources
model . attach ( rcCPU ) ; model . attach ( rcMem ) ; return model ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.