signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class RectifyFundamental { /** * Finds the values of a , b , c which minimize
* sum ( a * x ( + ) _ i + b * y ( + ) _ i + c - x ( - ) _ i ) ^ 2
* See page 306
* @ return Affine transform */
private SimpleMatrix computeAffineH ( List < AssociatedPair > observations , DMatrixRMaj H , DMatrixRMaj Hzero ) { } } | SimpleMatrix A = new SimpleMatrix ( observations . size ( ) , 3 ) ; SimpleMatrix b = new SimpleMatrix ( A . numRows ( ) , 1 ) ; Point2D_F64 c = new Point2D_F64 ( ) ; Point2D_F64 k = new Point2D_F64 ( ) ; for ( int i = 0 ; i < observations . size ( ) ; i ++ ) { AssociatedPair a = observations . get ( i ) ; GeometryMath_F64 . mult ( Hzero , a . p1 , k ) ; GeometryMath_F64 . mult ( H , a . p2 , c ) ; A . setRow ( i , 0 , k . x , k . y , 1 ) ; b . set ( i , 0 , c . x ) ; } SimpleMatrix x = A . solve ( b ) ; SimpleMatrix Ha = SimpleMatrix . identity ( 3 ) ; Ha . setRow ( 0 , 0 , x . getDDRM ( ) . data ) ; return Ha ; |
public class DefaultShardManagerBuilder { /** * Removes all provided listeners from the list of listeners .
* @ param listeners
* The listener ( s ) to remove from the list .
* @ return The DefaultShardManagerBuilder instance . Useful for chaining .
* @ see net . dv8tion . jda . core . JDA # removeEventListener ( Object . . . ) JDA . removeEventListener ( Object . . . ) */
public DefaultShardManagerBuilder removeEventListeners ( final Collection < Object > listeners ) { } } | Checks . noneNull ( listeners , "listeners" ) ; this . listeners . removeAll ( listeners ) ; return this ; |
public class DateTimeExtensions { /** * Creates a { @ link java . time . YearMonth } at the provided { @ link java . time . Year } .
* @ param self a Month
* @ param year a Year
* @ return a YearMonth
* @ since 2.5.0 */
public static YearMonth leftShift ( final Month self , Year year ) { } } | return YearMonth . of ( year . getValue ( ) , self ) ; |
public class ProcessDefines { /** * Records the fact that because of the current node in the node traversal , the define can ' t ever
* be assigned again .
* @ param info Represents the define variable .
* @ param t The current traversal . */
private static void setDefineInfoNotAssignable ( DefineInfo info , NodeTraversal t ) { } } | info . setNotAssignable ( format ( REASON_DEFINE_NOT_ASSIGNABLE , t . getLineNumber ( ) , t . getSourceName ( ) ) ) ; |
public class AWSStorageGatewayClient { /** * Returns information about the upload buffer of a gateway . This operation is supported for the stored volume ,
* cached volume and tape gateway types .
* The response includes disk IDs that are configured as upload buffer space , and it includes the amount of upload
* buffer space allocated and used .
* @ param describeUploadBufferRequest
* @ return Result of the DescribeUploadBuffer operation returned by the service .
* @ throws InvalidGatewayRequestException
* An exception occurred because an invalid gateway request was issued to the service . For more information ,
* see the error and message fields .
* @ throws InternalServerErrorException
* An internal server error has occurred during the request . For more information , see the error and message
* fields .
* @ sample AWSStorageGateway . DescribeUploadBuffer
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / storagegateway - 2013-06-30 / DescribeUploadBuffer "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribeUploadBufferResult describeUploadBuffer ( DescribeUploadBufferRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeUploadBuffer ( request ) ; |
public class CommerceAccountUserRelUtil { /** * Returns the first commerce account user rel in the ordered set where commerceAccountUserId = & # 63 ; .
* @ param commerceAccountUserId the commerce account user ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce account user rel , or < code > null < / code > if a matching commerce account user rel could not be found */
public static CommerceAccountUserRel fetchByCommerceAccountUserId_First ( long commerceAccountUserId , OrderByComparator < CommerceAccountUserRel > orderByComparator ) { } } | return getPersistence ( ) . fetchByCommerceAccountUserId_First ( commerceAccountUserId , orderByComparator ) ; |
public class XfdfReader { /** * Called when a start tag is found .
* @ param tag the tag name
* @ param h the tag ' s attributes */
public void startElement ( String tag , HashMap h ) { } } | if ( ! foundRoot ) { if ( ! tag . equals ( "xfdf" ) ) { throw new RuntimeException ( "Root element is not Bookmark." ) ; } else { foundRoot = true ; } } if ( tag . equals ( "xfdf" ) ) { } else if ( tag . equals ( "f" ) ) { fileSpec = ( String ) h . get ( "href" ) ; } else if ( tag . equals ( "fields" ) ) { fields = new HashMap ( ) ; // init it !
listFields = new HashMap ( ) ; } else if ( tag . equals ( "field" ) ) { String fName = ( String ) h . get ( "name" ) ; fieldNames . push ( fName ) ; } else if ( tag . equals ( "value" ) ) { fieldValues . push ( "" ) ; } |
public class GetTemplateResult { /** * The stage of the template that you can retrieve . For stacks , the < code > Original < / code > and < code > Processed < / code >
* templates are always available . For change sets , the < code > Original < / code > template is always available . After
* AWS CloudFormation finishes creating the change set , the < code > Processed < / code > template becomes available .
* @ param stagesAvailable
* The stage of the template that you can retrieve . For stacks , the < code > Original < / code > and
* < code > Processed < / code > templates are always available . For change sets , the < code > Original < / code > template
* is always available . After AWS CloudFormation finishes creating the change set , the < code > Processed < / code >
* template becomes available .
* @ see TemplateStage */
public void setStagesAvailable ( java . util . Collection < String > stagesAvailable ) { } } | if ( stagesAvailable == null ) { this . stagesAvailable = null ; return ; } this . stagesAvailable = new com . amazonaws . internal . SdkInternalList < String > ( stagesAvailable ) ; |
public class AuthenticateUserHelper { /** * Authenticate the given user and return an authenticated Subject .
* @ param authenticationService service to authenticate a user , must not be null
* @ param userName the user to authenticate , must not be null
* @ param jaasEntryName the optional JAAS configuration entry name . The system . DEFAULT JAAS entry name will be used if null or empty String is passed
* @ param customCacheKey The custom cache key to look up the subject
* @ return the authenticated subject
* @ throws AuthenticationException if there was a problem authenticating the user , or if the userName or authenticationService is null */
public Subject authenticateUser ( AuthenticationService authenticationService , String userName , String jaasEntryName , String customCacheKey ) throws AuthenticationException { } } | validateInput ( authenticationService , userName ) ; if ( jaasEntryName == null || jaasEntryName . trim ( ) . isEmpty ( ) ) jaasEntryName = JaasLoginConfigConstants . SYSTEM_DEFAULT ; Subject partialSubject = createPartialSubject ( userName , authenticationService , customCacheKey ) ; return authenticationService . authenticate ( jaasEntryName , partialSubject ) ; |
public class Settings { /** * Loads a property of the type String from the Properties object
* @ param propertyKey
* the property name
* @ return the value */
private String loadStringProperty ( String propertyKey ) { } } | String propValue = prop . getProperty ( propertyKey ) ; if ( propValue != null ) { propValue = propValue . trim ( ) ; } return propValue ; |
public class ZuulFilterChainHandler { /** * channel close . . resulting in an i / o exception */
private boolean isClientChannelClosed ( Throwable cause ) { } } | if ( cause instanceof ClosedChannelException || cause instanceof Errors . NativeIoException ) { LOG . error ( "ZuulFilterChainHandler::isClientChannelClosed - IO Exception" ) ; return true ; } return false ; |
public class FirstPartyAudienceSegmentRule { /** * Gets the customCriteriaRule value for this FirstPartyAudienceSegmentRule .
* @ return customCriteriaRule * Specifies the collection of custom criteria that are part of
* the rule of a
* { @ link FirstPartyAudienceSegment } .
* Once the { @ link FirstPartyAudienceSegment } is updated
* or modified with custom criteria , the
* server may return a normalized , but equivalent representation
* of the custom criteria rule .
* < ul >
* { @ code customCriteriaRule } will have up to three levels
* including itself .
* < li >
* The top level { @ link CustomCriteriaSet } i . e . the { @ code
* customTargeting } object can only
* contain a { @ link CustomCriteriaSet . LogicalOperator # OR }
* of all its children .
* < li >
* The second level of { @ link CustomCriteriaSet } objects
* can only contain
* { @ link CustomCriteriaSet . LogicalOperator # AND } of all
* their children . If a
* { @ link CustomCriteria } is placed on this level , the
* server will wrap it in a
* { @ link CustomCriteriaSet } .
* < li >
* The third level can only comprise of { @ link CustomCriteria }
* objects .
* < / ul >
* The resulting custom criteria rule would be of the
* form : < br >
* < img
* src = " https : / / chart . apis . google . com / chart ? cht = gv & chl = digraph { customTargeting _ LogicalOperator _ OR - % 3ECustomCriteriaSet _ LogicalOperator _ AND _ 1 - % 3ECustomCriteria _ 1 ; CustomCriteriaSet _ LogicalOperator _ AND _ 1 - % 3Eellipsis1 ; customTargeting _ LogicalOperator _ OR - % 3Eellipsis2 ; ellipsis1 [ label = % 22 . . . % 22 , shape = none , fontsize = 32 ] ; ellipsis2 [ label = % 22 . . . % 22 , shape = none , fontsize = 32 ] } & chs = 450x200 " / > */
public com . google . api . ads . admanager . axis . v201902 . CustomCriteriaSet getCustomCriteriaRule ( ) { } } | return customCriteriaRule ; |
public class IPv6Address { /** * Subtraction . Will never underflow , but wraps around when the lowest ip address has been reached .
* @ param value value to substract
* @ return new IPv6 address */
public IPv6Address subtract ( int value ) { } } | final long newLowBits = lowBits - value ; if ( value >= 0 ) { if ( IPv6AddressHelpers . isLessThanUnsigned ( lowBits , newLowBits ) ) { // oops , we subtracted something postive and the result is bigger - > overflow detected ( carry over one bit from high to low )
return new IPv6Address ( highBits - 1 , newLowBits ) ; } else { // no overflow
return new IPv6Address ( highBits , newLowBits ) ; } } else { if ( IPv6AddressHelpers . isLessThanUnsigned ( newLowBits , lowBits ) ) { // oops , we subtracted something negative and the result is smaller - > overflow detected ( carry over one bit from low to high )
return new IPv6Address ( highBits + 1 , newLowBits ) ; } else { // no overflow
return new IPv6Address ( highBits , newLowBits ) ; } } |
public class WebApp { /** * Add a servlet mapping .
* @ param servletMapping to add
* @ throws NullArgumentException if servlet mapping , servlet name or url pattern is null */
public void addServletMapping ( final WebAppServletMapping servletMapping ) { } } | NullArgumentException . validateNotNull ( servletMapping , "Servlet mapping" ) ; NullArgumentException . validateNotNull ( servletMapping . getServletName ( ) , "Servlet name" ) ; NullArgumentException . validateNotNull ( servletMapping . getUrlPattern ( ) , "Url pattern" ) ; Set < WebAppServletMapping > webAppServletMappings = servletMappings . get ( servletMapping . getServletName ( ) ) ; if ( webAppServletMappings == null ) { webAppServletMappings = new HashSet < > ( ) ; servletMappings . put ( servletMapping . getServletName ( ) , webAppServletMappings ) ; } webAppServletMappings . add ( servletMapping ) ; final WebAppServlet servlet = servlets . get ( servletMapping . getServletName ( ) ) ; // can be that the servlet is not yet added
if ( servlet != null ) { servlet . addUrlPattern ( servletMapping . getUrlPattern ( ) ) ; } |
public class JVMCollector { /** * 收集信息 */
public static JvmMData collect ( ) { } } | JvmMData JVMMData = new JvmMData ( ) ; // memory
Map < String , Object > memoryMap = JVMMonitor . getAttribute ( JVMConstants . JMX_JVM_MEMORY_NAME , getAttributeList ( JVMMemoryMBean . class ) ) ; JVMMData . setMemoryMap ( memoryMap ) ; // gc
Map < String , Object > gcMap = JVMMonitor . getAttribute ( JVMConstants . JMX_JVM_GC_NAME , getAttributeList ( JVMGCMBean . class ) ) ; JVMMData . setGcMap ( gcMap ) ; // thread
Map < String , Object > threadMap = JVMMonitor . getAttribute ( JVMConstants . JMX_JVM_THREAD_NAME , getAttributeList ( JVMThreadMBean . class ) ) ; JVMMData . setThreadMap ( threadMap ) ; return JVMMData ; |
public class WebSocketScopeManager { /** * Removes a websocket scope .
* @ param webSocketScope */
public void removeWebSocketScope ( WebSocketScope webSocketScope ) { } } | log . info ( "removeWebSocketScope: {}" , webSocketScope ) ; WebSocketScope wsScope = scopes . remove ( webSocketScope . getPath ( ) ) ; if ( wsScope != null ) { notifyListeners ( WebSocketEvent . SCOPE_REMOVED , wsScope ) ; } |
public class Humanize { /** * Parses the given text with the mask specified .
* @ param mask
* The pattern mask .
* @ param value
* The text to be parsed
* @ return The parsed text
* @ throws ParseException
* @ see MaskFormat */
public static String unmask ( final String mask , final String value ) throws ParseException { } } | return maskFormat ( mask ) . parse ( value ) ; |
public class NoteController { /** * Connects HTML template file with data for the source page .
* @ param idString id URL argument .
* @ param dbName name of database for the lookup .
* @ param model Spring connection between the data model wrapper .
* @ return a string identifying which HTML template to use . */
@ RequestMapping ( "/note" ) public final String note ( @ RequestParam ( value = "id" , required = false , defaultValue = "N0" ) final String idString , @ RequestParam ( value = "db" , required = false , defaultValue = "schoeller" ) final String dbName , final Model model ) { } } | logger . debug ( "Entering source" ) ; final Root root = fetchRoot ( dbName ) ; final RenderingContext context = createRenderingContext ( ) ; final Note note = ( Note ) root . find ( idString ) ; if ( note == null ) { throw new NoteNotFoundException ( "Note " + idString + " not found" , idString , dbName , context ) ; } final GedRenderer < ? > noteRenderer = new GedRendererFactory ( ) . create ( note , context ) ; model . addAttribute ( "filename" , gedbrowserHome + "/" + dbName + ".ged" ) ; model . addAttribute ( "noteString" , note . getString ( ) ) ; model . addAttribute ( "model" , noteRenderer ) ; model . addAttribute ( "appInfo" , appInfo ) ; logger . debug ( "Exiting source" ) ; return "note" ; |
public class PageDto { /** * Returns the elements of the page . < br >
* If the serialized page was read with the Jackson JSON processor , each
* element will be a { @ link LinkedHashMap } .
* @ return the elements of the page */
@ XmlElementWrapper ( name = "entries" ) @ XmlElement ( name = "entry" , nillable = true , type = Object . class ) @ JsonProperty ( value = "entries" ) @ ApiModelProperty ( value = "The elements of the page." ) @ Override public List < Object > getEntries ( ) { } } | return entries ; |
public class DatabaseDAODefaultImpl { public void put_class_attribute_property ( Database database , String classname , DbAttribute [ ] attr ) throws DevFailed { } } | if ( ! database . isAccess_checked ( ) ) checkAccess ( database ) ; DeviceData argIn = new DeviceData ( ) ; argIn . insert ( ApiUtil . toStringArray ( classname , attr , 2 ) ) ; command_inout ( database , "DbPutClassAttributeProperty2" , argIn ) ; |
public class ResponseUtil { /** * the default rule set for general import an export features */
public static MappingRules getDefaultJsonMapping ( ) { } } | return new MappingRules ( MappingRules . getDefaultMappingRules ( ) . MAPPING_NODE_FILTER , MappingRules . MAPPING_EXPORT_FILTER , MappingRules . MAPPING_IMPORT_FILTER , new MappingRules . PropertyFormat ( MappingRules . PropertyFormat . Scope . definition , MappingRules . PropertyFormat . Binary . link ) , 0 , MappingRules . ChangeRule . update ) ; |
public class AssetsConfiguration { /** * A series of mappings from resource paths ( in the classpath )
* to the uri path that hosts the resource
* @ return The resourcePathToUriMappings . */
public Map < String , String > getResourcePathToUriMappings ( ) { } } | if ( resourcePathToUriMappings == null ) { ImmutableMap . Builder < String , String > mapBuilder = ImmutableMap . < String , String > builder ( ) ; // Ensure that resourcePath and uri ends with a ' / '
for ( Map . Entry < String , String > mapping : mappings ( ) . entrySet ( ) ) { mapBuilder . put ( ensureEndsWithSlash ( mapping . getKey ( ) ) , ensureEndsWithSlash ( mapping . getValue ( ) ) ) ; } resourcePathToUriMappings = mapBuilder . build ( ) ; } return resourcePathToUriMappings ; |
public class DoublesUnionImpl { /** * Returns a Heap DoublesUnion object that has been initialized with the data from the given
* Memory image of a DoublesSketch . The srcMem object will not be modified and a reference to
* it is not retained . The < i > maxK < / i > of the resulting union will be that obtained from
* the sketch Memory image .
* @ param srcMem a Memory image of a quantiles DoublesSketch
* @ return a DoublesUnion object */
static DoublesUnionImpl heapifyInstance ( final Memory srcMem ) { } } | final HeapUpdateDoublesSketch sketch = HeapUpdateDoublesSketch . heapifyInstance ( srcMem ) ; final DoublesUnionImpl union = new DoublesUnionImpl ( sketch . getK ( ) ) ; union . gadget_ = sketch ; return union ; |
public class Collections3 { /** * Gets the only element from a given map or throws an exception
* @ param map the map to get only entry from
* @ param < K > the key type
* @ param < V > the value type
* @ return the single entry contained in the map
* @ throws NoSuchElementException if the map is empty
* @ throws IllegalArgumentException if the map contains multiple entries
* @ see com . google . common . collect . Iterables # getOnlyElement */
@ Beta public static < K , V > Map . Entry < K , V > getOnlyEntry ( Map < K , V > map ) { } } | checkArgument ( map != null , "Expected non-null map" ) ; return getOnlyElement ( map . entrySet ( ) ) ; |
public class BindDaoBuilder { /** * ( non - Javadoc )
* @ see
* com . abubusoft . kripton . processor . sqlite . model . SQLiteModelElementVisitor #
* visit ( com . abubusoft . kripton . processor . sqlite . model . SQLiteDaoDefinition ) */
@ Override public void visit ( SQLiteDaoDefinition value ) throws Exception { } } | currentDaoDefinition = value ; // check if we need to generate or not
if ( value . getElement ( ) . getAnnotation ( BindDaoMany2Many . class ) != null && value . getElement ( ) . getAnnotation ( BindGeneratedDao . class ) == null ) { return ; } String classTableName = daoName ( value ) ; PackageElement pkg = elementUtils . getPackageOf ( value . getElement ( ) ) ; String packageName = pkg . isUnnamed ( ) ? "" : pkg . getQualifiedName ( ) . toString ( ) ; AnnotationProcessorUtilis . infoOnGeneratedClasses ( BindDao . class , packageName , classTableName ) ; builder = TypeSpec . classBuilder ( classTableName ) . superclass ( Dao . class ) . addSuperinterface ( typeName ( value . getElement ( ) ) ) . addModifiers ( Modifier . PUBLIC ) ; for ( TypeName item : value . implementedInterface ) { builder . addSuperinterface ( item ) ; } BindTypeContext context = new BindTypeContext ( builder , TypeUtility . typeName ( packageName , classTableName ) , Modifier . PRIVATE ) ; String entityName = BindDataSourceSubProcessor . generateEntityName ( value , value . getEntity ( ) ) ; // javadoc for class
builder . addJavadoc ( "<p>" ) ; builder . addJavadoc ( "\nDAO implementation for entity <code>$L</code>, based on interface <code>$L</code>\n" , entityName , value . getElement ( ) . getSimpleName ( ) . toString ( ) ) ; builder . addJavadoc ( "</p>\n\n" ) ; JavadocUtility . generateJavadocGeneratedBy ( builder ) ; builder . addJavadoc ( " @see $T\n" , TypeUtility . className ( value . getEntityClassName ( ) ) ) ; builder . addJavadoc ( " @see $T\n" , TypeUtility . className ( value . getElement ( ) . getQualifiedName ( ) . toString ( ) ) ) ; builder . addJavadoc ( " @see $T\n" , BindTableGenerator . tableClassName ( value , value . getEntity ( ) ) ) ; { // constructor
MethodSpec . Builder methodBuilder = MethodSpec . constructorBuilder ( ) . addModifiers ( Modifier . PUBLIC ) . addParameter ( BindDaoFactoryBuilder . generateDaoFactoryClassName ( value . getParent ( ) ) , "daoFactory" ) ; methodBuilder . addStatement ( "super(daoFactory.context())" ) ; if ( value . hasRelations ( ) ) { methodBuilder . addStatement ( "this.daoFactory=daoFactory" ) ; builder . addField ( BindDaoFactoryBuilder . generateDaoFactoryClassName ( value . getParent ( ) ) , "daoFactory" , Modifier . PRIVATE ) ; } builder . addMethod ( methodBuilder . build ( ) ) ; } // define column typeName set
for ( SQLiteModelMethod item : value . getCollection ( ) ) { item . accept ( this ) ; } // generate live data support methods
if ( value . hasLiveData ( ) ) { // method sendEvent
{ MethodSpec . Builder methodBuilder = MethodSpec . methodBuilder ( METHOD_NAME_REGISTRY_EVENT ) . addModifiers ( Modifier . PROTECTED ) . addParameter ( Integer . TYPE , "affectedRows" ) ; methodBuilder . beginControlFlow ( "if (affectedRows==0)" ) ; methodBuilder . addStatement ( "return" ) ; methodBuilder . endControlFlow ( ) ; methodBuilder . beginControlFlow ( "if (_context.isInSession())" ) ; methodBuilder . addStatement ( "_context.registrySQLEvent($T.$L)" , BindDataSourceBuilder . generateDataSourceName ( value . getParent ( ) ) , value . daoUidName ) ; methodBuilder . nextControlFlow ( "else" ) ; methodBuilder . addStatement ( "invalidateLiveData()" ) ; methodBuilder . endControlFlow ( ) ; builder . addMethod ( methodBuilder . build ( ) ) ; } // field liveDatas
{ FieldSpec . Builder liveDataBuilder = FieldSpec . builder ( ParameterizedTypeName . get ( ClassName . get ( Collection . class ) , ParameterizedTypeName . get ( ClassName . get ( WeakReference . class ) , // ParameterizedTypeName . get ( ClassName . get ( LiveDataHandler . class ) ,
// WildcardTypeName . subtypeOf ( Object . class ) ) ) ) ,
ClassName . get ( LiveDataHandler . class ) ) ) , "liveDatas" ) . addModifiers ( Modifier . STATIC ) . initializer ( CodeBlock . builder ( ) . add ( "new $T()" , ParameterizedTypeName . get ( ClassName . get ( CopyOnWriteArraySet . class ) , ParameterizedTypeName . get ( ClassName . get ( WeakReference . class ) , ClassName . get ( LiveDataHandler . class ) ) ) ) . build ( ) ) ; builder . addField ( liveDataBuilder . build ( ) ) ; } // registryLiveData
{ // . addParameter ( ParameterizedTypeName . get ( ClassName . get ( LiveDataHandler . class ) ,
// WildcardTypeName . subtypeOf ( Object . class ) ) , " value " ) ;
MethodSpec . Builder methodBuilder = MethodSpec . methodBuilder ( METHOD_NAME_REGISTRY_LIVE_DATA ) . addModifiers ( Modifier . PROTECTED ) . addParameter ( ClassName . get ( LiveDataHandler . class ) , "value" ) ; methodBuilder . addStatement ( "liveDatas.add(new $T(value))" , ParameterizedTypeName . get ( ClassName . get ( WeakReference . class ) , ClassName . get ( LiveDataHandler . class ) ) ) ; builder . addMethod ( methodBuilder . build ( ) ) ; } // invalidateLiveData
{ // check datasource and dao package must be the same , otherwise
// invalidate must be public
MethodSpec . Builder methodBuilder = MethodSpec . methodBuilder ( METHOD_NAME_INVALIDATE_LIVE_DATA ) . addJavadoc ( "<p>Invalidate livedata.</p>\n\n" ) . addModifiers ( Modifier . PUBLIC ) ; methodBuilder . beginControlFlow ( "for ($T item: liveDatas)" , ParameterizedTypeName . get ( ClassName . get ( WeakReference . class ) , // ParameterizedTypeName . get ( ClassName . get ( KriptonLiveDataManager . getInstance ( ) . getLiveDataHandlerClazz ( ) ) ,
// WildcardTypeName . subtypeOf ( Object . class ) ) ) ) ;
ClassName . get ( LiveDataHandler . class ) ) ) ; methodBuilder . beginControlFlow ( "if (item.get()!=null)" ) ; methodBuilder . addStatement ( "item.get().invalidate()" ) ; methodBuilder . endControlFlow ( ) ; methodBuilder . endControlFlow ( ) ; builder . addMethod ( methodBuilder . build ( ) ) ; } } // generate serializer params
for ( Entry < TypeName , String > item : currentDaoDefinition . managedParams . entrySet ( ) ) { BindTransformer . checkIfIsInUnsupportedPackage ( item . getKey ( ) ) ; ManagedPropertyPersistenceHelper . generateParamSerializer ( context , item . getValue ( ) , item . getKey ( ) , PersistType . BYTE ) ; ManagedPropertyPersistenceHelper . generateParamParser ( context , item . getValue ( ) , item . getKey ( ) , PersistType . BYTE ) ; } // generate subject
if ( currentDaoDefinition . getParent ( ) . generateRx ) { ParameterizedTypeName subjectTypeName = ParameterizedTypeName . get ( ClassName . get ( PublishSubject . class ) , ClassName . get ( SQLiteEvent . class ) ) ; // subject
MethodSpec . Builder methodBuilder = MethodSpec . methodBuilder ( "getSubject" ) . addModifiers ( Modifier . PUBLIC ) ; methodBuilder . addStatement ( "return subject" ) . returns ( subjectTypeName ) ; builder . addMethod ( methodBuilder . build ( ) ) ; // subject instance
FieldSpec . Builder fieldBuilder = FieldSpec . builder ( subjectTypeName , "subject" , Modifier . PRIVATE , Modifier . FINAL , Modifier . STATIC ) . initializer ( "$T.create()" , ClassName . get ( PublishSubject . class ) ) ; builder . addField ( fieldBuilder . build ( ) ) ; } // generate prepared statement cleaner
{ MethodSpec . Builder methodBuilder = MethodSpec . methodBuilder ( "clearCompiledStatements" ) . addModifiers ( Modifier . PUBLIC , Modifier . STATIC ) . returns ( Void . TYPE ) ; for ( String item : value . preparedStatementNames ) { methodBuilder . beginControlFlow ( "if ($L!=null)" , item ) ; methodBuilder . addStatement ( "$L.close()" , item ) ; methodBuilder . addStatement ( "$L=null" , item ) ; methodBuilder . endControlFlow ( ) ; } builder . addMethod ( methodBuilder . build ( ) ) ; } TypeSpec typeSpec = builder . build ( ) ; JavaWriterHelper . writeJava2File ( filer , packageName , typeSpec ) ; |
public class AbstractBeanDefinition { /** * Obtains a value for the given method argument .
* @ param resolutionContext The resolution context
* @ param context The bean context
* @ param methodIndex The method index
* @ param argIndex The argument index
* @ return The value */
@ Internal @ UsedByGeneratedCode protected final boolean containsValueForMethodArgument ( BeanResolutionContext resolutionContext , BeanContext context , int methodIndex , int argIndex ) { } } | if ( context instanceof ApplicationContext ) { MethodInjectionPoint injectionPoint = methodInjectionPoints . get ( methodIndex ) ; Argument argument = injectionPoint . getArguments ( ) [ argIndex ] ; String valueAnnStr = argument . getAnnotationMetadata ( ) . getValue ( Value . class , String . class ) . orElse ( null ) ; String valString = resolvePropertyValueName ( resolutionContext , injectionPoint . getAnnotationMetadata ( ) , argument , valueAnnStr ) ; ApplicationContext applicationContext = ( ApplicationContext ) context ; Class type = argument . getType ( ) ; boolean isConfigProps = type . isAnnotationPresent ( ConfigurationProperties . class ) ; boolean result = isConfigProps || Map . class . isAssignableFrom ( type ) ? applicationContext . containsProperties ( valString ) : applicationContext . containsProperty ( valString ) ; if ( ! result && isConfigurationProperties ( ) ) { String cliOption = resolveCliOption ( argument . getName ( ) ) ; if ( cliOption != null ) { result = applicationContext . containsProperty ( cliOption ) ; } } if ( result && injectionPoint instanceof MissingMethodInjectionPoint ) { if ( LOG . isWarnEnabled ( ) ) { LOG . warn ( "Bean definition for type [{}] is compiled against an older version and value [{}] can no longer be set for missing method: {}" , getBeanType ( ) , valString , injectionPoint . getName ( ) ) ; } result = false ; } return result ; } return false ; |
public class BaseNCodecInputStream { /** * Reads one < code > byte < / code > from this input stream .
* @ return the byte as an integer in the range 0 to 255 . Returns - 1 if EOF has been reached .
* @ throws IOException
* if an I / O error occurs . */
@ Override public int read ( ) throws IOException { } } | int r = read ( singleByte , 0 , 1 ) ; while ( r == 0 ) { r = read ( singleByte , 0 , 1 ) ; } if ( r > 0 ) { final byte b = singleByte [ 0 ] ; return b < 0 ? 256 + b : b ; } return EOF ; |
public class SwiftInputStream { /** * close the stream
* @ param msg close message
* @ param length length */
private void closeStream ( String msg , long length ) { } } | if ( wrappedStream != null ) { long remaining = remainingInCurrentRequest ( ) ; boolean shouldAbort = remaining > readahead ; if ( ! shouldAbort ) { try { wrappedStream . close ( ) ; } catch ( IOException e ) { LOG . debug ( "When closing {} stream for {}" , uri , msg , e ) ; shouldAbort = true ; } } if ( shouldAbort ) { wrappedStream . abort ( ) ; } LOG . trace ( "Close stream {} {}: {}; streamPos={}, nextReadPos={}," + " request range {}-{} length={}" , uri , ( shouldAbort ? "aborted" : "closed" ) , msg , pos , nextReadPos , contentRangeStart , contentRangeFinish , length ) ; wrappedStream = null ; } |
public class Commit { /** * A list of parent commits for the specified commit . Each parent commit ID is the full commit ID .
* @ param parents
* A list of parent commits for the specified commit . Each parent commit ID is the full commit ID . */
public void setParents ( java . util . Collection < String > parents ) { } } | if ( parents == null ) { this . parents = null ; return ; } this . parents = new java . util . ArrayList < String > ( parents ) ; |
public class MediaClient { /** * Creates a new transcoder job which converts media files in BOS buckets with specified preset , watermarkId , and
* delogoArea .
* @ param pipelineName The name of pipeline used by this job .
* @ param sourceKey The key of the source media file in the bucket specified in the pipeline .
* @ param targetKey The key of the target media file in the bucket specified in the pipeline .
* @ param presetName The name of the preset used by this job .
* @ param watermarkId Single watermarkId associated with the job .
* @ param delogoArea The delogo area ( x , y , width , height ) .
* @ return The newly created job ID . */
public CreateTranscodingJobResponse createTranscodingJob ( String pipelineName , String sourceKey , String targetKey , String presetName , String watermarkId , DelogoArea delogoArea ) { } } | CreateTranscodingJobRequest request = new CreateTranscodingJobRequest ( ) ; request . setPipelineName ( pipelineName ) ; Source source = new Source ( ) ; source . setSourceKey ( sourceKey ) ; request . setSource ( source ) ; Target target = new Target ( ) ; target . setTargetKey ( targetKey ) ; target . setPresetName ( presetName ) ; if ( ! Strings . isNullOrEmpty ( watermarkId ) ) { List < String > watermarkIds = Collections . singletonList ( watermarkId ) ; target . setWatermarkIds ( watermarkIds ) ; } if ( delogoArea != null ) { target . setDelogoArea ( delogoArea ) ; } request . setTarget ( target ) ; return createTranscodingJob ( request ) ; |
public class CmsSearchConfiguration { /** * Propagates the names of all facets to each single facet . */
private void propagateFacetNames ( ) { } } | // collect all names and configurations
Collection < String > facetNames = new ArrayList < String > ( ) ; Collection < I_CmsSearchConfigurationFacet > facetConfigs = new ArrayList < I_CmsSearchConfigurationFacet > ( ) ; facetNames . addAll ( m_fieldFacets . keySet ( ) ) ; facetConfigs . addAll ( m_fieldFacets . values ( ) ) ; facetNames . addAll ( m_rangeFacets . keySet ( ) ) ; facetConfigs . addAll ( m_rangeFacets . values ( ) ) ; if ( null != m_queryFacet ) { facetNames . add ( m_queryFacet . getName ( ) ) ; facetConfigs . add ( m_queryFacet ) ; } // propagate all names
for ( I_CmsSearchConfigurationFacet facetConfig : facetConfigs ) { facetConfig . propagateAllFacetNames ( facetNames ) ; } |
public class MiniMaxNNChain { /** * Uses NNChain as in " Modern hierarchical , agglomerative clustering
* algorithms " by Daniel Müllner
* @ param mat distance matrix
* @ param prots computed prototypes
* @ param dq distance query of the data set
* @ param builder Result builder
* @ param clusters current clusters */
private void nnChainCore ( MatrixParadigm mat , DBIDArrayMIter prots , DistanceQuery < O > dq , PointerHierarchyRepresentationBuilder builder , Int2ObjectOpenHashMap < ModifiableDBIDs > clusters ) { } } | final DBIDArrayIter ix = mat . ix ; final double [ ] distances = mat . matrix ; final int size = mat . size ; // The maximum chain size = number of ids + 1
IntegerArray chain = new IntegerArray ( size + 1 ) ; FiniteProgress progress = LOG . isVerbose ( ) ? new FiniteProgress ( "Running MiniMax-NNChain" , size - 1 , LOG ) : null ; for ( int k = 1 , end = size ; k < size ; k ++ ) { int a = - 1 , b = - 1 ; if ( chain . size ( ) <= 3 ) { // Accessing two arbitrary not yet merged elements could be optimized to
// work in O ( 1 ) like in Müllner ;
// however this usually does not have a huge impact ( empirically just
// about 1/5000 of total performance )
a = NNChain . findUnlinked ( 0 , end , ix , builder ) ; b = NNChain . findUnlinked ( a + 1 , end , ix , builder ) ; chain . clear ( ) ; chain . add ( a ) ; } else { // Chain is expected to look like ( . . . . a , b , c , b ) with b and c merged .
int lastIndex = chain . size ; int c = chain . get ( lastIndex - 2 ) ; b = chain . get ( lastIndex - 3 ) ; a = chain . get ( lastIndex - 4 ) ; // Ensure we had a loop at the end :
assert ( chain . get ( lastIndex - 1 ) == c || chain . get ( lastIndex - 1 ) == b ) ; // if c < b , then we merged b - > c , otherwise c - > b
b = c < b ? c : b ; // Cut the tail :
chain . size -= 3 ; } // For ties , always prefer the second - last element b :
double minDist = mat . get ( a , b ) ; do { int c = b ; final int ta = MatrixParadigm . triangleSize ( a ) ; for ( int i = 0 ; i < a ; i ++ ) { if ( i != b && ! builder . isLinked ( ix . seek ( i ) ) ) { double dist = distances [ ta + i ] ; if ( dist < minDist ) { minDist = dist ; c = i ; } } } for ( int i = a + 1 ; i < size ; i ++ ) { if ( i != b && ! builder . isLinked ( ix . seek ( i ) ) ) { double dist = distances [ MatrixParadigm . triangleSize ( i ) + a ] ; if ( dist < minDist ) { minDist = dist ; c = i ; } } } b = a ; a = c ; chain . add ( a ) ; } while ( chain . size ( ) < 3 || a != chain . get ( chain . size - 1 - 2 ) ) ; // We always merge the larger into the smaller index :
if ( a < b ) { int tmp = a ; a = b ; b = tmp ; } assert ( minDist == mat . get ( a , b ) ) ; assert ( b < a ) ; MiniMax . merge ( size , mat , prots , builder , clusters , dq , a , b ) ; end = AGNES . shrinkActiveSet ( ix , builder , end , a ) ; // Shrink working set
LOG . incrementProcessed ( progress ) ; } LOG . ensureCompleted ( progress ) ; |
public class DSLMapWalker { /** * src / main / resources / org / drools / compiler / lang / dsl / DSLMapWalker . g : 162:1 : consequence _ key : VT _ CONSEQUENCE ; */
public final void consequence_key ( ) throws RecognitionException { } } | try { // src / main / resources / org / drools / compiler / lang / dsl / DSLMapWalker . g : 163:5 : ( VT _ CONSEQUENCE )
// src / main / resources / org / drools / compiler / lang / dsl / DSLMapWalker . g : 163:7 : VT _ CONSEQUENCE
{ match ( input , VT_CONSEQUENCE , FOLLOW_VT_CONSEQUENCE_in_consequence_key524 ) ; entry_stack . peek ( ) . retval . setSection ( DSLMappingEntry . CONSEQUENCE ) ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { // do for sure before leaving
} |
public class HistoryReference { /** * Delete this HistoryReference from database
* This should typically only be called via the ExtensionHistory . delete ( href ) method */
public void delete ( ) { } } | if ( historyId > 0 ) { try { // ZAP : Support for multiple tags
staticTableTag . deleteTagsForHistoryID ( historyId ) ; staticTableHistory . delete ( historyId ) ; notifyEvent ( HistoryReferenceEventPublisher . EVENT_REMOVED ) ; } catch ( DatabaseException e ) { log . error ( e . getMessage ( ) , e ) ; } } |
public class NodeTypeImpl { /** * { @ inheritDoc } */
public NodeType [ ] getSupertypes ( ) { } } | Set < InternalQName > supers = nodeTypeDataManager . getSupertypes ( nodeTypeData . getName ( ) ) ; NodeType [ ] superTypes = new NodeType [ supers . size ( ) ] ; int i = 0 ; for ( InternalQName nodeTypeName : supers ) { try { superTypes [ i ++ ] = nodeTypeManager . findNodeType ( nodeTypeName ) ; } catch ( NoSuchNodeTypeException e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } catch ( RepositoryException e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } } return superTypes ; |
public class FiniteProgress { /** * Ensure that the progress was completed , to make progress bars disappear
* @ param logger Logger to report to . */
public void ensureCompleted ( Logging logger ) { } } | if ( ! isComplete ( ) ) { logger . warning ( "Progress had not completed automatically as expected: " + getProcessed ( ) + "/" + total , new Throwable ( ) ) ; setProcessed ( getTotal ( ) ) ; logger . progress ( this ) ; } |
public class IPAddress { /** * Creates the normalized string for an address without having to create the address objects first . */
protected static String toNormalizedString ( PrefixConfiguration prefixConfiguration , SegmentValueProvider lowerValueProvider , SegmentValueProvider upperValueProvider , Integer prefixLength , int segmentCount , int bytesPerSegment , int bitsPerSegment , int segmentMaxValue , char separator , int radix , CharSequence zone ) { } } | int length = toNormalizedString ( prefixConfiguration , lowerValueProvider , upperValueProvider , prefixLength , segmentCount , bytesPerSegment , bitsPerSegment , segmentMaxValue , separator , radix , zone , null ) ; StringBuilder builder = new StringBuilder ( length ) ; toNormalizedString ( prefixConfiguration , lowerValueProvider , upperValueProvider , prefixLength , segmentCount , bytesPerSegment , bitsPerSegment , segmentMaxValue , separator , radix , zone , builder ) ; IPAddressSection . checkLengths ( length , builder ) ; return builder . toString ( ) ; |
public class BooleanCondition { /** * { @ inheritDoc } */
@ Override public synchronized BooleanQuery doQuery ( Schema schema ) { } } | int oldMaxClauses = BooleanQuery . getMaxClauseCount ( ) ; BooleanQuery . setMaxClauseCount ( maxClauses ) ; BooleanQuery . Builder builder = new BooleanQuery . Builder ( ) ; must . forEach ( condition -> builder . add ( condition . query ( schema ) , MUST ) ) ; should . forEach ( condition -> builder . add ( condition . query ( schema ) , SHOULD ) ) ; not . forEach ( condition -> builder . add ( condition . query ( schema ) , MUST_NOT ) ) ; if ( must . isEmpty ( ) && should . isEmpty ( ) && ! not . isEmpty ( ) ) { logger . warn ( "Performing resource-intensive pure negation query {}" , this ) ; builder . add ( new MatchAllDocsQuery ( ) , FILTER ) ; } BooleanQuery out = builder . build ( ) ; BooleanQuery . setMaxClauseCount ( oldMaxClauses ) ; return out ; |
public class MPP12Reader { /** * This method extracts and collates the value list information
* for custom column value lists . */
private void processCustomValueLists ( ) throws IOException { } } | DirectoryEntry taskDir = ( DirectoryEntry ) m_projectDir . getEntry ( "TBkndTask" ) ; Props taskProps = new Props12 ( m_inputStreamFactory . getInstance ( taskDir , "Props" ) ) ; CustomFieldValueReader12 reader = new CustomFieldValueReader12 ( m_file . getProjectProperties ( ) , m_file . getCustomFields ( ) , m_outlineCodeVarMeta , m_outlineCodeVarData , m_outlineCodeFixedData , m_outlineCodeFixedData2 , taskProps ) ; reader . process ( ) ; |
public class QuickSelect { /** * QuickSelect is essentially quicksort , except that we only " sort " that half
* of the array that we are interested in .
* Note : the array is < b > modified < / b > by this .
* @ param data Data to process
* @ param rank Rank position that we are interested in ( integer ! )
* @ return Value at the given rank */
public static double quickSelect ( double [ ] data , int rank ) { } } | quickSelect ( data , 0 , data . length , rank ) ; return data [ rank ] ; |
public class AmazonGuardDutyClient { /** * Deletes a Amazon GuardDuty detector specified by the detector ID .
* @ param deleteDetectorRequest
* @ return Result of the DeleteDetector operation returned by the service .
* @ throws BadRequestException
* 400 response
* @ throws InternalServerErrorException
* 500 response
* @ sample AmazonGuardDuty . DeleteDetector
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / guardduty - 2017-11-28 / DeleteDetector " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public DeleteDetectorResult deleteDetector ( DeleteDetectorRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteDetector ( request ) ; |
public class NfsFileBase { /** * ( non - Javadoc )
* @ see
* com . emc . ecs . nfsclient . nfs . NfsFile # makeMkdirRequest ( com . emc . ecs . nfsclient .
* nfs . NfsSetAttributes ) */
public NfsMkdirRequest makeMkdirRequest ( NfsSetAttributes attributes ) throws IOException { } } | return getNfs ( ) . makeMkdirRequest ( getParentFile ( ) . getFileHandle ( ) , getName ( ) , attributes ) ; |
public class ClassCache { /** * Initializes the cache . */
protected void initialize ( ClassTraversal traversal ) { } } | Listener listener ; listener = new Listener ( ) ; traversal . traverse ( listener ) ; m_NameCache = listener . getNameCache ( ) ; |
public class JdbcWriter { /** * Resets the database connection after an error , if automatic reconnection is enabled . */
private void resetConnection ( ) { } } | if ( reconnect ) { closeConnectionSilently ( ) ; statement = null ; lostCount = batch ? batchCount : 1 ; batchCount = 0 ; reconnectTimestamp = 0 ; } |
import java . util . ArrayList ; import java . util . List ; public class UpdatedRunLengthEncoding { /** * Function to represent the revised run - length encoding from a list .
* Example :
* > > > updated _ run _ length _ encoding ( [ 1 , 1 , 2 , 3 , 4 , 4 , 5 , 1 ] )
* [ [ 2 , 1 ] , 2 , 3 , [ 2 , 4 ] , 5 , 1]
* > > > updated _ run _ length _ encoding ( ' automatically ' )
* [ ' a ' , ' u ' , ' t ' , ' o ' , ' m ' , ' a ' , ' t ' , ' i ' , ' c ' , ' a ' , [ 2 , ' l ' ] , ' y ' ]
* > > > updated _ run _ length _ encoding ( ' python ' )
* [ ' p ' , ' y ' , ' t ' , ' h ' , ' o ' , ' n ' ] */
public static List < Object > updatedRunLengthEncoding ( List < Object > inputList ) { } } | List < Object > resultList = new ArrayList < > ( ) ; int count = 1 ; for ( int i = 1 ; i <= inputList . size ( ) ; i ++ ) { if ( i < inputList . size ( ) && inputList . get ( i ) . equals ( inputList . get ( i - 1 ) ) ) { count ++ ; } else if ( count > 1 ) { List < Object > temp = new ArrayList < > ( ) ; temp . add ( count ) ; temp . add ( inputList . get ( i - 1 ) ) ; resultList . add ( temp ) ; count = 1 ; } else { resultList . add ( inputList . get ( i - 1 ) ) ; } } return resultList ; |
public class AbstractListenerLmlTag { /** * Invoked after template parsing . Hooks up the listener to actors registered by " attachTo " attribute .
* @ param parser parsed the template .
* @ param parsingResult parsed actors .
* @ return { @ link LmlParserListener # REMOVE } by default . See { @ link # setKeepListener ( boolean ) } . */
@ Override public boolean onEvent ( final LmlParser parser , final Array < Actor > parsingResult ) { } } | final ObjectMap < String , Actor > actorsByIds = parser . getActorsMappedByIds ( ) ; for ( final String id : ids ) { final Actor actor = actorsByIds . get ( id ) ; if ( actor != null ) { attachListener ( actor ) ; } else if ( ! keep ) { parser . throwErrorIfStrict ( "Unknown ID: '" + id + "'. Cannot attach listener." ) ; } } return keep ; |
public class BTreeIndex { /** * Processes a Page Split result . The first split page will replace the existing page , while the remaining pages
* will need to be inserted as children into the parent .
* @ param splitResult The result of the original BTreePage ' s splitIfNecessary ( ) call .
* @ param context Processing context . */
private void processSplitPage ( List < BTreePage > splitResult , PageModificationContext context ) { } } | PageWrapper originalPage = context . getPageWrapper ( ) ; for ( int i = 0 ; i < splitResult . size ( ) ; i ++ ) { val page = splitResult . get ( i ) ; ByteArraySegment newPageKey ; long newOffset ; long minOffset ; PageWrapper processedPage ; if ( i == 0 ) { // The original page will be replaced by the first split . Nothing changes about its pointer key .
originalPage . setPage ( page ) ; newPageKey = originalPage . getPageKey ( ) ; context . getPageCollection ( ) . complete ( originalPage ) ; processedPage = originalPage ; } else { // Insert the new pages and assign them new virtual offsets . Each page will use its first
// Key as a Page Key .
newPageKey = page . getKeyAt ( 0 ) ; processedPage = PageWrapper . wrapNew ( page , originalPage . getParent ( ) , new PagePointer ( newPageKey , PagePointer . NO_OFFSET , page . getLength ( ) ) ) ; context . getPageCollection ( ) . insert ( processedPage ) ; context . getPageCollection ( ) . complete ( processedPage ) ; } // Fetch new offset , and update minimum offsets .
newOffset = processedPage . getOffset ( ) ; minOffset = calculateMinOffset ( processedPage ) ; processedPage . setMinOffset ( minOffset ) ; // Record changes .
context . updatePagePointer ( new PagePointer ( newPageKey , newOffset , page . getLength ( ) , minOffset ) ) ; } |
public class CTFileQueryBond { /** * Create a CTFileQueryBond of the specified type ( from the MDL spec ) . The
* bond copies the atoms and sets the type using the value ' type ' , 5 = single
* or double , 8 = any , etc .
* @ param bond an existing bond
* @ param type the specified type
* @ return a new CTFileQueryBond */
public static CTFileQueryBond ofType ( IBond bond , int type ) { } } | CTFileQueryBond queryBond = new CTFileQueryBond ( bond . getBuilder ( ) ) ; queryBond . setOrder ( Order . UNSET ) ; queryBond . setAtoms ( new IAtom [ ] { bond . getBegin ( ) , bond . getEnd ( ) } ) ; switch ( type ) { case 1 : queryBond . setType ( Type . SINGLE ) ; break ; case 2 : queryBond . setType ( Type . DOUBLE ) ; break ; case 3 : queryBond . setType ( Type . TRIPLE ) ; break ; case 4 : queryBond . setType ( Type . AROMATIC ) ; break ; case 5 : queryBond . setType ( Type . SINGLE_OR_DOUBLE ) ; break ; case 6 : queryBond . setType ( Type . SINGLE_OR_AROMATIC ) ; break ; case 7 : queryBond . setType ( Type . DOUBLE_OR_AROMATIC ) ; break ; case 8 : queryBond . setType ( Type . ANY ) ; break ; default : throw new IllegalArgumentException ( "Unknown bond type: " + type ) ; } return queryBond ; |
public class RedisMonitor { /** * 内存信息
* @ return */
public static JSONArray monitorForMemory ( ) { } } | Map < String , Cache > CACHE = Redis . unmodifiableCache ( ) ; JSONArray monitors = new JSONArray ( ) ; if ( CACHE == null || CACHE . isEmpty ( ) ) return monitors ; JSONObject monitor = new JSONObject ( ) ; monitor . put ( "application" , EnvUtil . getApplication ( ) ) ; monitor . put ( "nodeId" , LocalNodeManager . LOCAL_NODE_ID ) ; for ( Map . Entry < String , Cache > entry : CACHE . entrySet ( ) ) { Cache cache = entry . getValue ( ) ; if ( cache == null ) continue ; try ( Jedis jedis = cache . getResource ( ) ) { monitor . put ( "instance" , cache . getName ( ) ) ; String info = ServerOperate . info ( jedis , "memory" ) ; double used_memory = Double . parseDouble ( ServerOperate . getAttributeInInfo ( info , "used_memory" ) ) ; // Redis 分配的内存总量
monitors . add ( new JSONObject ( ) { { putAll ( monitor ) ; put ( "value" , used_memory / 1024 / 1024 ) ; // N / 1024 ( KB ) / 1024 ( MB )
put ( "name" , "UsedMemory" ) ; } } ) ; // double used _ memory _ peak = Double . parseDouble ( ServerOperate . getAttributeInInfo ( info , " used _ memory _ peak " ) ) ; / / Redis 的内存消耗峰值
// monitors . add ( new JSONObject ( ) { {
// putAll ( monitor ) ;
// put ( " value " , used _ memory _ peak / 1024 / 1024 ) ;
// put ( " name " , " UsedMemoryPeak " ) ;
} catch ( Exception e ) { LOG . error ( String . format ( "Jedis Pool: %s (Grafana) monitor 出现异常" , entry . getKey ( ) ) , e ) ; } } return monitors ; |
public class GroupReduceFunction { /** * The combine methods pre - reduces elements . It may be called on subsets of the data
* before the actual reduce function . This is often helpful to lower data volume prior
* to reorganizing the data in an expensive way , as might be required for the final
* reduce function .
* This method is only ever invoked when the subclass of { @ link GroupReduceFunction }
* adds the { @ link Combinable } annotation , or if the < i > combinable < / i > flag is set when defining
* the < i > reduceGroup < i > operation via
* { @ link eu . stratosphere . api . java . operators . ReduceGroupOperator # setCombinable ( boolean ) } .
* Since the reduce function will be called on the result of this method , it is important that this
* method returns the same data type as it consumes . By default , this method only calls the
* { @ link # reduce ( Iterator , Collector ) } method . If the behavior in the pre - reducing is different
* from the final reduce function ( for example because the reduce function changes the data type ) ,
* this method must be overwritten , or the execution will fail .
* @ param values The iterator returning the group of values to be reduced .
* @ param out The collector to emit the returned values .
* @ throws Exception This method may throw exceptions . Throwing an exception will cause the operation
* to fail and may trigger recovery . */
@ Override public void combine ( Iterator < IN > values , Collector < IN > out ) throws Exception { } } | @ SuppressWarnings ( "unchecked" ) Collector < OUT > c = ( Collector < OUT > ) out ; reduce ( values , c ) ; |
public class MousePlugin { /** * Method called when mouse down occur on the element .
* You should not override this method . Instead , override { @ link # mouseStart ( Element , GqEvent ) }
* method . */
protected boolean mouseDown ( Element element , GqEvent event ) { } } | // test if an other plugin handle the mouseStart
if ( isEventAlreadyHandled ( event ) ) { return false ; } if ( started ) { // case where we missed a mouseup
mouseUp ( element , event ) ; } // calculate all interesting variables
reset ( event ) ; if ( notHandleMouseDown ( element , event ) ) { return true ; } if ( delayConditionMet ( ) && distanceConditionMet ( event ) ) { started = mouseStart ( element , event ) ; if ( ! started ) { event . getOriginalEvent ( ) . preventDefault ( ) ; return true ; } } bindOtherEvents ( element ) ; if ( ! touchSupported ) { // click event are not triggered if we call preventDefault on touchstart event .
event . getOriginalEvent ( ) . preventDefault ( ) ; } markEventAsHandled ( event ) ; return true ; |
public class ClosureRewriteModule { /** * In module " foo . Bar " , rewrite " exports = Bar " to " var module $ exports $ foo $ Bar = Bar " . */
private void maybeUpdateExportDeclaration ( NodeTraversal t , Node n ) { } } | if ( ! currentScript . isModule || ! n . getString ( ) . equals ( "exports" ) || ! isAssignTarget ( n ) ) { return ; } Node assignNode = n . getParent ( ) ; if ( ! currentScript . declareLegacyNamespace && currentScript . defaultExportLocalName != null ) { assignNode . getParent ( ) . detach ( ) ; return ; } // Rewrite " exports = . . . " as " var module $ exports $ foo $ Bar = . . . "
Node rhs = assignNode . getLastChild ( ) ; Node jsdocNode ; if ( currentScript . declareLegacyNamespace ) { Node legacyQname = NodeUtil . newQName ( compiler , currentScript . legacyNamespace ) . srcrefTree ( n ) ; assignNode . replaceChild ( n , legacyQname ) ; jsdocNode = assignNode ; } else { rhs . detach ( ) ; Node exprResultNode = assignNode . getParent ( ) ; Node binaryNamespaceName = IR . name ( currentScript . getBinaryNamespace ( ) ) ; binaryNamespaceName . setOriginalName ( currentScript . legacyNamespace ) ; Node exportsObjectCreationNode = IR . var ( binaryNamespaceName , rhs ) ; exportsObjectCreationNode . useSourceInfoIfMissingFromForTree ( exprResultNode ) ; exportsObjectCreationNode . putBooleanProp ( Node . IS_NAMESPACE , true ) ; exprResultNode . replaceWith ( exportsObjectCreationNode ) ; jsdocNode = exportsObjectCreationNode ; currentScript . hasCreatedExportObject = true ; } markConstAndCopyJsDoc ( assignNode , jsdocNode ) ; compiler . reportChangeToEnclosingScope ( jsdocNode ) ; maybeUpdateExportObjectLiteral ( t , rhs ) ; return ; |
public class Shutterbug { /** * To be used when screen shooting the page
* and need to scroll while making screen shots , either vertically or
* horizontally or both directions ( Chrome ) .
* @ param driver WebDriver instance
* @ param scroll ScrollStrategy How you need to scroll
* @ param useDevicePixelRatio whether or not take into account device pixel ratio
* @ return PageSnapshot instance */
public static PageSnapshot shootPage ( WebDriver driver , ScrollStrategy scroll , boolean useDevicePixelRatio ) { } } | return shootPage ( driver , scroll , 0 , useDevicePixelRatio ) ; |
public class ColumnWithIdComparator { /** * Compares two columns given by their names .
* @ param objA The name of the first column
* @ param objB The name of the second column
* @ return
* @ see java . util . Comparator # compare ( java . lang . Object , java . lang . Object ) */
public int compare ( Object objA , Object objB ) { } } | String idAStr = _table . getColumn ( ( String ) objA ) . getProperty ( "id" ) ; String idBStr = _table . getColumn ( ( String ) objB ) . getProperty ( "id" ) ; int idA ; int idB ; try { idA = Integer . parseInt ( idAStr ) ; } catch ( Exception ex ) { return 1 ; } try { idB = Integer . parseInt ( idBStr ) ; } catch ( Exception ex ) { return - 1 ; } return idA < idB ? - 1 : ( idA > idB ? 1 : 0 ) ; |
public class Region { /** * The Availability Zones for databases . Follows the format < code > us - east - 2a < / code > ( case - sensitive ) .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setRelationalDatabaseAvailabilityZones ( java . util . Collection ) } or
* { @ link # withRelationalDatabaseAvailabilityZones ( java . util . Collection ) } if you want to override the existing
* values .
* @ param relationalDatabaseAvailabilityZones
* The Availability Zones for databases . Follows the format < code > us - east - 2a < / code > ( case - sensitive ) .
* @ return Returns a reference to this object so that method calls can be chained together . */
public Region withRelationalDatabaseAvailabilityZones ( AvailabilityZone ... relationalDatabaseAvailabilityZones ) { } } | if ( this . relationalDatabaseAvailabilityZones == null ) { setRelationalDatabaseAvailabilityZones ( new java . util . ArrayList < AvailabilityZone > ( relationalDatabaseAvailabilityZones . length ) ) ; } for ( AvailabilityZone ele : relationalDatabaseAvailabilityZones ) { this . relationalDatabaseAvailabilityZones . add ( ele ) ; } return this ; |
public class Transforms { /** * 1 if less than or equal to 0 otherwise ( at each element )
* @ param first
* @ param ndArray
* @ return */
public static INDArray lessThanOrEqual ( INDArray first , INDArray ndArray ) { } } | return lessThanOrEqual ( first , ndArray , true ) ; |
public class DefaultErrorHandler { /** * Returns TRUE in case status code of response starts with 4 or 5 */
@ Override public boolean hasError ( Response < ByteSource > rs ) { } } | StatusType statusType = StatusType . valueOf ( rs . getStatus ( ) ) ; return ( statusType == StatusType . CLIENT_ERROR || statusType == StatusType . SERVER_ERROR ) ; |
public class SslHandler { /** * Notify all the handshake futures about the successfully handshake */
private void setHandshakeSuccess ( ) { } } | handshakePromise . trySuccess ( ctx . channel ( ) ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "{} HANDSHAKEN: {}" , ctx . channel ( ) , engine . getSession ( ) . getCipherSuite ( ) ) ; } ctx . fireUserEventTriggered ( SslHandshakeCompletionEvent . SUCCESS ) ; if ( readDuringHandshake && ! ctx . channel ( ) . config ( ) . isAutoRead ( ) ) { readDuringHandshake = false ; ctx . read ( ) ; } |
public class DescribeSpotPriceHistoryRequest { /** * Filters the results by the specified instance types .
* @ param instanceTypes
* Filters the results by the specified instance types .
* @ see InstanceType */
public void setInstanceTypes ( java . util . Collection < String > instanceTypes ) { } } | if ( instanceTypes == null ) { this . instanceTypes = null ; return ; } this . instanceTypes = new com . amazonaws . internal . SdkInternalList < String > ( instanceTypes ) ; |
public class ProtocolDecoderException { /** * Returns the message and the hexdump of the unknown part . */
@ Override public String getMessage ( ) { } } | String message = super . getMessage ( ) ; if ( message == null ) { message = "" ; } if ( hexdump != null ) { return message + ( message . length ( ) > 0 ? " " : "" ) + "(Hexdump: " + hexdump + ')' ; } return message ; |
public class DirectoryConfig { /** * The distinguished names of the organizational units for computer accounts .
* @ param organizationalUnitDistinguishedNames
* The distinguished names of the organizational units for computer accounts . */
public void setOrganizationalUnitDistinguishedNames ( java . util . Collection < String > organizationalUnitDistinguishedNames ) { } } | if ( organizationalUnitDistinguishedNames == null ) { this . organizationalUnitDistinguishedNames = null ; return ; } this . organizationalUnitDistinguishedNames = new java . util . ArrayList < String > ( organizationalUnitDistinguishedNames ) ; |
public class Reflecter { /** * Populate the JavaBeans properties of this delegate object , based on the specified name / value pairs
* @ param properties
* @ return */
public < V > Reflecter < T > populate ( Map < String , V > properties , String ... excludes ) { } } | return populate ( properties , Arrays . asList ( excludes ) ) ; |
public class AttributeQualifierImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setLevNum ( Integer newLevNum ) { } } | Integer oldLevNum = levNum ; levNum = newLevNum ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . ATTRIBUTE_QUALIFIER__LEV_NUM , oldLevNum , levNum ) ) ; |
public class AstaDatabaseReader { /** * Select the project properties row from the database .
* @ throws SQLException */
private void processProjectProperties ( ) throws SQLException { } } | List < Row > rows = getRows ( "select * from project_summary where projid=?" , m_projectID ) ; if ( rows . isEmpty ( ) == false ) { m_reader . processProjectProperties ( rows . get ( 0 ) ) ; } |
public class OrmDescriptorImpl { /** * Adds a new namespace
* @ return the current instance of < code > OrmDescriptor < / code > */
public OrmDescriptor addNamespace ( String name , String value ) { } } | model . attribute ( name , value ) ; return this ; |
public class FatLfnDirectory { /** * { @ inheritDoc }
* < / p > < p >
* According to the FAT file system specification , leading and trailing
* spaces in the { @ code name } are ignored by this method .
* @ param name { @ inheritDoc }
* @ return { @ inheritDoc } */
@ Override public FatLfnDirectoryEntry getEntry ( String name ) { } } | name = name . trim ( ) . toLowerCase ( Locale . ROOT ) ; final FatLfnDirectoryEntry entry = longNameIndex . get ( name ) ; if ( entry == null ) { if ( ! ShortName . canConvert ( name ) ) return null ; return shortNameIndex . get ( ShortName . get ( name ) ) ; } else { return entry ; } |
public class WebMvcTags { /** * Creates a { @ code method } tag based on the { @ link HttpServletRequest # getMethod ( )
* method } of the given { @ code request } .
* @ param request the request
* @ return the method tag whose value is a capitalized method ( e . g . GET ) . */
public static Tag method ( @ Nullable HttpServletRequest request ) { } } | return request == null ? METHOD_UNKNOWN : Tag . of ( "method" , request . getMethod ( ) ) ; |
public class XIncProcXIncludeFilter { @ Override public void startDTD ( final String name , final String publicId , final String systemId ) throws SAXException { } } | LOG . trace ( "startDTD:{},{},{}" , name , publicId , systemId ) ; this . inDTD = true ; if ( ofNullable ( this . lexicalHandler ) . isPresent ( ) ) { this . lexicalHandler . startDTD ( name , publicId , systemId ) ; } this . context . setDocType ( name , publicId , systemId ) ; |
public class AliasFactory { /** * Create a proxy instance for the given class and path
* @ param < A >
* @ param cl type of the proxy
* @ param path underlying expression
* @ return proxy instance */
@ SuppressWarnings ( "unchecked" ) protected < A > A createProxy ( Class < A > cl , Expression < ? > path ) { } } | Enhancer enhancer = new Enhancer ( ) ; enhancer . setClassLoader ( AliasFactory . class . getClassLoader ( ) ) ; if ( cl . isInterface ( ) ) { enhancer . setInterfaces ( new Class < ? > [ ] { cl , ManagedObject . class } ) ; } else { enhancer . setSuperclass ( cl ) ; enhancer . setInterfaces ( new Class < ? > [ ] { ManagedObject . class } ) ; } // creates one handler per proxy
MethodInterceptor handler = new PropertyAccessInvocationHandler ( path , this , pathFactory , typeSystem ) ; enhancer . setCallback ( handler ) ; return ( A ) enhancer . create ( ) ; |
public class FindBugsCommandLine { /** * Load given project file .
* @ param arg
* name of project file
* @ throws java . io . IOException */
public void loadProject ( String arg ) throws IOException { } } | Project newProject = Project . readProject ( arg ) ; newProject . setConfiguration ( project . getConfiguration ( ) ) ; project = newProject ; projectLoadedFromFile = true ; |
public class LinkedOptionalMap { /** * Tries to merges the keys and the values of @ right into @ left . */
public static < K , V > MergeResult < K , V > mergeRightIntoLeft ( LinkedOptionalMap < K , V > left , LinkedOptionalMap < K , V > right ) { } } | LinkedOptionalMap < K , V > merged = new LinkedOptionalMap < > ( left ) ; merged . putAll ( right ) ; return new MergeResult < > ( merged , isLeftPrefixOfRight ( left , right ) ) ; |
public class VectorTile { /** * Return the current status of this VectorTile . Can be one of the following :
* < ul >
* < li > STATUS . EMPTY < / li >
* < li > STATUS . LOADING < / li >
* < li > STATUS . LOADED < / li >
* < / ul >
* @ return status */
public STATUS getStatus ( ) { } } | if ( featureContent . isLoaded ( ) ) { return STATUS . LOADED ; } if ( deferred == null ) { return STATUS . EMPTY ; } return STATUS . LOADING ; |
public class ListUtils { /** * 生成一个 { @ link Vector }
* @ param values 值数组
* @ param capacity 初始化长度
* @ param < T > 值类型
* @ return { @ link Vector }
* @ since 1.0.9 */
public static < T > Vector < T > getVector ( int capacity , T ... values ) { } } | return new Vector < T > ( ) { { addAll ( Arrays . asList ( values ) ) ; } } ; |
public class VdmPluginImages { /** * Returns the image descriptor for the given key in this registry . Might be called in a non - UI thread .
* @ param key
* the image ' s key
* @ return the image descriptor for the given key */
public static ImageDescriptor getDescriptor ( String key ) { } } | if ( fgImageRegistry == null ) { return fgAvoidSWTErrorMap . get ( key ) ; } return getImageRegistry ( ) . getDescriptor ( key ) ; |
public class CacheProxy { /** * Returns a deep copy of the map if value - based caching is enabled .
* @ param map the mapping of keys to expirable values
* @ return a deep or shallow copy of the mappings depending on the store by value setting */
protected final Map < K , V > copyMap ( Map < K , Expirable < V > > map ) { } } | ClassLoader classLoader = cacheManager . getClassLoader ( ) ; return map . entrySet ( ) . stream ( ) . collect ( toMap ( entry -> copier . copy ( entry . getKey ( ) , classLoader ) , entry -> copier . copy ( entry . getValue ( ) . get ( ) , classLoader ) ) ) ; |
public class NotificationBoard { /** * Adds a footer view to the footer container .
 * @ param view the view to add
 * @ param index position within the footer container , passed straight through to ViewGroup.addView
 * @ param lp layout parameters applied to the added view */
public void addFooterView ( View view , int index , ViewGroup . LayoutParams lp ) { } } | mFooter . addView ( view , index , lp ) ; |
public class MergeableManifest2 { /** * Add the list with given bundles to the " Export - Package " main attribute .
* @ param exportedPackages The list of all packages to add . */
public void addExportedPackages ( String ... exportedPackages ) { } } | String oldBundles = mainAttributes . get ( EXPORT_PACKAGE ) ; if ( oldBundles == null ) oldBundles = "" ; BundleList oldResultList = BundleList . fromInput ( oldBundles , newline ) ; BundleList resultList = BundleList . fromInput ( oldBundles , newline ) ; for ( String bundle : exportedPackages ) resultList . mergeInto ( Bundle . fromInput ( bundle ) ) ; String result = resultList . toString ( ) ; boolean changed = ! oldResultList . toString ( ) . equals ( result ) ; modified |= changed ; if ( changed ) mainAttributes . put ( EXPORT_PACKAGE , result ) ; |
public class ZooKeeperStateHandleStore { /** * Releases all lock nodes of this ZooKeeperStateHandleStores and tries to remove all state nodes which
* are not locked anymore .
* < p > The delete operation is executed asynchronously
* @ throws Exception if the delete operation fails */
public void releaseAndTryRemoveAll ( ) throws Exception { } } | Collection < String > children = getAllPaths ( ) ; Exception exception = null ; for ( String child : children ) { try { releaseAndTryRemove ( '/' + child ) ; } catch ( Exception e ) { exception = ExceptionUtils . firstOrSuppressed ( e , exception ) ; } } if ( exception != null ) { throw new Exception ( "Could not properly release and try removing all state nodes." , exception ) ; } |
public class TelegramBot { /** * This allows you to edit the text of a message you have already sent previously
* @ param chatId The chat ID of the chat containing the message you want to edit
* @ param messageId The message ID of the message you want to edit
* @ param text The new text you want to display
* @ param parseMode The ParseMode that should be used with this new text
* @ param disableWebPagePreview Whether any URLs should be displayed with a preview of their content
* @ param inlineReplyMarkup Any InlineReplyMarkup object you want to edit into the message
* @ return A new Message object representing the edited message */
public Message editMessageText ( String chatId , Long messageId , String text , ParseMode parseMode , boolean disableWebPagePreview , InlineReplyMarkup inlineReplyMarkup ) { } } | if ( chatId != null && messageId != null && text != null ) { JSONObject jsonResponse = this . editMessageText ( chatId , messageId , null , text , parseMode , disableWebPagePreview , inlineReplyMarkup ) ; if ( jsonResponse != null ) { return MessageImpl . createMessage ( jsonResponse . getJSONObject ( "result" ) , this ) ; } } return null ; |
public class FileManagerImpl { /** * Splits a block on disk and returns the data address of the allocated part .
 * We assume this is only called to split a block on the misc list .
 * @ param block_addr disk address of the block being split
 * @ param request_size number of words carved off the front of the block
 * @ param rem remaining size written back just past the carved-off region
 * @ return block_addr + HDR_SIZE , the address of the allocated data */
private long split_block ( long block_addr , int request_size , int rem ) throws IOException { } } | allocated_words += request_size ; allocated_blocks ++ ; free_words -= request_size ; ml_hits ++ ; ml_splits ++ ; /* write the remainder header just past the carved-off region */ seek_and_count ( block_addr + request_size ) ; writeInt ( rem ) ; /* NOTE(review): negative size presumably marks the front portion as allocated — confirm header convention */ seek_and_count ( block_addr ) ; writeInt ( - request_size ) ; return ( block_addr + HDR_SIZE ) ; |
public class BlockingArrayQueue { /** Returns the element at the given logical index without removing it .
 * Takes the tail lock and then the head lock so that the head offset , size
 * and backing array are read consistently .
 * @ param index zero-based position from the head of the queue
 * @ return the element at that position
 * @ throws IndexOutOfBoundsException if index is negative or not less than the current size */ @ SuppressWarnings ( "unchecked" ) @ Override public E get ( int index ) { } } | _tailLock . lock ( ) ; try { _headLock . lock ( ) ; try { if ( index < 0 || index >= _size . get ( ) ) throw new IndexOutOfBoundsException ( "!(" + 0 + "<" + index + "<=" + _size + ")" ) ; /* translate logical index to a physical slot in the circular array */ int i = _indexes [ HEAD_OFFSET ] + index ; int capacity = _elements . length ; if ( i >= capacity ) i -= capacity ; /* wrap around */ return ( E ) _elements [ i ] ; } finally { _headLock . unlock ( ) ; } } finally { _tailLock . unlock ( ) ; } |
public class SqlEntityQueryImpl { /** * { @ inheritDoc }
 * Returns the first result of the query ; the stream is opened in
 * try-with-resources so underlying resources are closed after the lookup .
 * @ see jp . co . future . uroborosql . fluent . SqlEntityQuery # first ( ) */
@ Override public Optional < E > first ( ) { } } | try ( Stream < E > stream = stream ( ) ) { return stream . findFirst ( ) ; } |
public class WikiParser { /** * Finds the first closing ' } } } ' for a nowiki block or span .
 * Skips escaped sequences : ' ~ } } } ' . In a run of more than three ' } '
 * the match is shifted right so extra braces stay inside the nowiki content .
 * @ param startBlock points to first char after ' { { { '
 * @ return position of first ' } ' in closing ' } } } ' , or wikiLength when unterminated */
private int findEndOfNowiki ( int startBlock ) { } } | // seeded 3 back so the first indexOf starts searching exactly at startBlock
int endBlock = startBlock - 3 ; do { endBlock = wikiText . indexOf ( "}}}" , endBlock + 3 ) ; if ( endBlock < 0 ) return wikiLength ; // no matching ' } } } ' : treat the rest of the text as nowiki
while ( endBlock + 3 < wikiLength && wikiChars [ endBlock + 3 ] == '}' ) endBlock ++ ; // shift to end of sequence of more than 3x ' } ' ( eg . ' } } } } } ' )
} while ( wikiChars [ endBlock - 1 ] == '~' ) ; // a ' ~ ' just before the closer escapes it — search again
return endBlock ; |
public class ApplicationServiceClient { /** * Creates a new application entity .
* < p > Sample code :
* < pre > < code >
* try ( ApplicationServiceClient applicationServiceClient = ApplicationServiceClient . create ( ) ) {
* ProfileName parent = ProfileName . of ( " [ PROJECT ] " , " [ TENANT ] " , " [ PROFILE ] " ) ;
* Application application = Application . newBuilder ( ) . build ( ) ;
* Application response = applicationServiceClient . createApplication ( parent , application ) ;
* < / code > < / pre >
* @ param parent Required .
* < p > Resource name of the profile under which the application is created .
* < p > The format is " projects / { project _ id } / tenants / { tenant _ id } / profiles / { profile _ id } " , for
* example , " projects / test - project / tenants / test - tenant / profiles / test - profile " .
* @ param application Required .
* < p > The application to be created .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final Application createApplication ( ProfileName parent , Application application ) { } } | CreateApplicationRequest request = CreateApplicationRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . setApplication ( application ) . build ( ) ; return createApplication ( request ) ; |
public class CommerceDiscountRelLocalServiceBaseImpl { /** * Creates a new commerce discount rel instance with the given primary key . The instance is created in memory only and is NOT added to the database .
 * @ param commerceDiscountRelId the primary key for the new commerce discount rel
 * @ return the new , unsaved commerce discount rel */
@ Override @ Transactional ( enabled = false ) public CommerceDiscountRel createCommerceDiscountRel ( long commerceDiscountRelId ) { } } | return commerceDiscountRelPersistence . create ( commerceDiscountRelId ) ; |
public class LinkArgs { /** * Stores a custom property that may be used by application - specific markup builders or processors .
 * @ param key Property key ; must not be null
 * @ param value Property value
 * @ return this , for call chaining
 * @ throws IllegalArgumentException if key is null */
public @ NotNull LinkArgs property ( String key , Object value ) { } } | if ( key == null ) { throw new IllegalArgumentException ( "Key argument must not be null." ) ; } getProperties ( ) . put ( key , value ) ; return this ; |
public class DefaultIncrementalAttributesMapper { /** * Looks up all values for a single attribute , looping through the results incrementally if necessary .
 * Convenience overload that wraps the attribute name in an array and delegates to the multi-attribute variant .
 * @ param ldapOperations The instance to use for performing the actual lookup .
 * @ param dn The distinguished name of the object to find .
 * @ param attribute name of the attribute to request .
 * @ return an Attributes instance , populated with all found values for the requested attribute .
 * Never < code > null < / code > , though the actual attribute may not be set if it was not
 * set on the requested object . */
public static Attributes lookupAttributes ( LdapOperations ldapOperations , Name dn , String attribute ) { } } | return lookupAttributes ( ldapOperations , dn , new String [ ] { attribute } ) ; |
public class device_profile { /** * < pre >
* Performs generic data validation for the operation to be performed
* < / pre > */
protected void validate ( String operationType ) throws Exception { } } | super . validate ( operationType ) ; MPSString id_validator = new MPSString ( ) ; id_validator . setConstraintIsReq ( MPSConstants . DELETE_CONSTRAINT , true ) ; id_validator . setConstraintIsReq ( MPSConstants . MODIFY_CONSTRAINT , true ) ; id_validator . validate ( operationType , id , "\"id\"" ) ; MPSString name_validator = new MPSString ( ) ; name_validator . setConstraintCharSetRegEx ( MPSConstants . GENERIC_CONSTRAINT , "[ a-zA-Z0-9_#.:@=-]+" ) ; name_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; name_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; name_validator . setConstraintIsReq ( MPSConstants . ADD_CONSTRAINT , true ) ; name_validator . validate ( operationType , name , "\"name\"" ) ; MPSString type_validator = new MPSString ( ) ; type_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 128 ) ; type_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; type_validator . validate ( operationType , type , "\"type\"" ) ; MPSBoolean is_default_validator = new MPSBoolean ( ) ; is_default_validator . validate ( operationType , is_default , "\"is_default\"" ) ; MPSString username_validator = new MPSString ( ) ; username_validator . setConstraintCharSetRegEx ( MPSConstants . GENERIC_CONSTRAINT , "[ a-zA-Z0-9_#.:@=-]+" ) ; username_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 127 ) ; username_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; username_validator . setConstraintIsReq ( MPSConstants . ADD_CONSTRAINT , true ) ; username_validator . validate ( operationType , username , "\"username\"" ) ; MPSString password_validator = new MPSString ( ) ; password_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 127 ) ; password_validator . setConstraintMinStrLen ( MPSConstants . 
GENERIC_CONSTRAINT , 1 ) ; password_validator . setConstraintIsReq ( MPSConstants . ADD_CONSTRAINT , true ) ; password_validator . validate ( operationType , password , "\"password\"" ) ; MPSString snmpversion_validator = new MPSString ( ) ; snmpversion_validator . validate ( operationType , snmpversion , "\"snmpversion\"" ) ; MPSString snmpcommunity_validator = new MPSString ( ) ; snmpcommunity_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 31 ) ; snmpcommunity_validator . validate ( operationType , snmpcommunity , "\"snmpcommunity\"" ) ; MPSString snmpsecurityname_validator = new MPSString ( ) ; snmpsecurityname_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 31 ) ; snmpsecurityname_validator . validate ( operationType , snmpsecurityname , "\"snmpsecurityname\"" ) ; MPSString snmpsecuritylevel_validator = new MPSString ( ) ; snmpsecuritylevel_validator . validate ( operationType , snmpsecuritylevel , "\"snmpsecuritylevel\"" ) ; MPSString snmpauthprotocol_validator = new MPSString ( ) ; snmpauthprotocol_validator . validate ( operationType , snmpauthprotocol , "\"snmpauthprotocol\"" ) ; MPSString snmpauthpassword_validator = new MPSString ( ) ; snmpauthpassword_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 31 ) ; snmpauthpassword_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 8 ) ; snmpauthpassword_validator . validate ( operationType , snmpauthpassword , "\"snmpauthpassword\"" ) ; MPSString snmpprivprotocol_validator = new MPSString ( ) ; snmpprivprotocol_validator . validate ( operationType , snmpprivprotocol , "\"snmpprivprotocol\"" ) ; MPSString snmpprivpassword_validator = new MPSString ( ) ; snmpprivpassword_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 31 ) ; snmpprivpassword_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 8 ) ; snmpprivpassword_validator . 
validate ( operationType , snmpprivpassword , "\"snmpprivpassword\"" ) ; |
public class EmbeddedJCA { /** * Deploy
* @ param cl The class loader
* @ param name The resource name
* @ exception Throwable If an error occurs */
private void deploy ( ClassLoader cl , String name ) throws Throwable { } } | if ( cl == null ) throw new IllegalArgumentException ( "ClassLoader is null" ) ; if ( name == null ) throw new IllegalArgumentException ( "Name is null" ) ; URL url = cl . getResource ( name ) ; if ( url == null ) throw new IllegalArgumentException ( "Resource is null" ) ; log . debugf ( "Deploying: %s" , url ) ; kernel . getMainDeployer ( ) . deploy ( url ) ; |
public class ClassFileVersion { /** * Finds the highest class file version that is compatible to the current JVM version . Prior to Java 9 , this is achieved
* by parsing the { @ code java . version } property which is provided by { @ link java . lang . System # getProperty ( String ) } . If the system
* property is not available , the { @ code fallback } version is returned .
* @ param fallback The version to fallback to if locating a class file version is not possible .
* @ return The currently running Java process ' s class file version or the fallback if locating this version is impossible . */
@ SuppressFBWarnings ( value = "REC_CATCH_EXCEPTION" , justification = "Exception should not be rethrown but trigger a fallback" ) public static ClassFileVersion ofThisVm ( ClassFileVersion fallback ) { } } | try { return ofThisVm ( ) ; } catch ( Exception ignored ) { return fallback ; } |
public class PoolEvaluateAutoScaleOptions { /** * Set the time the request was issued . Client libraries typically set this to the current system clock time ; set it explicitly if you are calling the REST API directly .
* @ param ocpDate the ocpDate value to set
* @ return the PoolEvaluateAutoScaleOptions object itself . */
public PoolEvaluateAutoScaleOptions withOcpDate ( DateTime ocpDate ) { } } | if ( ocpDate == null ) { this . ocpDate = null ; } else { this . ocpDate = new DateTimeRfc1123 ( ocpDate ) ; } return this ; |
public class FrameReadProcessor { /** * Finish building the current frame : process its payload and pass it to the Stream Processor
* @ throws ProtocolException */
public void processCompleteFrame ( ) throws Http2Exception { } } | Frame currentFrame = getCurrentFrame ( ) ; boolean frameSizeError = false ; try { currentFrame . processPayload ( this ) ; } catch ( Http2Exception e ) { // If we get an error here , it should be safe to assume that this frame doesn ' t have the expected byte count ,
// which must be treated as an error of type FRAME _ SIZE _ ERROR . If we ' re processing a DATA or PRIORITY frame , then
// we can treat the error as a stream error rather than a connection error .
if ( ! e . isConnectionError ( ) ) { frameSizeError = true ; } else { // this is a connection error ; we need to send a GOAWAY on the connection
throw e ; } } catch ( Exception e ) { throw new ProtocolException ( "Error processing the payload for " + currentFrame . getFrameType ( ) + " frame on stream " + currentFrame . getStreamId ( ) ) ; } // call the stream processor to process this stream . For now , don ' t return from here until the
// frame has been fully processed .
int streamId = currentFrame . getStreamId ( ) ; H2StreamProcessor stream = muxLink . getStream ( streamId ) ; if ( stream == null ) { if ( ( streamId != 0 ) && ( streamId % 2 == 0 ) ) { if ( currentFrame . getFrameType ( ) . equals ( FrameTypes . PRIORITY ) ) { // ignore PRIORITY frames in any state
return ; } else if ( currentFrame . getFrameType ( ) . equals ( FrameTypes . RST_STREAM ) && streamId < muxLink . getHighestClientStreamId ( ) ) { // tolerate RST _ STREAM frames that are sent on closed push streams
return ; } else { throw new ProtocolException ( "Cannot start a stream from the client with an even numbered ID. stream-id: " + streamId ) ; } } else { stream = startNewInboundSession ( streamId ) ; } } if ( frameSizeError ) { currentFrame = new FrameRstStream ( streamId , 4 , ( byte ) 0 , false , FrameDirection . READ ) ; ( ( FrameRstStream ) currentFrame ) . setErrorCode ( Constants . FRAME_SIZE_ERROR ) ; } stream . processNextFrame ( currentFrame , Direction . READ_IN ) ; |
public class CapacityMarshaller { /** * Marshall the given parameter object . */
public void marshall ( Capacity capacity , ProtocolMarshaller protocolMarshaller ) { } } | if ( capacity == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( capacity . getReadCapacityUnits ( ) , READCAPACITYUNITS_BINDING ) ; protocolMarshaller . marshall ( capacity . getWriteCapacityUnits ( ) , WRITECAPACITYUNITS_BINDING ) ; protocolMarshaller . marshall ( capacity . getCapacityUnits ( ) , CAPACITYUNITS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class Period { /** * Returns a new period with the specified number of years .
 * This period instance is immutable and unaffected by this method call .
 * @ param years the amount of years to set ( replaces the current value , may be negative )
 * @ return the new period with the given years field
 * @ throws UnsupportedOperationException if the field is not supported */
public Period withYears ( int years ) { } } | int [ ] values = getValues ( ) ; // cloned , so mutating it cannot affect this instance
getPeriodType ( ) . setIndexedField ( this , PeriodType . YEAR_INDEX , values , years ) ; // rejects the write when this type has no years field
return new Period ( values , getPeriodType ( ) ) ; |
public class MetadataService { /** * Removes the specified { @ link Token } from the specified { @ code projectName } . It also removes
 * every token permission belonging to the { @ link Token } from every { @ link RepositoryMetadata } .
 * Convenience overload that delegates to the id-based variant using the token's application id . */
public CompletableFuture < Revision > removeToken ( Author author , String projectName , Token token ) { } } | return removeToken ( author , projectName , requireNonNull ( token , "token" ) . appId ( ) ) ; |
public class ZWaveNode { /** * Encapsulates a serial message for sending to a
* multi - instance instance / multi - channel endpoint on
* a node .
* @ param serialMessage the serial message to encapsulate
* @ param commandClass the command class used to generate the message .
* @ param endpointId the instance / endpoint to encapsulate the message for
* @ param node the destination node .
* @ return SerialMessage on success , null on failure . */
public SerialMessage encapsulate ( SerialMessage serialMessage , ZWaveCommandClass commandClass , int endpointId ) { } } | ZWaveMultiInstanceCommandClass multiInstanceCommandClass ; if ( serialMessage == null ) return null ; // no encapsulation necessary .
if ( endpointId == 1 && commandClass . getInstances ( ) == 1 && commandClass . getEndpoint ( ) == null ) return serialMessage ; multiInstanceCommandClass = ( ZWaveMultiInstanceCommandClass ) this . getCommandClass ( ZWaveCommandClass . CommandClass . MULTI_INSTANCE ) ; if ( multiInstanceCommandClass != null ) { logger . debug ( "Encapsulating message for node {}, instance / endpoint {}" , this . getNodeId ( ) , endpointId ) ; switch ( multiInstanceCommandClass . getVersion ( ) ) { case 2 : if ( commandClass . getEndpoint ( ) != null ) { serialMessage = multiInstanceCommandClass . getMultiChannelEncapMessage ( serialMessage , commandClass . getEndpoint ( ) ) ; return serialMessage ; } break ; case 1 : default : if ( commandClass . getInstances ( ) >= endpointId ) { serialMessage = multiInstanceCommandClass . getMultiInstanceEncapMessage ( serialMessage , endpointId ) ; return serialMessage ; } break ; } } if ( endpointId != 1 ) { logger . warn ( "Encapsulating message for node {}, instance / endpoint {} failed, will discard message." , this . getNodeId ( ) , endpointId ) ; return null ; } return serialMessage ; |
public class PersonVisitor { /** * Visit a FamS . We will build up a collection of Navigators to the FamSs .
* @ see GedObjectVisitor # visit ( FamS ) */
@ Override public void visit ( final FamS fams ) { } } | final FamilyNavigator navigator = new FamilyNavigator ( fams ) ; final Family family = navigator . getFamily ( ) ; if ( family . isSet ( ) ) { familySNavigators . add ( navigator ) ; } |
public class _ComponentAttributesMap { /** * Execute the setter method of the specified property on the underlying
* component .
* @ param propertyDescriptor specifies which property to write .
* @ throws IllegalArgumentException if the property is not writable .
* @ throws FacesException if any other problem occurs while invoking
* the getter method . */
private void setComponentProperty ( _PropertyDescriptorHolder propertyDescriptor , Object value ) { } } | Method writeMethod = propertyDescriptor . getWriteMethod ( ) ; if ( writeMethod == null ) { throw new IllegalArgumentException ( "Component property " + propertyDescriptor . getName ( ) + " is not writable" ) ; } try { writeMethod . invoke ( _component , new Object [ ] { value } ) ; } catch ( Exception e ) { FacesContext facesContext = _component . getFacesContext ( ) ; throw new FacesException ( "Could not set property " + propertyDescriptor . getName ( ) + " of component " + _component . getClientId ( facesContext ) + " to value : " + value + " with type : " + ( value == null ? "null" : value . getClass ( ) . getName ( ) ) , e ) ; } |
public class StandaloneCommandBuilder { /** * Adds a JVM argument to the command ignoring { @ code null } arguments .
* @ param jvmArg the JVM argument to add
* @ return the builder */
public StandaloneCommandBuilder addJavaOption ( final String jvmArg ) { } } | if ( jvmArg != null && ! jvmArg . trim ( ) . isEmpty ( ) ) { final Argument argument = Arguments . parse ( jvmArg ) ; switch ( argument . getKey ( ) ) { case SERVER_BASE_DIR : if ( argument . getValue ( ) != null ) { setBaseDirectory ( argument . getValue ( ) ) ; } break ; case SERVER_CONFIG_DIR : if ( argument . getValue ( ) != null ) { setConfigurationDirectory ( argument . getValue ( ) ) ; } break ; case SERVER_LOG_DIR : if ( argument . getValue ( ) != null ) { setLogDirectory ( argument . getValue ( ) ) ; } break ; case SECURITY_MANAGER_PROP : setUseSecurityManager ( true ) ; break ; default : javaOpts . add ( argument ) ; break ; } } return this ; |
public class MapObjectReference { /** * Associates the given key object with its Key handle in the backing key map .
 * @ param key the lookup key object
 * @ param k the Key handle to associate
 * CHECKSTYLE : OFF */
@ Override public void __put ( final Object key , final Key k ) { } } | keyMap . put ( key , k ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.