signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class VMath { /** * Reset the matrix to 0. * @ param m Matrix */ public static void clear ( final double [ ] [ ] m ) { } }
for ( int i = 0 ; i < m . length ; i ++ ) { Arrays . fill ( m [ i ] , 0.0 ) ; }
public class ObjectBooleanHashMap { /** * exposed for testing */ int probe ( Object element ) { } }
int index = this . spread ( element ) ; int removedIndex = - 1 ; if ( ObjectBooleanHashMap . isRemovedKey ( this . keys [ index ] ) ) { removedIndex = index ; } else if ( this . keys [ index ] == null || ObjectBooleanHashMap . nullSafeEquals ( this . toNonSentinel ( this . keys [ index ] ) , element ) ) { return index ; } int nextIndex = index ; int probe = 17 ; // loop until an empty slot is reached while ( true ) { // Probe algorithm : 17 * n * ( n + 1 ) / 2 where n = no . of collisions nextIndex += probe ; probe += 17 ; nextIndex &= this . keys . length - 1 ; if ( ObjectBooleanHashMap . isRemovedKey ( this . keys [ nextIndex ] ) ) { if ( removedIndex == - 1 ) { removedIndex = nextIndex ; } } else if ( ObjectBooleanHashMap . nullSafeEquals ( this . toNonSentinel ( this . keys [ nextIndex ] ) , element ) ) { return nextIndex ; } else if ( this . keys [ nextIndex ] == null ) { return removedIndex == - 1 ? nextIndex : removedIndex ; } }
public class VMath { /** * Computes component - wise v1 = v1 * s1 + v2, * overwriting the vector v1. * @ param v1 first vector ( overwritten ) * @ param s1 scalar factor for v1 * @ param v2 another vector * @ return v1 = v1 * s1 + v2 */ public static double [ ] timesPlusEquals ( final double [ ] v1 , final double s1 , final double [ ] v2 ) { } }
assert v1 . length == v2 . length : ERR_VEC_DIMENSIONS ; for ( int i = 0 ; i < v1 . length ; i ++ ) { v1 [ i ] = v1 [ i ] * s1 + v2 [ i ] ; } return v1 ;
public class FileSystemView { /** * Creates a hard link at the given link path to the regular file at the given path . The existing * file must exist and must be a regular file . The given file system view must belong to the same * file system as this view . */ public void link ( JimfsPath link , FileSystemView existingView , JimfsPath existing ) throws IOException { } }
checkNotNull ( link ) ; checkNotNull ( existingView ) ; checkNotNull ( existing ) ; if ( ! store . supportsFeature ( Feature . LINKS ) ) { throw new UnsupportedOperationException ( ) ; } if ( ! isSameFileSystem ( existingView ) ) { throw new FileSystemException ( link . toString ( ) , existing . toString ( ) , "can't link: source and target are in different file system instances" ) ; } Name linkName = link . name ( ) ; // existingView is in the same file system , so just one lock is needed store . writeLock ( ) . lock ( ) ; try { // we do want to follow links when finding the existing file File existingFile = existingView . lookUp ( existing , Options . FOLLOW_LINKS ) . requireExists ( existing ) . file ( ) ; if ( ! existingFile . isRegularFile ( ) ) { throw new FileSystemException ( link . toString ( ) , existing . toString ( ) , "can't link: not a regular file" ) ; } Directory linkParent = lookUp ( link , Options . NOFOLLOW_LINKS ) . requireDoesNotExist ( link ) . directory ( ) ; linkParent . link ( linkName , existingFile ) ; linkParent . updateModifiedTime ( ) ; } finally { store . writeLock ( ) . unlock ( ) ; }
public class QueryReportPageController { /** * Appends query page comment to request . * @ param requestContext request contract * @ param queryPage query page * @ param processor comment processor */ protected void appendQueryPageComments ( RequestContext requestContext , final QueryPage queryPage , ReportPageCommentProcessor processor ) { } }
QueryQuestionCommentDAO queryQuestionCommentDAO = new QueryQuestionCommentDAO ( ) ; processor . processComments ( ) ; List < QueryQuestionComment > rootComments = processor . getRootComments ( ) ; Map < Long , List < QueryQuestionComment > > childComments = queryQuestionCommentDAO . listTreesByQueryPage ( queryPage ) ; QueryUtils . appendQueryPageRootComments ( requestContext , queryPage . getId ( ) , rootComments ) ; QueryUtils . appendQueryPageChildComments ( requestContext , childComments ) ;
public class PluginDefaultGroovyMethods { /** * Overloads the left shift operator to provide an easy way to append multiple * objects as string representations to a StringBuilder . * @ param self a StringBuilder * @ param value a value to append * @ return the StringBuilder on which this operation was invoked */ public static StringBuilder leftShift ( StringBuilder self , Object value ) { } }
if ( value instanceof GString ) { // Force the conversion of the GString to string now , or appending // is going to be extremely expensive , due to calls to GString # charAt , // which is going to re - evaluate the GString for each character ! return self . append ( value . toString ( ) ) ; } if ( value instanceof CharSequence ) { return self . append ( ( CharSequence ) value ) ; } return self . append ( value ) ;
public class MtasDataCollector { /** * Gets the key list . * @ return the key list * @ throws IOException Signals that an I / O exception has occurred . */ public Set < String > getKeyList ( ) throws IOException { } }
if ( ! closed ) { close ( ) ; } return new HashSet < > ( Arrays . asList ( keyList ) ) ;
public class IfcAppliedValueImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) @ Override public EList < IfcAppliedValue > getComponents ( ) { } }
return ( EList < IfcAppliedValue > ) eGet ( Ifc4Package . Literals . IFC_APPLIED_VALUE__COMPONENTS , true ) ;
public class XMLConfigAdmin { /** * delete a customtagmapping on system * @ param virtual * @ throws SecurityException */ public void removeCustomTag ( String virtual ) throws SecurityException { } }
checkWriteAccess ( ) ; Element mappings = _getRootElement ( "custom-tag" ) ; Element [ ] children = XMLConfigWebFactory . getChildren ( mappings , "mapping" ) ; for ( int i = 0 ; i < children . length ; i ++ ) { if ( virtual . equals ( createVirtual ( children [ i ] ) ) ) mappings . removeChild ( children [ i ] ) ; }
public class LengthExact { /** * { @ inheritDoc } * @ throws SuperCsvCellProcessorException * { @ literal if value is null } * @ throws SuperCsvConstraintViolationException * { @ literal if length is < min or length > max } */ @ SuppressWarnings ( "unchecked" ) public Object execute ( final Object value , final CsvContext context ) { } }
if ( value == null ) { return next . execute ( value , context ) ; } final String stringValue = value . toString ( ) ; final int length = stringValue . length ( ) ; if ( ! requriedLengths . contains ( length ) ) { final String joinedLength = requriedLengths . stream ( ) . map ( String :: valueOf ) . collect ( Collectors . joining ( ", " ) ) ; throw createValidationException ( context ) . messageFormat ( "the length (%d) of value '%s' not any of required lengths (%s)" , length , stringValue , joinedLength ) . rejectedValue ( stringValue ) . messageVariables ( "length" , length ) . messageVariables ( "requiredLengths" , getRequiredLengths ( ) ) . build ( ) ; } return next . execute ( stringValue , context ) ;
public class TableStreamer { /** * Finalize the output buffers and write them to the corresponding data targets * @ return A future that can used to wait for all targets to finish writing the buffers */ private ListenableFuture < ? > writeBlocksToTargets ( Collection < DBBPool . BBContainer > outputBuffers , int [ ] serialized ) { } }
Preconditions . checkArgument ( m_tableTasks . size ( ) == serialized . length ) ; Preconditions . checkArgument ( outputBuffers . size ( ) == serialized . length ) ; final List < ListenableFuture < ? > > writeFutures = new ArrayList < ListenableFuture < ? > > ( outputBuffers . size ( ) ) ; // The containers , the data targets , and the serialized byte counts should all line up Iterator < DBBPool . BBContainer > containerIter = outputBuffers . iterator ( ) ; int serializedIndex = 0 ; for ( SnapshotTableTask task : m_tableTasks ) { final DBBPool . BBContainer container = containerIter . next ( ) ; /* * Finalize the buffer by setting position to 0 and limit to the last used byte */ final ByteBuffer buf = container . b ( ) ; buf . limit ( serialized [ serializedIndex ++ ] + task . m_target . getHeaderSize ( ) ) ; buf . position ( 0 ) ; Callable < DBBPool . BBContainer > valueForTarget = Callables . returning ( container ) ; if ( task . m_filters != null ) { for ( SnapshotDataFilter filter : task . m_filters ) { valueForTarget = filter . filter ( valueForTarget ) ; } } ListenableFuture < ? > writeFuture = task . m_target . write ( valueForTarget , m_tableId ) ; if ( writeFuture != null ) { writeFutures . add ( writeFuture ) ; } } // Wraps all write futures in one future return Futures . allAsList ( writeFutures ) ;
public class StatusUpdate { /** * / * package */ HttpParameter [ ] asHttpParameterArray ( ) { } }
ArrayList < HttpParameter > params = new ArrayList < HttpParameter > ( ) ; appendParameter ( "status" , status , params ) ; if ( - 1 != inReplyToStatusId ) { appendParameter ( "in_reply_to_status_id" , inReplyToStatusId , params ) ; } if ( location != null ) { appendParameter ( "lat" , location . getLatitude ( ) , params ) ; appendParameter ( "long" , location . getLongitude ( ) , params ) ; } appendParameter ( "place_id" , placeId , params ) ; if ( ! displayCoordinates ) { appendParameter ( "display_coordinates" , "false" , params ) ; } if ( null != mediaFile ) { params . add ( new HttpParameter ( "media[]" , mediaFile ) ) ; params . add ( new HttpParameter ( "possibly_sensitive" , possiblySensitive ) ) ; } else if ( mediaName != null && mediaBody != null ) { params . add ( new HttpParameter ( "media[]" , mediaName , mediaBody ) ) ; params . add ( new HttpParameter ( "possibly_sensitive" , possiblySensitive ) ) ; } else if ( mediaIds != null && mediaIds . length >= 1 ) { params . add ( new HttpParameter ( "media_ids" , StringUtil . join ( mediaIds ) ) ) ; } if ( autoPopulateReplyMetadata ) { appendParameter ( "auto_populate_reply_metadata" , "true" , params ) ; } appendParameter ( "attachment_url" , attachmentUrl , params ) ; HttpParameter [ ] paramArray = new HttpParameter [ params . size ( ) ] ; return params . toArray ( paramArray ) ;
public class JMessageClient { /** * Update friends ' note information . The size is limit to 500. * @ param username Necessary * @ param array FriendNote array * @ return No content * @ throws APIConnectionException connect exception * @ throws APIRequestException request exception */ public ResponseWrapper updateFriendsNote ( String username , FriendNote [ ] array ) throws APIConnectionException , APIRequestException { } }
return _userClient . updateFriendsNote ( username , array ) ;
public class SmartBinder { /** * Cast the return value to the given type . * Example : Our current signature is ( String ) String but the method this * handle will eventually call returns CharSequence . * < code > binder = binder . castReturn ( CharSequence . class ) ; < / code > * Our handle will now successfully find and call the target method and * propagate the returned CharSequence as a String . * @ param type the new type for the return value * @ return a new SmartBinder */ public SmartBinder castReturn ( Class < ? > type ) { } }
return new SmartBinder ( this , signature ( ) . changeReturn ( type ) , binder . cast ( type , binder . type ( ) . parameterArray ( ) ) ) ;
public class AbstractHBCICallback { /** * Erzeugt einen Log - Eintrag . Diese Methode wird von den mitgelieferten * Callback - Klassen für die Erzeugung von Log - Einträgen verwendet . Um * ein eigenes Format für die Log - Eintrage zu definieren , kann diese * Methode mit einer eigenen Implementierung überschrieben werden . < br / > * Die Parameter entsprechen denen der * { @ link HBCICallback # log ( String , int , Date , StackTraceElement ) log } - Methode * @ return ein Log - Eintrag */ protected String createDefaultLogLine ( String msg , int level , Date date , StackTraceElement trace ) { } }
String [ ] levels = { "NON" , "ERR" , "WRN" , "INF" , "DBG" , "DB2" , "INT" } ; StringBuffer ret = new StringBuffer ( 128 ) ; ret . append ( "<" ) . append ( levels [ level ] ) . append ( "> " ) ; SimpleDateFormat df = new SimpleDateFormat ( "yyyy.MM.dd HH:mm:ss.SSS" ) ; ret . append ( "[" ) . append ( df . format ( date ) ) . append ( "] " ) ; Thread thread = Thread . currentThread ( ) ; ret . append ( "[" ) . append ( thread . getThreadGroup ( ) . getName ( ) ) ; ret . append ( "/" ) . append ( thread . getName ( ) ) . append ( "] " ) ; String classname = trace . getClassName ( ) ; String hbciname = "org.kapott.hbci." ; if ( classname != null && classname . startsWith ( hbciname ) ) ret . append ( classname . substring ( ( hbciname ) . length ( ) ) ) . append ( ": " ) ; if ( msg == null ) msg = "" ; StringBuffer escapedString = new StringBuffer ( ) ; int len = msg . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { char ch = msg . charAt ( i ) ; int x = ch ; if ( ( x < 26 && x != 9 && x != 10 && x != 13 ) || ch == '\\' ) { String temp = Integer . toString ( x , 16 ) ; if ( temp . length ( ) != 2 ) temp = "0" + temp ; escapedString . append ( "\\" ) . append ( temp ) ; } else escapedString . append ( ch ) ; } ret . append ( escapedString ) ; return ret . toString ( ) ;
public class HadoopLogsAnalyzer { /** * @ param args * Last arg is the input file . That file can be a directory , in which * case you get all the files in sorted order . We will decompress * files whose nmes end in . gz . * switches : - c collect line types . * - d debug mode * - delays print out the delays [ interval between job submit time and * launch time ] * - runtimes print out the job runtimes * - spreads print out the ratio of 10 % ile and 90 % ile , of both the * successful map task attempt run times and the the successful * reduce task attempt run times * - tasktimes prints out individual task time distributions * collects all the line types and prints the first example of each * one */ public static void main ( String [ ] args ) { } }
try { HadoopLogsAnalyzer analyzer = new HadoopLogsAnalyzer ( ) ; int result = ToolRunner . run ( analyzer , args ) ; if ( result == 0 ) { return ; } System . exit ( result ) ; } catch ( FileNotFoundException e ) { LOG . error ( "" , e ) ; e . printStackTrace ( staticDebugOutput ) ; System . exit ( 1 ) ; } catch ( IOException e ) { LOG . error ( "" , e ) ; e . printStackTrace ( staticDebugOutput ) ; System . exit ( 2 ) ; } catch ( Exception e ) { LOG . error ( "" , e ) ; e . printStackTrace ( staticDebugOutput ) ; System . exit ( 3 ) ; }
public class RouteFeature { /** * { @ inheritDoc } */ @ Override public boolean configure ( final FeatureContext context ) { } }
final String routePath = ( String ) context . getConfiguration ( ) . getProperty ( "resource.helper.route.path" ) ; if ( StringUtils . isNotBlank ( routePath ) ) { context . register ( new ModelProcessor ( ) { @ Override public ResourceModel processResourceModel ( ResourceModel resourceModel , Configuration configuration ) { ResourceModel . Builder resourceModelBuilder = new ResourceModel . Builder ( resourceModel , false ) ; Resource resource = Resource . builder ( RouteHelper . class ) . path ( routePath ) . build ( ) ; resourceModelBuilder . addResource ( resource ) ; return resourceModelBuilder . build ( ) ; } @ Override public ResourceModel processSubResource ( ResourceModel subResourceModel , Configuration configuration ) { return subResourceModel ; } } ) ; return true ; } return false ;
public class RunList { /** * Returns the first streak of the elements that satisfy the given predicate . * For example , { @ code filter ( [ 1,2,3,4 ] , odd ) = = [ 1,3 ] } but { @ code limit ( [ 1,2,3,4 ] , odd ) = = [ 1 ] } . */ private RunList < R > limit ( final CountingPredicate < R > predicate ) { } }
size = null ; first = null ; final Iterable < R > nested = base ; base = new Iterable < R > ( ) { public Iterator < R > iterator ( ) { return hudson . util . Iterators . limit ( nested . iterator ( ) , predicate ) ; } @ Override public String toString ( ) { return Iterables . toString ( this ) ; } } ; return this ;
public class OutputStreamLogSink { public String getDatedFilename ( ) { } }
if ( _filename == null ) return null ; if ( _out == null || ! ( _out instanceof RolloverFileOutputStream ) ) return null ; return ( ( RolloverFileOutputStream ) _out ) . getDatedFilename ( ) ;
public class AmazonIdentityManagementClient { /** * Deletes the specified instance profile . The instance profile must not have an associated role . * < important > * Make sure that you do not have any Amazon EC2 instances running with the instance profile you are about to * delete . Deleting a role or instance profile that is associated with a running instance will break any * applications running on the instance . * < / important > * For more information about instance profiles , go to < a * href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / AboutInstanceProfiles . html " > About Instance Profiles < / a > . * @ param deleteInstanceProfileRequest * @ return Result of the DeleteInstanceProfile operation returned by the service . * @ throws NoSuchEntityException * The request was rejected because it referenced a resource entity that does not exist . The error message * describes the resource . * @ throws DeleteConflictException * The request was rejected because it attempted to delete a resource that has attached subordinate * entities . The error message describes these entities . * @ throws LimitExceededException * The request was rejected because it attempted to create resources beyond the current AWS account limits . * The error message describes the limit exceeded . * @ throws ServiceFailureException * The request processing has failed because of an unknown error , exception or failure . * @ sample AmazonIdentityManagement . DeleteInstanceProfile * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / iam - 2010-05-08 / DeleteInstanceProfile " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DeleteInstanceProfileResult deleteInstanceProfile ( DeleteInstanceProfileRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteInstanceProfile ( request ) ;
public class GenericType { /** * Substitutes a free type variable with an actual type . See { @ link GenericType this class ' s * javadoc } for an example . */ @ NonNull public final < X > GenericType < T > where ( @ NonNull GenericTypeParameter < X > freeVariable , @ NonNull Class < X > actualType ) { } }
return where ( freeVariable , GenericType . of ( actualType ) ) ;
public class AdminToolQuartzServiceImpl { /** * / * ( non - Javadoc ) * @ see de . chandre . admintool . quartz . AdminToolQuartzService # changeTrigger ( de . chandre . admintool . quartz . JobTriggerTO , boolean ) */ @ Override public boolean changeTrigger ( JobTriggerTO triggerTO , boolean add ) throws SchedulerException { } }
if ( ! config . isChangeTriggerAllowed ( ) ) { LOGGER . warn ( "not allowed to change any trigger" ) ; return false ; } JobDetail detail = findJob ( triggerTO . getOriginalJobGroup ( ) , triggerTO . getOriginalJobName ( ) ) ; if ( null == detail ) { return false ; } Trigger trigger = findTrigger ( triggerTO . getOriginalJobGroup ( ) , triggerTO . getOriginalJobName ( ) , triggerTO . getOriginalTriggerGroup ( ) , triggerTO . getOriginalTriggerName ( ) ) ; if ( null == trigger ) { if ( add ) { this . scheduler . scheduleJob ( buildTrigger ( detail , null , triggerTO ) ) ; return true ; } return false ; } this . scheduler . rescheduleJob ( trigger . getKey ( ) , buildTrigger ( detail , trigger , triggerTO ) ) ; return true ;
public class CmsXmlVfsFileValue { /** * Sets the value as a structure id . < p > * @ param cms the current CMS context * @ param id the structure id which should be stored in the file value */ public void setIdValue ( CmsObject cms , CmsUUID id ) { } }
CmsRelationType type = getRelationType ( getPath ( ) ) ; CmsLink link = new CmsLink ( TYPE_VFS_LINK , type , id , "@" , true ) ; // link management check link . checkConsistency ( cms ) ; // update xml node CmsLinkUpdateUtil . updateXmlForVfsFile ( link , m_element . addElement ( CmsXmlPage . NODE_LINK ) ) ;
public class YamlEngine { /** * Unmarshal YAML . * @ param yamlContent YAML content * @ return map from YAML */ public static Map < ? , ? > unmarshal ( final String yamlContent ) { } }
return Strings . isNullOrEmpty ( yamlContent ) ? new LinkedHashMap < > ( ) : ( Map ) new Yaml ( ) . load ( yamlContent ) ;
public class AlertContainer { /** * Perform the specified action on the drop container . Also , notifies the container ' s parent * that an action has occurred . * @ param action An action . */ @ Override public void doAction ( Action action ) { } }
BaseComponent parent = getParent ( ) ; switch ( action ) { case REMOVE : ActionListener . unbindActionListeners ( this , actionListeners ) ; detach ( ) ; break ; case HIDE : case COLLAPSE : setVisible ( false ) ; break ; case SHOW : case EXPAND : setVisible ( true ) ; break ; case TOP : parent . addChild ( this , 0 ) ; break ; } if ( parent != null ) { EventUtil . post ( MainController . ALERT_ACTION_EVENT , parent , action ) ; }
public class StylesContainer { /** * Add a page style * @ param ps the style * @ return true if the master page style and the style layout where added */ public boolean addPageStyle ( final PageStyle ps ) { } }
boolean ret = this . addMasterPageStyle ( ps . getMasterPageStyle ( ) ) ; ret = this . addPageLayoutStyle ( ps . getPageLayoutStyle ( ) ) && ret ; return ret ;
public class ReloadingPropertyPlaceholderConfigurer { /** * 当配置更新时 , 被调用 * @ param event */ public void propertiesReloaded ( PropertiesReloadedEvent event ) { } }
Properties oldProperties = lastMergedProperties ; try { Properties newProperties = mergeProperties ( ) ; // 获取哪些 dynamic property 被影响 Set < String > placeholders = placeholderToDynamics . keySet ( ) ; Set < DynamicProperty > allDynamics = new HashSet < DynamicProperty > ( ) ; for ( String placeholder : placeholders ) { String newValue = newProperties . getProperty ( placeholder ) ; String oldValue = oldProperties . getProperty ( placeholder ) ; if ( newValue != null && ! newValue . equals ( oldValue ) || newValue == null && oldValue != null ) { if ( logger . isInfoEnabled ( ) ) { logger . info ( "Property changed detected: " + placeholder + ( newValue != null ? "=" + newValue : " removed" ) ) ; } List < DynamicProperty > affectedDynamics = placeholderToDynamics . get ( placeholder ) ; allDynamics . addAll ( affectedDynamics ) ; } } // 获取受影响的beans Map < String , List < DynamicProperty > > dynamicsByBeanName = new HashMap < String , List < DynamicProperty > > ( ) ; Map < String , Object > beanByBeanName = new HashMap < String , Object > ( ) ; for ( DynamicProperty dynamic : allDynamics ) { String beanName = dynamic . getBeanName ( ) ; List < DynamicProperty > l = dynamicsByBeanName . get ( beanName ) ; if ( l == null ) { dynamicsByBeanName . put ( beanName , ( l = new ArrayList < DynamicProperty > ( ) ) ) ; Object bean = null ; try { bean = applicationContext . getBean ( beanName ) ; beanByBeanName . put ( beanName , bean ) ; } catch ( BeansException e ) { // keep dynamicsByBeanName list , warn only once . logger . error ( "Error obtaining bean " + beanName , e ) ; } // say hello try { if ( bean instanceof IReconfigurationAware ) { ( ( IReconfigurationAware ) bean ) . beforeReconfiguration ( ) ; // hello ! } } catch ( Exception e ) { logger . error ( "Error calling beforeReconfiguration on " + beanName , e ) ; } } l . add ( dynamic ) ; } // 处理受影响的bean Collection < String > beanNames = dynamicsByBeanName . 
keySet ( ) ; for ( String beanName : beanNames ) { Object bean = beanByBeanName . get ( beanName ) ; if ( bean == null ) // problems obtaining bean , earlier { continue ; } BeanWrapper beanWrapper = new BeanWrapperImpl ( bean ) ; // for all affected . . . List < DynamicProperty > dynamics = dynamicsByBeanName . get ( beanName ) ; for ( DynamicProperty dynamic : dynamics ) { String propertyName = dynamic . getPropertyName ( ) ; String unparsedValue = dynamic . getUnparsedValue ( ) ; // obtain an updated value , including dependencies String newValue ; removeDynamic ( dynamic ) ; currentBeanName = beanName ; currentPropertyName = propertyName ; try { newValue = parseStringValue ( unparsedValue , newProperties , new HashSet ( ) ) ; } finally { currentBeanName = null ; currentPropertyName = null ; } if ( logger . isInfoEnabled ( ) ) { logger . info ( "Updating property " + beanName + "." + propertyName + " to " + newValue ) ; } // assign it to the bean try { beanWrapper . setPropertyValue ( propertyName , newValue ) ; } catch ( BeansException e ) { logger . error ( "Error setting property " + beanName + "." + propertyName + " to " + newValue , e ) ; } } } // say goodbye . for ( String beanName : beanNames ) { Object bean = beanByBeanName . get ( beanName ) ; try { if ( bean instanceof IReconfigurationAware ) { ( ( IReconfigurationAware ) bean ) . afterReconfiguration ( ) ; } } catch ( Exception e ) { logger . error ( "Error calling afterReconfiguration on " + beanName , e ) ; } } } catch ( IOException e ) { logger . error ( "Error trying to reload net.unicon.iamlabs.spring.properties.example.net.unicon.iamlabs" + ".spring" + ".properties: " + e . getMessage ( ) , e ) ; }
public class BrowserPane { /** * Activates tooltips . * @ param show * if true , shows tooltips . */ public void activateTooltip ( boolean show ) { } }
if ( show ) { ToolTipManager . sharedInstance ( ) . registerComponent ( this ) ; } else { ToolTipManager . sharedInstance ( ) . unregisterComponent ( this ) ; } ToolTipManager . sharedInstance ( ) . setEnabled ( show ) ;
public class HadoopLocationWizard { /** * Create a SWT Checked Button component for the given { @ link ConfProp } * boolean configuration property . * @ param listener * @ param parent * @ param prop * @ return */ private Button createConfCheckButton ( SelectionListener listener , Composite parent , ConfProp prop , String text ) { } }
Button button = new Button ( parent , SWT . CHECK ) ; button . setText ( text ) ; button . setData ( "hProp" , prop ) ; button . setSelection ( location . getConfProp ( prop ) . equalsIgnoreCase ( "yes" ) ) ; button . addSelectionListener ( listener ) ; return button ;
public class LessParser { /** * Create a rule and parse the content of an block . * @ param selector the selectors * @ param parent the parent in the hierarchy * @ param params the parameters if it is a mixin . * @ param guard an optional guard expression * @ return the rule */ @ Nonnull private Rule rule ( FormattableContainer parent , String selector , Operation params , Expression guard ) { } }
Rule rule = new Rule ( reader , parent , selector , params , guard ) ; parseRule ( rule ) ; return rule ;
public class Instant { /** * Obtains an instance of { @ code Instant } from a temporal object . * A { @ code TemporalAccessor } represents some form of date and time information . * This factory converts the arbitrary temporal object to an instance of { @ code Instant } . * The conversion extracts the { @ link ChronoField # INSTANT _ SECONDS INSTANT _ SECONDS } * and { @ link ChronoField # NANO _ OF _ SECOND NANO _ OF _ SECOND } fields . * This method matches the signature of the functional interface { @ link TemporalQuery } * allowing it to be used as a query via method reference , { @ code Instant : : from } . * @ param temporal the temporal object to convert , not null * @ return the instant , not null * @ throws DateTimeException if unable to convert to an { @ code Instant } */ public static Instant from ( TemporalAccessor temporal ) { } }
try { long instantSecs = temporal . getLong ( INSTANT_SECONDS ) ; int nanoOfSecond = temporal . get ( NANO_OF_SECOND ) ; return Instant . ofEpochSecond ( instantSecs , nanoOfSecond ) ; } catch ( DateTimeException ex ) { throw new DateTimeException ( "Unable to obtain Instant from TemporalAccessor: " + temporal + ", type " + temporal . getClass ( ) . getName ( ) , ex ) ; }
public class JavaParser { /** * src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 1296:5 : arguments : ' ( ' ( expressionList ) ? ' ) ' ; */ public final void arguments ( ) throws RecognitionException { } }
int arguments_StartIndex = input . index ( ) ; try { if ( state . backtracking > 0 && alreadyParsedRule ( input , 140 ) ) { return ; } // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 1297:5 : ( ' ( ' ( expressionList ) ? ' ) ' ) // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 1297:7 : ' ( ' ( expressionList ) ? ' ) ' { match ( input , 36 , FOLLOW_36_in_arguments6395 ) ; if ( state . failed ) return ; // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 1297:11 : ( expressionList ) ? int alt192 = 2 ; int LA192_0 = input . LA ( 1 ) ; if ( ( ( LA192_0 >= CharacterLiteral && LA192_0 <= DecimalLiteral ) || LA192_0 == FloatingPointLiteral || ( LA192_0 >= HexLiteral && LA192_0 <= Identifier ) || ( LA192_0 >= OctalLiteral && LA192_0 <= StringLiteral ) || LA192_0 == 29 || LA192_0 == 36 || ( LA192_0 >= 40 && LA192_0 <= 41 ) || ( LA192_0 >= 44 && LA192_0 <= 45 ) || LA192_0 == 53 || LA192_0 == 65 || LA192_0 == 67 || ( LA192_0 >= 70 && LA192_0 <= 71 ) || LA192_0 == 77 || ( LA192_0 >= 79 && LA192_0 <= 80 ) || LA192_0 == 82 || LA192_0 == 85 || LA192_0 == 92 || LA192_0 == 94 || ( LA192_0 >= 97 && LA192_0 <= 98 ) || LA192_0 == 105 || LA192_0 == 108 || LA192_0 == 111 || LA192_0 == 115 || LA192_0 == 118 || LA192_0 == 126 ) ) { alt192 = 1 ; } switch ( alt192 ) { case 1 : // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 1297:11 : expressionList { pushFollow ( FOLLOW_expressionList_in_arguments6397 ) ; expressionList ( ) ; state . _fsp -- ; if ( state . failed ) return ; } break ; } match ( input , 37 , FOLLOW_37_in_arguments6400 ) ; if ( state . failed ) return ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { // do for sure before leaving if ( state . backtracking > 0 ) { memoize ( input , 140 , arguments_StartIndex ) ; } }
public class UnicodeSet { /** * Utility to compare two collections , optionally by size , and then lexicographically . * @ hide unsupported on Android */ public static < T extends Comparable < T > > int compare ( Collection < T > collection1 , Collection < T > collection2 , ComparisonStyle style ) { } }
if ( style != ComparisonStyle . LEXICOGRAPHIC ) { int diff = collection1 . size ( ) - collection2 . size ( ) ; if ( diff != 0 ) { return ( diff < 0 ) == ( style == ComparisonStyle . SHORTER_FIRST ) ? - 1 : 1 ; } } return compare ( collection1 , collection2 ) ;
public class AWSDatabaseMigrationServiceClient { /** * Stops the replication task . * @ param stopReplicationTaskRequest * @ return Result of the StopReplicationTask operation returned by the service . * @ throws ResourceNotFoundException * The resource could not be found . * @ throws InvalidResourceStateException * The resource is in a state that prevents it from being used for database migration . * @ sample AWSDatabaseMigrationService . StopReplicationTask * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / dms - 2016-01-01 / StopReplicationTask " target = " _ top " > AWS API * Documentation < / a > */ @ Override public StopReplicationTaskResult stopReplicationTask ( StopReplicationTaskRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeStopReplicationTask ( request ) ;
public class EnumConstantWriterImpl {
    /**
     * {@inheritDoc}
     */
    public Content getSignature(FieldDoc enumConstant) {
        // Build a <pre> block: annotations, modifiers, a link to the constant's type, then its name.
        Content pre = new HtmlTree(HtmlTag.PRE);
        writer.addAnnotationInfo(enumConstant, pre);
        addModifiers(enumConstant, pre);
        Content enumConstantLink = writer.getLink(new LinkInfoImpl(configuration, LinkInfoImpl.Kind.MEMBER, enumConstant.type()));
        pre.addContent(enumConstantLink);
        pre.addContent(" ");
        // When -linksource is enabled the name itself links to the source position.
        if (configuration.linksource) {
            Content enumConstantName = new StringContent(enumConstant.name());
            writer.addSrcLink(enumConstant, enumConstantName, pre);
        } else {
            addName(enumConstant.name(), pre);
        }
        return pre;
    }
}
public class UPropertyAliases { /** * Returns a value enum given a property enum and one of its value names . */ public int getPropertyValueEnum ( int property , CharSequence alias ) { } }
int valueMapIndex = findProperty ( property ) ; if ( valueMapIndex == 0 ) { throw new IllegalArgumentException ( "Invalid property enum " + property + " (0x" + Integer . toHexString ( property ) + ")" ) ; } valueMapIndex = valueMaps [ valueMapIndex + 1 ] ; if ( valueMapIndex == 0 ) { throw new IllegalArgumentException ( "Property " + property + " (0x" + Integer . toHexString ( property ) + ") does not have named values" ) ; } // valueMapIndex is the start of the property ' s valueMap , // where the first word is the BytesTrie offset . return getPropertyOrValueEnum ( valueMaps [ valueMapIndex ] , alias ) ;
public class SignatureAttribute { /** * Reads the signature . */ public void read ( ByteCodeParser in ) throws IOException { } }
int length = in . readInt ( ) ; if ( length != 2 ) throw new IOException ( "expected length of 2 at " + length ) ; int code = in . readShort ( ) ; _signature = in . getUTF8 ( code ) ;
public class Link { /** * Convenience method when chaining an existing { @ link Link } . * @ param name * @ param httpMethod * @ param inputType * @ param queryMethodParameters * @ param outputType * @ return */ public Link andAffordance ( String name , HttpMethod httpMethod , ResolvableType inputType , List < QueryParameter > queryMethodParameters , ResolvableType outputType ) { } }
return andAffordance ( new Affordance ( name , this , httpMethod , inputType , queryMethodParameters , outputType ) ) ;
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcLightSourcePositional() {
        // Lazily resolve the EClass from the globally registered Ifc4 package;
        // index 348 is fixed by the EMF code generator and must not be changed by hand.
        if (ifcLightSourcePositionalEClass == null) {
            ifcLightSourcePositionalEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(348);
        }
        return ifcLightSourcePositionalEClass;
    }
}
public class PClassLoader { /** * returns matching File Object or null if file not exust * @ param name * @ return matching file */ public Resource _getResource ( String name ) { } }
Resource f = directory . getRealResource ( name ) ; if ( f != null && f . exists ( ) && f . isFile ( ) ) return f ; return null ;
public class QueryPersistor {
    /**
     * Give a name to a DomainObjectMatch for better readability in a Java-DSL-like
     * string representation.
     *
     * @param domainObjectMatch the match to label
     * @param as the display name to use for it
     * @return this persistor, for fluent chaining
     */
    public QueryPersistor augment(DomainObjectMatch<?> domainObjectMatch, String as) {
        // Lazily create the label map on first use.
        if (this.augmentations == null)
            this.augmentations = new HashMap<DomainObjectMatch<?>, String>();
        this.augmentations.put(domainObjectMatch, as);
        return this;
    }
}
public class HttpDecodingClient {
    /**
     * Creates a new {@link HttpDecodingClient} decorator with the specified
     * {@link StreamDecoderFactory}s.
     *
     * @param decoderFactories the decoder factories, in preference order
     * @return a decorator function wrapping a delegate client with decoding support
     */
    public static Function<Client<HttpRequest, HttpResponse>, HttpDecodingClient> newDecorator(StreamDecoderFactory... decoderFactories) {
        // Snapshot the varargs into an immutable list and delegate to the collection overload.
        return newDecorator(ImmutableList.copyOf(decoderFactories));
    }
}
public class ScreenDialog {
    /**
     * Handles a theme selection change: records the chosen theme class in the
     * properties (or clears it for the default theme) and refreshes the look and feel.
     * GEN-LAST:event_fontComboBoxActionPerformed
     *
     * @param evt the combo-box action event (unused)
     */
    private void themeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {
        // GEN-FIRST:event_themeComboBoxActionPerformed
        String themeName = (String) themeComboBox.getSelectedItem();
        // Map the display name back to the theme's class name.
        String themeClassName = (String) mapThemes.get(themeName);
        // The default theme is represented by the *absence* of the THEME property.
        if ((themeClassName != null) && (!ScreenUtil.DEFAULT.equalsIgnoreCase(themeClassName)))
            properties.put(ScreenUtil.THEME, themeClassName);
        else
            properties.remove(ScreenUtil.THEME);
        ScreenUtil.updateLookAndFeel(this, null, properties);
        // Re-render the preview with the new theme applied.
        this.setSampleStyle();
    }
}
public class Jinx { /** * Do the actual GET or POST request . * < br > * flickrGet and flickrPost methods delegate work to this method . * @ param params request parameters . * @ param method http method to use . Method . GET and Method . POST are the only valid choices . * @ param tClass the class that will be returned . * @ param < T > type of the class returned . * @ param sign if true the request will be signed . * @ return an instance of the specified class containing data from Flickr . * @ throws JinxException if there are any errors . */ protected < T > T callFlickr ( Map < String , String > params , Method method , Class < T > tClass , boolean sign ) throws JinxException { } }
if ( this . oAuthAccessToken == null ) { throw new JinxException ( "Jinx has not been configured with an OAuth Access Token." ) ; } params . put ( "format" , "json" ) ; params . put ( "nojsoncallback" , "1" ) ; params . put ( "api_key" , getApiKey ( ) ) ; org . scribe . model . Response flickrResponse ; if ( method == Method . GET ) { OAuthRequest request = new OAuthRequest ( Verb . GET , JinxConstants . REST_ENDPOINT ) ; for ( String key : params . keySet ( ) ) { request . addQuerystringParameter ( key , params . get ( key ) ) ; if ( verboseLogging ) { JinxLogger . getLogger ( ) . log ( String . format ( "Added query parameter %s=%s" , key , params . get ( key ) ) ) ; } } if ( sign ) { this . oAuthService . signRequest ( this . accessToken , request ) ; } flickrResponse = request . send ( ) ; } else if ( method == Method . POST ) { OAuthRequest request = new OAuthRequest ( Verb . POST , JinxConstants . REST_ENDPOINT ) ; for ( String key : params . keySet ( ) ) { request . addBodyParameter ( key , params . get ( key ) ) ; if ( verboseLogging ) { JinxLogger . getLogger ( ) . log ( String . format ( "Added body parameter %s=%s" , key , params . get ( key ) ) ) ; } } if ( sign ) { this . oAuthService . signRequest ( this . accessToken , request ) ; } flickrResponse = request . send ( ) ; } else { throw new JinxException ( "Unsupported method: " + method . toString ( ) ) ; } if ( flickrResponse == null || flickrResponse . getBody ( ) == null ) { throw new JinxException ( "Null return from call to Flickr." ) ; } if ( verboseLogging ) { JinxLogger . getLogger ( ) . log ( "RESPONSE is " + flickrResponse . getBody ( ) ) ; } T fromJson = gson . fromJson ( flickrResponse . getBody ( ) , tClass ) ; if ( this . flickrErrorThrowsException && ( ( Response ) fromJson ) . getCode ( ) != 0 ) { Response r = ( Response ) fromJson ; throw new JinxException ( "Flickr returned non-zero status." , null , r ) ; } return fromJson ;
public class Client { /** * Creates a new transport based on the capabilities of the server . * @ param profile * profile to use for determining if HTTP POST is supported * @ return the new transport . */ private Transport createTransport ( final String profile ) { } }
if ( getCaCapabilities ( profile ) . isPostSupported ( ) ) { return transportFactory . forMethod ( Method . POST , url ) ; } else { return transportFactory . forMethod ( Method . GET , url ) ; }
public class DocletInvoker {
    /**
     * Generate documentation here. Return true on success.
     *
     * @param root the documentation root passed to the doclet's start method
     * @return true if the doclet ran and returned true; false on invocation
     *         failure or when the doclet returns a non-boolean value
     */
    public boolean start(RootDoc root) {
        Object retVal;
        String methodName = "start";
        Class<?>[] paramTypes = { RootDoc.class };
        Object[] params = { root };
        try {
            retVal = invoke(methodName, null, paramTypes, params);
        } catch (DocletInvokeException exc) {
            // Invocation failures have already been reported; just signal failure.
            return false;
        }
        // The doclet contract requires start(RootDoc) to return a boolean.
        if (retVal instanceof Boolean) {
            return ((Boolean) retVal);
        } else {
            messager.error(Messager.NOPOS, "main.must_return_boolean", docletClassName, methodName);
            return false;
        }
    }
}
public class Option { /** * 添加数据 * @ param values * @ return */ public Option series ( Series ... values ) { } }
if ( values == null || values . length == 0 ) { return this ; } this . series ( ) . addAll ( Arrays . asList ( values ) ) ; return this ;
public class appflowpolicy { /** * Use this API to fetch filtered set of appflowpolicy resources . * set the filter parameter values in filtervalue object . */ public static appflowpolicy [ ] get_filtered ( nitro_service service , filtervalue [ ] filter ) throws Exception { } }
appflowpolicy obj = new appflowpolicy ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; appflowpolicy [ ] response = ( appflowpolicy [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class PositionManager { /** * This method scans through the nodes in the ordered list and identifies those that are not in * the passed in compViewParent . For those it then looks in its current parent and checks to see * if there are any down - stream ( higher sibling index ) siblings that have moveAllowed = " false " . * If any such sibling is found then the node is not allowed to be reparented and is removed * from the list . */ static void applyNoReparenting ( List < NodeInfo > order , Element compViewParent , Element positionSet ) { } }
int i = 0 ; while ( i < order . size ( ) ) { NodeInfo ni = order . get ( i ) ; if ( ! ni . getNode ( ) . getParentNode ( ) . equals ( compViewParent ) ) { if ( isNotReparentable ( ni , compViewParent , positionSet ) ) { LOG . info ( "Resetting the following NodeInfo because it is not reparentable: " + ni ) ; // this node should not be reparented . If it was placed // here by way of a position directive then delete that // directive out of the ni and posSet will be updated later ni . setPositionDirective ( null ) ; // now we need to remove it from the ordering list but // skip incrementing i , deleted ni now filled by next ni order . remove ( i ) ; continue ; } } i ++ ; }
public class BoxAuthentication {
    /**
     * Callback method to be called when the authentication process finishes.
     *
     * @param infoOriginal the authentication information that successfully authenticated.
     * @param context the current application context (can be used to launch UI or access resources).
     */
    public void onAuthenticated(BoxAuthenticationInfo infoOriginal, Context context) {
        // Work on an unmodifiable snapshot so later mutation of the caller's object has no effect.
        BoxAuthenticationInfo info = BoxAuthenticationInfo.unmodifiableObject(infoOriginal);
        if (!SdkUtils.isBlank(info.accessToken()) && (info.getUser() == null || SdkUtils.isBlank(info.getUser().getId()))) {
            // insufficient information so we need to fetch the user info first.
            doUserRefresh(context, info);
            return;
        }
        // Persist the authenticated user's info keyed by user id.
        getAuthInfoMap(context).put(info.getUser().getId(), info.clone());
        authStorage.storeLastAuthenticatedUserId(info.getUser().getId(), context);
        authStorage.storeAuthInfoMap(mCurrentAccessInfo, context);
        // Notify every registered listener of the newly created authentication.
        Set<AuthListener> listeners = getListeners();
        for (AuthListener listener : listeners) {
            listener.onAuthCreated(info);
        }
    }
}
public class FsCrawlerUtil { /** * We check if we can index the file or if we should ignore it * @ param directory true if the current file is a directory , false in other case ( actual file ) * @ param filename The filename to scan * @ param includes include rules , may be empty not null * @ param excludes exclude rules , may be empty not null */ public static boolean isIndexable ( boolean directory , String filename , List < String > includes , List < String > excludes ) { } }
logger . debug ( "directory = [{}], filename = [{}], includes = [{}], excludes = [{}]" , directory , filename , includes , excludes ) ; boolean isIndexable = isIndexable ( filename , includes , excludes ) ; // It can happen that we a dir " foo " which does not match the include name like " * . txt " // We need to go in it unless it has been explicitly excluded by the user if ( directory && ! isExcluded ( filename , excludes ) ) { isIndexable = true ; } return isIndexable ;
public class ResourceGroovyMethods {
    /**
     * Helper method to create a new BufferedReader for a URL and then
     * passes it to the closure. The reader is closed after the closure returns.
     *
     * @param url a URL
     * @param closure the closure to invoke with the reader
     * @return the value returned by the closure
     * @throws IOException if an IOException occurs.
     * @since 1.5.2
     */
    public static <T> T withReader(URL url, @ClosureParams(value = SimpleType.class, options = "java.io.Reader") Closure<T> closure) throws IOException {
        // Delegate to the stream-based overload, which wraps the stream in a reader
        // and guarantees it is closed after the closure completes.
        return IOGroovyMethods.withReader(url.openConnection().getInputStream(), closure);
    }
}
public class TCPConnLink {
    /**
     * Starts an asynchronous outbound connect using the given connect request context.
     *
     * @param context the TCPConnectRequestContext describing the target
     * @see com.ibm.wsspi.channelfw.OutboundConnectionLink#connectAsynch(java.lang.Object)
     */
    public void connectAsynch(Object context) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "connectAsynch");
        }
        this.syncObject = null;
        // reset proxy response object
        if (this.proxy != null) {
            this.proxy.setIsProxyResponseValid(false);
        }
        // Shared connect path for sync and async callers.
        connectCommon((TCPConnectRequestContext) context);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "connectAsynch");
        }
    }
}
public class RequestHttpBase { /** * Adds a new header . If an old header with that name exists , * both headers are output . * @ param key the header key . * @ param value the header value . */ public void addHeaderOutImpl ( String key , String value ) { } }
if ( headerOutSpecial ( key , value ) ) { return ; } ArrayList < String > keys = _headerKeysOut ; ArrayList < String > values = _headerValuesOut ; int size = keys . size ( ) ; // webapp / 1k32 for ( int i = 0 ; i < size ; i ++ ) { if ( keys . get ( i ) . equals ( key ) && values . get ( i ) . equals ( value ) ) { return ; } } keys . add ( key ) ; values . add ( value ) ;
public class TypeConformanceComputer { /** * Logic was moved to inner class CommonSuperTypeFinder in the context of bug 495314. * This method is scheduled for deletion in Xtext 2.15 * @ deprecated see { @ link CommonSuperTypeFinder # getCommonParameterSuperType ( List ) } */ @ Deprecated public final LightweightTypeReference getCommonParameterSuperType ( List < LightweightTypeReference > types , List < LightweightTypeReference > initiallyRequested , ITypeReferenceOwner owner ) { } }
CommonSuperTypeFinder typeFinder = newCommonSuperTypeFinder ( owner ) ; typeFinder . requestsInProgress = Lists . newArrayList ( ) ; typeFinder . requestsInProgress . add ( initiallyRequested ) ; return typeFinder . getCommonParameterSuperType ( types ) ;
public class ServerOperationResolver {
    /**
     * Get server operations to affect a change to a system property.
     *
     * @param operation the domain or host level operation
     * @param address address associated with {@code operation}
     * @param level the level (domain, server group, host) the operation targets
     * @param domain the domain model, or {@code null} if {@code address} isn't for a domain level resource
     * @param affectedGroup the name of the server group affected by the operation, or {@code null}
     *        if {@code address} isn't for a server group level resource
     * @param host the host model
     * @return the server operations, keyed by the set of servers each applies to
     */
    private Map<Set<ServerIdentity>, ModelNode> getServerSystemPropertyOperations(ModelNode operation, PathAddress address, Level level, ModelNode domain, String affectedGroup, ModelNode host) {
        Map<Set<ServerIdentity>, ModelNode> result = null;
        if (isServerAffectingSystemPropertyOperation(operation)) {
            String propName = address.getLastElement().getValue();
            boolean overridden = false;
            Set<String> groups = null;
            // Work out which server groups the change can reach, honoring the
            // host > group > domain precedence for system properties.
            if (level == Level.DOMAIN || level == Level.SERVER_GROUP) {
                if (hasSystemProperty(host, propName)) {
                    // host level value takes precedence
                    overridden = true;
                } else if (affectedGroup != null) {
                    groups = Collections.singleton(affectedGroup);
                } else if (domain.hasDefined(SERVER_GROUP)) {
                    // Top level domain update applies to all groups where it was not overridden
                    groups = new HashSet<String>();
                    for (Property groupProp : domain.get(SERVER_GROUP).asPropertyList()) {
                        String groupName = groupProp.getName();
                        if (!hasSystemProperty(groupProp.getValue(), propName)) {
                            groups.add(groupName);
                        }
                    }
                }
            }
            // Collect the concrete servers on this host that will see the change.
            Set<ServerIdentity> servers = null;
            if (!overridden && host.hasDefined(SERVER_CONFIG)) {
                servers = new HashSet<ServerIdentity>();
                for (Property serverProp : host.get(SERVER_CONFIG).asPropertyList()) {
                    String serverName = serverProp.getName();
                    // Skip servers with no running proxy (i.e. not started).
                    if (serverProxies.get(serverName) == null) {
                        continue;
                    }
                    ModelNode server = serverProp.getValue();
                    if (!hasSystemProperty(server, propName)) {
                        String groupName = server.require(GROUP).asString();
                        if (groups == null || groups.contains(groupName)) {
                            servers.add(new ServerIdentity(localHostName, groupName, serverName));
                        }
                    }
                }
            }
            if (servers != null && servers.size() > 0) {
                // Group servers by the identical operation they need, then invert
                // the map to the (server set -> operation) shape callers expect.
                Map<ModelNode, Set<ServerIdentity>> ops = new HashMap<ModelNode, Set<ServerIdentity>>();
                for (ServerIdentity server : servers) {
                    ModelNode serverOp = getServerSystemPropertyOperation(operation, propName, server, level, domain, host);
                    Set<ServerIdentity> set = ops.get(serverOp);
                    if (set == null) {
                        set = new HashSet<ServerIdentity>();
                        ops.put(serverOp, set);
                    }
                    set.add(server);
                }
                result = new HashMap<Set<ServerIdentity>, ModelNode>();
                for (Map.Entry<ModelNode, Set<ServerIdentity>> entry : ops.entrySet()) {
                    result.put(entry.getValue(), entry.getKey());
                }
            }
        }
        if (result == null) {
            result = Collections.emptyMap();
        }
        return result;
    }
}
public class LineNumberReader {
    /**
     * Mark the present position in the stream. Subsequent calls to reset()
     * will attempt to reposition the stream to this point, and will also reset
     * the line number appropriately.
     *
     * @param readAheadLimit
     *        Limit on the number of characters that may be read while still
     *        preserving the mark. After reading this many characters,
     *        attempting to reset the stream may fail.
     * @throws IOException
     *        If an I/O error occurs
     */
    public void mark(int readAheadLimit) throws IOException {
        synchronized (lock) {
            super.mark(readAheadLimit);
            // Snapshot the line-tracking state so reset() can restore it
            // together with the underlying stream position.
            markedLineNumber = lineNumber;
            markedSkipLF = skipLF;
        }
    }
}
public class PatternStreamBuilder {
    /**
     * Creates a data stream containing results of {@link PatternProcessFunction} applied to fully
     * matching event patterns.
     *
     * @param outTypeInfo output TypeInformation of
     *        {@link PatternProcessFunction#processMatch(Map, PatternProcessFunction.Context, Collector)}
     * @param processFunction function to be applied to matching event sequences
     * @param <OUT> type of output events
     * @param <K> type of the key
     * @return data stream containing fully matched event sequences with the process function applied
     */
    <OUT, K> SingleOutputStreamOperator<OUT> build(final TypeInformation<OUT> outTypeInfo, final PatternProcessFunction<IN, OUT> processFunction) {
        checkNotNull(outTypeInfo);
        checkNotNull(processFunction);
        final TypeSerializer<IN> inputSerializer = inputStream.getType().createSerializer(inputStream.getExecutionConfig());
        final boolean isProcessingTime = inputStream.getExecutionEnvironment().getStreamTimeCharacteristic() == TimeCharacteristic.ProcessingTime;
        // Timed-out partial matches are only routed when the function opts in via the marker interface.
        final boolean timeoutHandling = processFunction instanceof TimedOutPartialMatchHandler;
        final NFACompiler.NFAFactory<IN> nfaFactory = NFACompiler.compileFactory(pattern, timeoutHandling);
        final CepOperator<IN, K, OUT> operator = new CepOperator<>(inputSerializer, isProcessingTime, nfaFactory, comparator, pattern.getAfterMatchSkipStrategy(), processFunction, lateDataOutputTag);
        final SingleOutputStreamOperator<OUT> patternStream;
        if (inputStream instanceof KeyedStream) {
            KeyedStream<IN, K> keyedStream = (KeyedStream<IN, K>) inputStream;
            patternStream = keyedStream.transform("CepOperator", outTypeInfo, operator);
        } else {
            // Non-keyed input: key everything to a single constant key and force
            // parallelism 1 so the CEP operator sees a global ordering.
            KeySelector<IN, Byte> keySelector = new NullByteKeySelector<>();
            patternStream = inputStream.keyBy(keySelector).transform("GlobalCepOperator", outTypeInfo, operator).forceNonParallel();
        }
        return patternStream;
    }
}
public class FilteredJobLifecycleListener { /** * { @ inheritDoc } */ @ Override public void onStatusChange ( JobExecutionState state , RunningState previousStatus , RunningState newStatus ) { } }
if ( this . filter . apply ( state . getJobSpec ( ) ) ) { this . delegate . onStatusChange ( state , previousStatus , newStatus ) ; }
public class VirtualHubsInner {
    /**
     * Updates VirtualHub tags.
     *
     * @param resourceGroupName the resource group name of the VirtualHub.
     * @param virtualHubName the name of the VirtualHub.
     * @param tags resource tags.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<VirtualHubInner> updateTagsAsync(String resourceGroupName, String virtualHubName, Map<String, String> tags, final ServiceCallback<VirtualHubInner> serviceCallback) {
        // Bridge the observable-based implementation to the callback-based ServiceFuture API.
        return ServiceFuture.fromResponse(updateTagsWithServiceResponseAsync(resourceGroupName, virtualHubName, tags), serviceCallback);
    }
}
public class Asn1Utils {
    /**
     * Encode an ASN.1 BIT STRING.
     *
     * The value is written backwards (content first, header last) into the space
     * to the left of the buffer's current position, as is usual for DER encoders
     * that build output back-to-front.
     *
     * @param value the value to be encoded
     * @param nbits the number of bits in the bit string; must be &gt;= value.length()
     * @param buf the buffer with space to the left of the current position where the value will be encoded
     * @return the length of the encoded data
     * @throws IllegalArgumentException if value is null or nbits is too small to hold it
     */
    public static int encodeBitString(BitSet value, int nbits, ByteBuffer buf) {
        if (value == null || nbits < value.length()) {
            throw new IllegalArgumentException();
        }
        int pos = buf.position();
        // Number of content octets needed for nbits bits.
        int contentLength = (int) Math.ceil(nbits / 8.0d);
        // Emit octets from last to first, packing 8 bits per octet.
        for (int i = contentLength; i > 0; i--) {
            byte octet = 0;
            for (int j = (i - 1) * 8; j < i * 8; j++) {
                if (value.get(j)) {
                    octet |= BIT_STRING_MASK[j % 8];
                }
            }
            pos--;
            buf.put(pos, octet);
        }
        // Write out padding byte (primitive encoding)
        // NOTE(review): the pad count is always written as 0 here, even when nbits
        // is not a multiple of 8 — confirm against the DER rules for unused bits.
        pos--;
        buf.put(pos, (byte) 0);
        contentLength++;
        buf.position(buf.position() - contentLength);
        // Prepend the identifier and length octets in front of the content.
        int headerLength = DerUtils.encodeIdAndLength(DerId.TagClass.UNIVERSAL, DerId.EncodingType.PRIMITIVE, ASN1_BIT_STRING_TAG_NUM, contentLength, buf);
        return headerLength + contentLength;
    }
}
public class TopicTypesInner {
    /**
     * List event types.
     * List event types for a topic type.
     *
     * @param topicTypeName name of the topic type
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<EventTypeInner>> listEventTypesAsync(String topicTypeName, final ServiceCallback<List<EventTypeInner>> serviceCallback) {
        // Bridge the observable-based implementation to the callback-based ServiceFuture API.
        return ServiceFuture.fromResponse(listEventTypesWithServiceResponseAsync(topicTypeName), serviceCallback);
    }
}
public class DefaultComponentRegistry {
    /**
     * Adds a component to the registry, replacing any component previously
     * registered under the same type.
     *
     * @param componentType the type key the component is registered under
     * @param component the component instance to register
     */
    protected <T extends IComponent> void addComponent(Class<T> componentType, T component) {
        components.put(componentType, component);
    }
}
public class HashCodeBuilder {
    /**
     * Uses reflection to build a valid hash code from the fields of {@code object}.
     * This method uses two hard-coded choices for the constants needed to build a hash code.
     * It uses <code>AccessibleObject.setAccessible</code> to gain access to private fields. This means that it will
     * throw a security exception if run under a security manager, if the permissions are not set up correctly. It is
     * also not as efficient as testing explicitly.
     * Transient members will not be used, as they are likely derived fields, and not part of the value of the
     * <code>Object</code>.
     * Static fields will not be tested. Superclass fields will be included. If no fields are found to include
     * in the hash code, the result of this method will be constant.
     *
     * @param object the Object to create a <code>hashCode</code> for
     * @param excludeFields array of field names to exclude from use in calculation of hash code
     * @return int hash code
     * @throws IllegalArgumentException if the object is <code>null</code>
     */
    public static int reflectionHashCode(final Object object, final String... excludeFields) {
        // Delegate with the default seed/multiplier, no transients, and no class restriction.
        return reflectionHashCode(DEFAULT_INITIAL_VALUE, DEFAULT_MULTIPLIER_VALUE, object, false, null, excludeFields);
    }
}
public class LinearSolverChol_ZDRM {
    /**
     * Used internally to find the solution to a single column vector.
     * Performs forward then backward substitution against the Cholesky factor L,
     * operating in place on the working vector vv.
     */
    private void solveInternalL() {
        // This takes advantage of the diagonal elements always being real numbers.
        // Forward substitution: solve L*y = b, storing y in vv.
        TriangularSolver_ZDRM.solveL_diagReal(t, vv, n);
        // Backward substitution: solve L^H * x = y, storing x in vv.
        TriangularSolver_ZDRM.solveConjTranL_diagReal(t, vv, n);
    }
}
public class BaseMatchMethodPermutationBuilder { /** * Returns true if the given { @ link ClassName } is a decomposable match builder ; false otherwise . */ protected boolean isDecomposableBuilder ( ClassName t ) { } }
return t . equals ( ClassName . get ( DecomposableMatchBuilder0 . class ) ) || t . equals ( ClassName . get ( DecomposableMatchBuilder1 . class ) ) || t . equals ( ClassName . get ( DecomposableMatchBuilder2 . class ) ) || t . equals ( ClassName . get ( DecomposableMatchBuilder3 . class ) ) ;
public class DroolsFactoryImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String convertPackageNameTypeToString(EDataType eDataType, Object instanceValue) {
        // Generated EMF converter: package names serialize exactly like plain XML strings.
        return XMLTypeFactory.eINSTANCE.convertToString(XMLTypePackage.Literals.STRING, instanceValue);
    }
}
public class ApiTokenClient {
    /**
     * Retrieves a user scenario validation.
     *
     * @param id its id
     * @return {@link com.loadimpact.resource.UserScenarioValidation}
     */
    public UserScenarioValidation getUserScenarioValidation(int id) {
        // invoke() takes a request closure (how to issue the HTTP call) and a
        // response closure (how to map the JSON payload into the domain object).
        return invoke(USER_SCENARIO_VALIDATIONS, id, new RequestClosure<JsonObject>() {
            @Override
            public JsonObject call(Invocation.Builder request) {
                // Plain GET returning the raw JSON object.
                return request.get(JsonObject.class);
            }
        }, new ResponseClosure<JsonObject, UserScenarioValidation>() {
            @Override
            public UserScenarioValidation call(JsonObject json) {
                return new UserScenarioValidation(json);
            }
        });
    }
}
public class ActionsValidator {
    /**
     * Validate if an Event should generate an Action based on the constraints defined on a TriggerAction.
     *
     * @param triggerAction a TriggerAction where status and time constraints are defined.
     * @param event a given Event to validate against a TriggerAction
     * @return true if the Event is validated and it should generate an action,
     *         false on the contrary
     */
    public static boolean validate(TriggerAction triggerAction, Event event) {
        // Missing inputs or an unconstrained trigger validate by default.
        if (triggerAction == null || event == null) {
            return true;
        }
        if ((isEmpty(triggerAction.getStates())) && triggerAction.getCalendar() == null) {
            return true;
        }
        // State constraint only applies to Alerts: reject when the alert's status
        // is not among the trigger's accepted states.
        if (event instanceof Alert && triggerAction.getStates() != null && !triggerAction.getStates().isEmpty() && !triggerAction.getStates().contains(((Alert) event).getStatus().name())) {
            return false;
        }
        // Time constraint: defer to the calendar; on evaluation errors we log and
        // deliberately fall through to true (best-effort validation).
        if (triggerAction.getCalendar() != null) {
            try {
                return triggerAction.getCalendar().isSatisfiedBy(event.getCtime());
            } catch (Exception e) {
                log.debug(e.getMessage(), e);
                log.errorCannotValidateAction(e.getMessage());
            }
        }
        return true;
    }
}
public class GetCloudFormationTemplateRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @param getCloudFormationTemplateRequest the request to marshall; must not be null
     * @param protocolMarshaller target marshaller receiving the bound fields
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetCloudFormationTemplateRequest getCloudFormationTemplateRequest, ProtocolMarshaller protocolMarshaller) {
        if (getCloudFormationTemplateRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request field to its protocol location descriptor.
            protocolMarshaller.marshall(getCloudFormationTemplateRequest.getApplicationId(), APPLICATIONID_BINDING);
            protocolMarshaller.marshall(getCloudFormationTemplateRequest.getTemplateId(), TEMPLATEID_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class GISCoordinates {
    /**
     * This function converts a France Lambert IV coordinate to an
     * extended France Lambert II coordinate.
     *
     * @param x is the coordinate in France Lambert IV
     * @param y is the coordinate in France Lambert IV
     * @return the extended France Lambert II coordinate.
     */
    @Pure
    public static Point2d L4_EL2(double x, double y) {
        // Go through the shared NTF geographic (lambda/phi) intermediate:
        // inverse-project from Lambert IV, then re-project into extended Lambert II.
        final Point2d ntfLambdaPhi = NTFLambert_NTFLambdaPhi(x, y, LAMBERT_4_N, LAMBERT_4_C, LAMBERT_4_XS, LAMBERT_4_YS);
        return NTFLambdaPhi_NTFLambert(ntfLambdaPhi.getX(), ntfLambdaPhi.getY(), LAMBERT_2E_N, LAMBERT_2E_C, LAMBERT_2E_XS, LAMBERT_2E_YS);
    }
}
public class J4pRequestHandler { /** * Create the request URI to use */ private URI createRequestURI ( String path , String queryParams ) throws URISyntaxException { } }
return new URI ( j4pServerUrl . getScheme ( ) , j4pServerUrl . getUserInfo ( ) , j4pServerUrl . getHost ( ) , j4pServerUrl . getPort ( ) , path , queryParams , null ) ;
public class EvaluatorWrapper {
    /**
     * This method is called when operators are rewritten as function calls. For instance,
     * {@code x after y} is rewritten as {@code after.evaluate(_workingMemory_, x, y)}.
     *
     * @param workingMemory the working memory the evaluation runs against
     * @param left the left operand (or its extracted value)
     * @param right the right operand (or its extracted value)
     * @return the result of the wrapped evaluator
     */
    public boolean evaluate(InternalWorkingMemory workingMemory, Object left, Object right) {
        // When timestamps were captured for the operands, they take precedence
        // over the raw operand values (temporal operators act on timestamps).
        Object leftValue = leftTimestamp != null ? leftTimestamp : left;
        Object rightValue = rightTimestamp != null ? rightTimestamp : right;
        // A literal right operand uses the literal-aware overload; otherwise both
        // sides are wrapped as constant readers with matching dummy fact handles.
        return rightLiteral ? evaluator.evaluate(workingMemory, new ConstantValueReader(leftValue), dummyFactHandleOf(leftValue), new ObjectFieldImpl(rightValue)) : evaluator.evaluate(workingMemory, new ConstantValueReader(leftValue), dummyFactHandleOf(leftValue), new ConstantValueReader(rightValue), dummyFactHandleOf(rightValue));
    }
}
public class HadoopInputFormatBase {
    /**
     * Pushes the stored Hadoop configuration into the wrapped input format,
     * if it supports configuration via {@link Configurable}.
     *
     * @param parameters Flink configuration parameters (unused here)
     */
    @Override
    public void configure(Configuration parameters) {
        // enforce sequential configure() calls
        synchronized (CONFIGURE_MUTEX) {
            // configure MR InputFormat if necessary
            if (mapreduceInputFormat instanceof Configurable) {
                ((Configurable) mapreduceInputFormat).setConf(configuration);
            }
        }
    }
}
public class FileUtil {
    /**
     * Recursively scans a directory tree and collects the statuses of its leaf
     * directories (directories containing no sub-directories).
     *
     * @param fs file system
     * @param pathStatus status of the directory to scan; non-directories are ignored
     * @param acc the collection receiving the file status of each leaf directory
     * @throws IOException if the path does not exist or listing fails
     */
    public static void listStatusForLeafDir(FileSystem fs, FileStatus pathStatus, List<FileStatus> acc) throws IOException {
        // Files contribute nothing; only directories are examined.
        if (!pathStatus.isDir()) return;
        FileStatus[] fileStatusResults = fs.listStatus(pathStatus.getPath());
        if (fileStatusResults == null) {
            throw new IOException("Path does not exist: " + pathStatus.getPath());
        }
        // A directory is a leaf exactly when none of its children are directories.
        boolean leafDir = true;
        for (FileStatus f : fileStatusResults) {
            if (f.isDir()) {
                leafDir = false;
                listStatusForLeafDir(fs, f, acc);
            }
        }
        if (leafDir) {
            acc.add(pathStatus); // Accumulate leaf dir
        }
    }
}
public class Serialized { /** * Returns the deserialized objects from the given { @ link File } as an * { @ link Observable } stream . A buffer size of 8192 bytes is used by * default . * @ param file * the input file containing serialized java objects * @ param < T > * the generic type of the deserialized objects returned in the * stream * @ return the stream of deserialized objects from the { @ link InputStream } * as an { @ link Observable } . */ public static < T extends Serializable > Observable < T > read ( final File file ) { } }
return read ( file , DEFAULT_BUFFER_SIZE ) ;
public class XMLPropertiesField { /** * Convert these java properties to a string . * @ param properties The java properties . * @ return The properties string . */ public static String propertiesToXML ( Map < String , Object > map ) { } }
String strProperties = null ; ByteArrayOutputStream baOut = new ByteArrayOutputStream ( ) ; try { Properties properties = new Properties ( ) ; properties . putAll ( map ) ; properties . storeToXML ( baOut , PROPERTIES_COMMENT ) ; byte [ ] rgBytes = baOut . toByteArray ( ) ; ByteArrayInputStream baIn = new ByteArrayInputStream ( rgBytes ) ; InputStreamReader isIn = new InputStreamReader ( baIn ) ; // byte - > char char [ ] cbuf = new char [ rgBytes . length ] ; isIn . read ( cbuf , 0 , rgBytes . length ) ; if ( cbuf . length == rgBytes . length ) strProperties = new String ( cbuf ) ; } catch ( IOException ex ) { ex . printStackTrace ( ) ; } if ( strProperties != null ) { int iStart = strProperties . indexOf ( "<properties" ) ; if ( iStart != - 1 ) strProperties = strProperties . substring ( iStart ) ; } return strProperties ;
public class ParquetInputFormat { /** * Configures the fields to be read and returned by the ParquetInputFormat . Selected fields must be present * in the configured schema . * @ param fieldNames Names of all selected fields . */ public void selectFields ( String [ ] fieldNames ) { } }
checkNotNull ( fieldNames , "fieldNames" ) ; this . fieldNames = fieldNames ; RowTypeInfo rowTypeInfo = ( RowTypeInfo ) ParquetSchemaConverter . fromParquetType ( expectedFileSchema ) ; TypeInformation [ ] selectFieldTypes = new TypeInformation [ fieldNames . length ] ; for ( int i = 0 ; i < fieldNames . length ; i ++ ) { try { selectFieldTypes [ i ] = rowTypeInfo . getTypeAt ( fieldNames [ i ] ) ; } catch ( IndexOutOfBoundsException e ) { throw new IllegalArgumentException ( String . format ( "Fail to access Field %s , " + "which is not contained in the file schema" , fieldNames [ i ] ) , e ) ; } } this . fieldTypes = selectFieldTypes ;
public class Reflection { /** * Get a Field intance for a given class and property . Iterate over super classes of a class when a < @ link * NoSuchFieldException > occurs until no more super classes are found then re - throw the < @ link NoSuchFieldException > . * @ param targetClass * @ param propertyName * @ return Field * @ throws NoSuchFieldException */ protected Field getField ( Class < ? > targetClass , String propertyName ) throws NoSuchFieldException { } }
Field field = null ; try { field = targetClass . getDeclaredField ( Inflector . getInstance ( ) . lowerCamelCase ( propertyName ) ) ; } catch ( NoSuchFieldException e ) { Class < ? > clazz = targetClass . getSuperclass ( ) ; if ( clazz != null ) { field = getField ( clazz , propertyName ) ; } else { throw e ; } } return field ;
public class FileJournalManager { /** * Find the maximum transaction in the journal . * This gets stored in a member variable , as corrupt edit logs * will be moved aside , but we still need to remember their first * tranaction id in the case that it was the maximum transaction in * the journal . */ private long findMaxTransaction ( ) throws IOException { } }
for ( EditLogFile elf : getLogFiles ( 0 ) ) { if ( elf . isInProgress ( ) ) { maxSeenTransaction = Math . max ( elf . getFirstTxId ( ) , maxSeenTransaction ) ; } maxSeenTransaction = Math . max ( elf . getLastTxId ( ) , maxSeenTransaction ) ; } return maxSeenTransaction ;
public class Packer { /** * Specify the insets for the component . * @ param insets * the insets to apply */ public Packer inset ( final Insets insets ) { } }
gc . insets = insets ; setConstraints ( comp , gc ) ; return this ;
public class DatastoreImpl { /** * @ SuppressWarnings ( " unchecked " ) * private < T > Key < T > save ( final MongoCollection collection , final T entity , final InsertOneOptions options ) { * final MappedClass mc = validateSave ( entity ) ; * involvedObjects is used not only as a cache but also as a list of what needs to be called for life - cycle methods at the end . * final LinkedHashMap < Object , DBObject > involvedObjects = new LinkedHashMap < Object , DBObject > ( ) ; * final Document document = new Document ( entityToDBObj ( entity , involvedObjects ) . toMap ( ) ) ; * try to do an update if there is a @ Version field * final Object idValue = document . get ( Mapper . ID _ KEY ) ; * UpdateResult wr = tryVersionedUpdate ( collection , entity , document , idValue , options , mc ) ; * if ( wr = = null ) { * if ( document . get ( ID _ FIELD _ NAME ) = = null ) { * collection . insertOne ( singletonList ( document ) , options ) ; * } else { * collection . updateOne ( new Document ( ID _ FIELD _ NAME , document . get ( ID _ FIELD _ NAME ) ) , document , * new com . mongodb . client . model . UpdateOptions ( ) * . bypassDocumentValidation ( options . getBypassDocumentValidation ( ) ) * . upsert ( true ) ) ; * return postSaveOperations ( singletonList ( entity ) , involvedObjects , collection . getNamespace ( ) . getCollectionName ( ) ) . get ( 0 ) ; */ private < T > MappedClass validateSave ( final T entity ) { } }
if ( entity == null ) { throw new UpdateException ( "Can not persist a null entity" ) ; } final MappedClass mc = mapper . getMappedClass ( entity ) ; if ( mc . getAnnotation ( NotSaved . class ) != null ) { throw new MappingException ( format ( "Entity type: %s is marked as NotSaved which means you should not try to save it!" , mc . getClazz ( ) . getName ( ) ) ) ; } return mc ;
public class RequestSecurityFilter { /** * If { @ code securityEnabled } , passes the request through the chain of { @ link RequestSecurityProcessor } s , * depending if the request URL * matches or not the { @ code urlsToInclude } or the { @ code urlsToExclude } . The last processor of the chain calls * the actual filter * chain . * @ param request * @ param response * @ param chain * @ throws IOException * @ throws ServletException */ public void doFilter ( ServletRequest request , ServletResponse response , FilterChain chain ) throws IOException , ServletException { } }
HttpServletRequest httpRequest = ( HttpServletRequest ) request ; if ( securityEnabled && ( includeRequest ( httpRequest ) || ! excludeRequest ( httpRequest ) ) ) { doFilterInternal ( ( HttpServletRequest ) request , ( HttpServletResponse ) response , chain ) ; } else { chain . doFilter ( request , response ) ; }
public class SSLTransportParameters { /** * Set the keystore , password , certificate type and the store type * @ param keyStore Location of the Keystore on disk * @ param keyPass Keystore password * @ param keyManagerType The default is X509 * @ param keyStoreType The default is JKS */ public void setKeyStore ( String keyStore , String keyPass , String keyManagerType , String keyStoreType ) { } }
if ( ( keyStore == null ) || ( keyPass == null ) ) { this . keyStore = System . getProperty ( "javax.net.ssl.keyStore" ) ; this . keyPass = System . getProperty ( "javax.net.ssl.keyStorePassword" ) ; } else { this . keyStore = keyStore ; this . keyPass = keyPass ; } if ( keyManagerType != null ) { this . keyManagerType = keyManagerType ; } if ( keyStoreType != null ) { this . keyStoreType = keyStoreType ; } isKeyStoreSet = ( keyStore != null ) && ( keyPass != null ) ;
public class MAPServiceLsmImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . map . api . MAPServiceBase # isServingService ( org * . mobicents . protocols . ss7 . map . api . MAPApplicationContext ) */ public ServingCheckData isServingService ( MAPApplicationContext dialogApplicationContext ) { } }
// LSM (Location Services) serves only the location-service contexts at version 3.
// For a newer version it answers AC_VersionIncorrect together with an alternative
// application context built by rewriting OID element 7 to version 3; for an older
// version it answers AC_VersionIncorrect without an alternative. Any other context
// name is AC_NotServing. NOTE(review): index 7 is presumably the version component
// of the MAP AC OID -- confirm against the MAP application-context OID layout.
int vers = dialogApplicationContext . getApplicationContextVersion ( ) . getVersion ( ) ; switch ( dialogApplicationContext . getApplicationContextName ( ) ) { case locationSvcEnquiryContext : case locationSvcGatewayContext : if ( vers == 3 ) { return new ServingCheckDataImpl ( ServingCheckResult . AC_Serving ) ; } else if ( vers > 3 ) { long [ ] altOid = dialogApplicationContext . getOID ( ) ; altOid [ 7 ] = 3 ; ApplicationContextName alt = TcapFactory . createApplicationContextName ( altOid ) ; return new ServingCheckDataImpl ( ServingCheckResult . AC_VersionIncorrect , alt ) ; } else { return new ServingCheckDataImpl ( ServingCheckResult . AC_VersionIncorrect ) ; } } return new ServingCheckDataImpl ( ServingCheckResult . AC_NotServing ) ;
public class SqlDateTimeUtils { /** * NOTE : * ( 1 ) . JDK relies on the operating system clock for time . * Each operating system has its own method of handling date changes such as * leap seconds ( e . g . OS will slow down the clock to accommodate for this ) . * ( 2 ) . DST ( Daylight Saving Time ) is a legal issue , governments changed it * over time . Some days are NOT exactly 24 hours long , it could be 23/25 hours * long on the first or last day of daylight saving time . * JDK can handle DST correctly . * TODO : * carefully written algorithm can improve the performance */ public static int dateDiff ( long t1 , long t2 , TimeZone tz ) { } }
ZoneId zoneId = tz . toZoneId ( ) ; LocalDate ld1 = Instant . ofEpochMilli ( t1 ) . atZone ( zoneId ) . toLocalDate ( ) ; LocalDate ld2 = Instant . ofEpochMilli ( t2 ) . atZone ( zoneId ) . toLocalDate ( ) ; return ( int ) ChronoUnit . DAYS . between ( ld2 , ld1 ) ;
public class FileUtilsV2_2 { /** * Copies a file to a directory preserving the file date . * This method copies the contents of the specified source file * to a file of the same name in the specified destination directory . * The destination directory is created if it does not exist . * If the destination file exists , then this method will overwrite it . * < strong > Note : < / strong > This method tries to preserve the file ' s last * modified date / times using { @ link File # setLastModified ( long ) } , however * it is not guaranteed that the operation will succeed . * If the modification operation fails , no indication is provided . * @ param srcFile an existing file to copy , must not be < code > null < / code > * @ param destDir the directory to place the copy in , must not be < code > null < / code > * @ throws NullPointerException if source or destination is null * @ throws IOException if source or destination is invalid * @ throws IOException if an IO error occurs during copying * @ see # copyFile ( File , File , boolean ) */ public static void copyFileToDirectory ( File srcFile , File destDir ) throws IOException { } }
copyFileToDirectory ( srcFile , destDir , true ) ;
public class ElmBaseVisitor { /** * Visit a TupleTypeSpecifier . This method will be called for * every node in the tree that is a TupleTypeSpecifier . * @ param elm the ELM tree * @ param context the context passed to the visitor * @ return the visitor result */ public T visitTupleTypeSpecifier ( TupleTypeSpecifier elm , C context ) { } }
for ( TupleElementDefinition element : elm . getElement ( ) ) { visitElement ( element , context ) ; } return null ;
public class GeoPtConverter { /** * Converts the specified value to * { @ code com . google . appengine . api . datastore . GeoPt } . * @ see org . apache . commons . beanutils . converters . AbstractConverter # convertToType ( java . lang . Class , java . lang . Object ) */ @ SuppressWarnings ( "rawtypes" ) @ Override protected Object convertToType ( Class type , Object value ) throws Throwable { } }
String [ ] strings = value . toString ( ) . split ( "," ) ; if ( strings . length != 2 ) { throw new ConversionException ( "GeoPt 'value' must be able to be splitted into 2 float values " + "by ',' (latitude,longitude)" ) ; } try { float latitude = new BigDecimal ( strings [ 0 ] . trim ( ) ) . floatValue ( ) ; float longitude = new BigDecimal ( strings [ 1 ] . trim ( ) ) . floatValue ( ) ; return new GeoPt ( latitude , longitude ) ; } catch ( Exception e ) { throw new ConversionException ( "Cannot parse GeoPt value into 2 float values: " + "latitude [" + strings [ 0 ] . trim ( ) + "], longitude [" + strings [ 1 ] . trim ( ) + "]" ) ; }
public class DaoMetadata { /** * Populate the project metadata table . * @ param name the project name * @ param description an optional description . * @ param notes optional notes . * @ param creationUser the user creating the project . * @ throws java . io . IOException if something goes wrong . */ public static void fillProjectMetadata ( Connection connection , String name , String description , String notes , String creationUser ) throws Exception { } }
Date creationDate = new Date ( ) ; if ( name == null ) { name = "project-" + ETimeUtilities . INSTANCE . TIME_FORMATTER_LOCAL . format ( creationDate ) ; } if ( description == null ) { description = EMPTY_VALUE ; } if ( notes == null ) { notes = EMPTY_VALUE ; } if ( creationUser == null ) { creationUser = "dummy user" ; } insertPair ( connection , MetadataTableFields . KEY_NAME . getFieldName ( ) , name ) ; insertPair ( connection , MetadataTableFields . KEY_DESCRIPTION . getFieldName ( ) , description ) ; insertPair ( connection , MetadataTableFields . KEY_NOTES . getFieldName ( ) , notes ) ; insertPair ( connection , MetadataTableFields . KEY_CREATIONTS . getFieldName ( ) , String . valueOf ( creationDate . getTime ( ) ) ) ; insertPair ( connection , MetadataTableFields . KEY_LASTTS . getFieldName ( ) , EMPTY_VALUE ) ; insertPair ( connection , MetadataTableFields . KEY_CREATIONUSER . getFieldName ( ) , creationUser ) ; insertPair ( connection , MetadataTableFields . KEY_LASTUSER . getFieldName ( ) , EMPTY_VALUE ) ;
public class EditsVisitor { /** * Convenience shortcut method to parse a specific token type */ public VLongToken visitVLong ( EditsElement e ) throws IOException { } }
return ( VLongToken ) visit ( tokenizer . read ( new VLongToken ( e ) ) ) ;
public class Client { /** * Get a batch of users assigned to privilege . * This is usually the first version of the users assigned to privilege batching methods to call as it requires no after - cursor information . * @ param id Id of the privilege * @ param batchSize Size of the Batch * @ return OneLoginResponse of Long ( Batch ) * @ throws OAuthSystemException - if there is a IOException reading parameters of the httpURLConnection * @ throws OAuthProblemException - if there are errors validating the OneloginOAuthJSONResourceResponse and throwOAuthProblemException is enabled * @ throws URISyntaxException - if there is an error when generating the target URL at the getResource call * @ see < a target = " _ blank " href = " https : / / developers . onelogin . com / api - docs / 1 / privileges / get - users " > Get Assigned Users documentation < / a > */ public OneLoginResponse < Long > getUsersAssignedToPrivilegesBatch ( String id , int batchSize ) throws OAuthSystemException , OAuthProblemException , URISyntaxException { } }
return getUsersAssignedToPrivilegesBatch ( id , batchSize , null ) ;
public class ModelDiff { /** * Returns true if one of the differences of this ModelDiff instance is an OpenEngSBForeignKey . Returns false * otherwise . */ public boolean isForeignKeyChanged ( ) { } }
return CollectionUtils . exists ( differences . values ( ) , new Predicate ( ) { @ Override public boolean evaluate ( Object object ) { return ( ( ModelDiffEntry ) object ) . isForeignKey ( ) ; } } ) ;
public class CmsSchedulerThreadPool { /** * Grows the thread pool by one new thread if the maximum pool size * has not been reached . < p > */ private void growThreadPool ( ) { } }
// Creates, starts, and registers one new worker under m_nextRunnableLock, then bumps the
// count and wakes any threads blocked waiting for a runnable worker. The context class
// loader is propagated when m_inheritLoader is set. NOTE(review): m_currentThreadCount is
// read outside the synchronized block -- presumably all mutation happens under this same
// lock elsewhere; confirm, otherwise the unguarded check can race.
if ( m_currentThreadCount < m_maxThreadCount ) { // if maximum number is not reached grow the thread pool synchronized ( m_nextRunnableLock ) { m_workers [ m_currentThreadCount ] = new CmsSchedulerThread ( this , m_threadGroup , m_threadNamePrefix + m_currentThreadCount , m_threadPriority , m_makeThreadsDaemons ) ; m_workers [ m_currentThreadCount ] . start ( ) ; if ( m_inheritLoader ) { m_workers [ m_currentThreadCount ] . setContextClassLoader ( Thread . currentThread ( ) . getContextClassLoader ( ) ) ; } // increas the current size m_currentThreadCount ++ ; // notify the waiting threads m_nextRunnableLock . notifyAll ( ) ; } }
public class FnBigInteger { /** * It returns the { @ link String } representation of the target as a currency in the * default { @ link Locale } * @ return the { @ link String } representation of the input as a currency */ public static final Function < BigInteger , String > toCurrencyStr ( ) { } }
return ( Function < BigInteger , String > ) ( ( Function ) FnNumber . toCurrencyStr ( ) ) ;
public class ColorYuv { /** * Convert a 3 - channel { @ link Planar } image from YUV into RGB . If integer then YCbCr and not YUV . * @ param rgb ( Input ) RGB encoded image * @ param yuv ( Output ) YUV encoded image */ public static < T extends ImageGray < T > > void yuvToRgb ( Planar < T > yuv , Planar < T > rgb ) { } }
rgb . reshape ( rgb . width , rgb . height , 3 ) ; if ( rgb . getBandType ( ) == GrayF32 . class ) { if ( BoofConcurrency . USE_CONCURRENT ) { ImplColorYuv_MT . yuvToRgb_F32 ( ( Planar < GrayF32 > ) yuv , ( Planar < GrayF32 > ) rgb ) ; } else { ImplColorYuv . yuvToRgb_F32 ( ( Planar < GrayF32 > ) yuv , ( Planar < GrayF32 > ) rgb ) ; } } else if ( rgb . getBandType ( ) == GrayU8 . class ) { if ( BoofConcurrency . USE_CONCURRENT ) { ImplColorYuv_MT . ycbcrToRgb_U8 ( ( Planar < GrayU8 > ) yuv , ( Planar < GrayU8 > ) rgb ) ; } else { ImplColorYuv . ycbcrToRgb_U8 ( ( Planar < GrayU8 > ) yuv , ( Planar < GrayU8 > ) rgb ) ; } } else { throw new IllegalArgumentException ( "Unsupported band type " + rgb . getBandType ( ) . getSimpleName ( ) ) ; }
public class XmlRpcClientExecutorFactory { /** * < p > newExecutor . < / p > * @ param url a { @ link java . lang . String } object . * @ return a { @ link com . greenpepper . server . rpc . xmlrpc . client . XmlRpcClientExecutor } object . * @ throws com . greenpepper . server . rpc . xmlrpc . client . XmlRpcClientExecutorException if any . */ public static XmlRpcClientExecutor newExecutor ( String url ) throws XmlRpcClientExecutorException { } }
try { LOGGER . debug ( "Instanciating new executor for url {} " , url ) ; return new XmlRpcV2ClientImpl ( url ) ; } catch ( Exception ex ) { throw new XmlRpcClientExecutorException ( GreenPepperServerErrorKey . GENERAL_ERROR , ex ) ; }
public class SortModel { /** * Check to see if the SortModel contains a { @ link Sort } whose sort expression matches the given * < code > sortExpression < / code > . * @ param sortExpression the sortExpression used to locate a { @ link Sort } * @ return < code > true < / code > if a { @ link Sort } is found whose { @ link Sort # getSortExpression ( ) } matches * the given < code > sortExpression < / code > . < code > false < / code > otherwise . */ public boolean isSorted ( String sortExpression ) { } }
if ( sortExpression == null ) return false ; Sort term = findSort ( sortExpression ) ; if ( term == null || term . getDirection ( ) == SortDirection . NONE ) return false ; else return true ;
public class NumberUtils { /** * Formats a number as per the given values . * @ param target The number to format . * @ param minIntegerDigits Minimum number digits to return ( 0 padding ) . * @ param thousandsPointType Character to use for separating number groups . * @ param fractionDigits Minimum number of fraction digits to format to * ( 0 padding ) . * @ param decimalPointType Character to use for separating decimals . * @ param locale Locale to draw more information from . * @ return The number formatted as specified , or { @ code null } if the number * given is { @ code null } . */ private static String formatNumber ( final Number target , final Integer minIntegerDigits , final NumberPointType thousandsPointType , final Integer fractionDigits , final NumberPointType decimalPointType , final Locale locale ) { } }
Validate . notNull ( fractionDigits , "Fraction digits cannot be null" ) ; Validate . notNull ( decimalPointType , "Decimal point type cannot be null" ) ; Validate . notNull ( thousandsPointType , "Thousands point type cannot be null" ) ; Validate . notNull ( locale , "Locale cannot be null" ) ; if ( target == null ) { return null ; } DecimalFormat format = ( DecimalFormat ) NumberFormat . getNumberInstance ( locale ) ; format . setMinimumFractionDigits ( fractionDigits . intValue ( ) ) ; format . setMaximumFractionDigits ( fractionDigits . intValue ( ) ) ; if ( minIntegerDigits != null ) { format . setMinimumIntegerDigits ( minIntegerDigits . intValue ( ) ) ; } format . setDecimalSeparatorAlwaysShown ( decimalPointType != NumberPointType . NONE && fractionDigits . intValue ( ) > 0 ) ; format . setGroupingUsed ( thousandsPointType != NumberPointType . NONE ) ; format . setDecimalFormatSymbols ( computeDecimalFormatSymbols ( decimalPointType , thousandsPointType , locale ) ) ; return format . format ( target ) ;
public class JSONDocApiMethodDocValidator { /** * This checks that some of the properties are correctly set to produce a meaningful documentation and a working playground . In case this does not happen * an error string is added to the jsondocerrors list in ApiMethodDoc . * It also checks that some properties are be set to produce a meaningful documentation . In case this does not happen * an error string is added to the jsondocwarnings list in ApiMethodDoc . * @ param apiMethodDoc * @ return */ public static ApiMethodDoc validateApiMethodDoc ( ApiMethodDoc apiMethodDoc , MethodDisplay displayMethodAs ) { } }
// Validation is tiered: structural problems (missing path, unnamed path/query params,
// unnamed headers) become errors; missing produces/consumes on write verbs become
// warnings; missing descriptions/summary/response-object become hints. A missing path
// is additionally written INTO the doc itself (setPath with the error text) so the
// playground surfaces it; likewise a missing summary under SUMMARY display mode.
if ( apiMethodDoc . getPath ( ) . isEmpty ( ) ) { apiMethodDoc . setPath ( Sets . newHashSet ( ERROR_MISSING_METHOD_PATH ) ) ; apiMethodDoc . addJsondocerror ( ERROR_MISSING_METHOD_PATH ) ; } if ( apiMethodDoc . getSummary ( ) . trim ( ) . isEmpty ( ) && displayMethodAs . equals ( MethodDisplay . SUMMARY ) ) { apiMethodDoc . setSummary ( MESSAGE_MISSING_METHOD_SUMMARY ) ; apiMethodDoc . addJsondochint ( HINT_MISSING_METHOD_SUMMARY ) ; } for ( ApiParamDoc apiParamDoc : apiMethodDoc . getPathparameters ( ) ) { if ( apiParamDoc . getName ( ) . trim ( ) . isEmpty ( ) ) { apiMethodDoc . addJsondocerror ( ERROR_MISSING_PATH_PARAM_NAME ) ; } if ( apiParamDoc . getDescription ( ) . trim ( ) . isEmpty ( ) ) { apiMethodDoc . addJsondochint ( HINT_MISSING_PATH_PARAM_DESCRIPTION ) ; } } for ( ApiParamDoc apiParamDoc : apiMethodDoc . getQueryparameters ( ) ) { if ( apiParamDoc . 
getName ( ) . trim ( ) . isEmpty ( ) ) { apiMethodDoc . addJsondocerror ( ERROR_MISSING_QUERY_PARAM_NAME ) ; } if ( apiParamDoc . getDescription ( ) . trim ( ) . isEmpty ( ) ) { apiMethodDoc . addJsondochint ( HINT_MISSING_QUERY_PARAM_DESCRIPTION ) ; } } for ( ApiHeaderDoc apiHeaderDoc : apiMethodDoc . getHeaders ( ) ) { if ( apiHeaderDoc . getName ( ) . trim ( ) . isEmpty ( ) ) { apiMethodDoc . addJsondocerror ( ERROR_MISSING_HEADER_NAME ) ; } } if ( apiMethodDoc . getProduces ( ) . isEmpty ( ) ) { apiMethodDoc . addJsondocwarning ( WARN_MISSING_METHOD_PRODUCES ) ; } if ( ( apiMethodDoc . getVerb ( ) . contains ( ApiVerb . POST ) || apiMethodDoc . getVerb ( ) . contains ( ApiVerb . PUT ) ) && apiMethodDoc . getConsumes ( ) . isEmpty ( ) ) { apiMethodDoc . addJsondocwarning ( WARN_MISSING_METHOD_CONSUMES ) ; } if ( apiMethodDoc . getDescription ( ) . trim ( ) . isEmpty ( ) ) { apiMethodDoc . addJsondochint ( HINT_MISSING_METHOD_DESCRIPTION ) ; } if ( apiMethodDoc . getResponse ( ) == null ) { apiMethodDoc . addJsondochint ( HINT_MISSING_METHOD_RESPONSE_OBJECT ) ; } return apiMethodDoc ;