signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class YokeSecurity { /** * Signs a String value with a given MAC */ public static String sign ( @ NotNull String val , @ NotNull Mac mac ) { } }
return val + "." + Base64 . getEncoder ( ) . encodeToString ( val . getBytes ( ) ) ;
public class AuditEvent {
    /**
     * Set the observer keys/values. The provided Map will completely replace the existing
     * observer, i.e. all current observer keys/values will be removed and the new observer
     * keys/values will be added.
     *
     * @param observer Map of all the observer keys/values
     */
    public void setObserver(Map<String, Object> observer) {
        // Drop every existing observer entry first so no stale keys survive the replacement.
        removeEntriesStartingWith(OBSERVER);
        // NOTE(review): assumes the supplied map's keys already carry the OBSERVER prefix
        // expected by removeEntriesStartingWith — confirm against the callers.
        eventMap.putAll(observer);
    }
}
public class AmazonConfigClient {
    /**
     * Returns status information for each of your AWS managed Config rules. The status includes
     * information such as the last time AWS Config invoked the rule, the last time AWS Config
     * failed to invoke the rule, and the related error for the last failure.
     *
     * @param request the DescribeConfigRuleEvaluationStatus request
     * @return Result of the DescribeConfigRuleEvaluationStatus operation returned by the service
     * @throws NoSuchConfigRuleException if one or more AWS Config rules in the request are invalid
     * @throws InvalidParameterValueException if one or more of the specified parameters are invalid
     * @throws InvalidNextTokenException if the specified next token is invalid
     * @see AmazonConfig.DescribeConfigRuleEvaluationStatus
     */
    @Override
    public DescribeConfigRuleEvaluationStatusResult describeConfigRuleEvaluationStatus(DescribeConfigRuleEvaluationStatusRequest request) {
        // Run the standard client-side pre-execution hooks (request handlers etc.) first.
        request = beforeClientExecution(request);
        // Delegate to the generated executor that performs the actual service call.
        return executeDescribeConfigRuleEvaluationStatus(request);
    }
}
public class Validators {
    /**
     * Creates and returns a validator, which allows to validate texts to ensure that they only
     * contain numbers. Empty texts are also accepted.
     *
     * @param context the context used to retrieve the error message; may not be null
     * @return the validator which has been created, as an instance of the type {@link Validator}
     */
    public static Validator<CharSequence> number(@NonNull final Context context) {
        // Delegate to NumberValidator using the library's default error message resource.
        return new NumberValidator(context, R.string.default_error_message);
    }
}
public class EstimateSceneCalibrated {
    /**
     * Adds features which were triangulated using the stereo pair after the scale factor has been
     * determined. Doesn't mark the other view as being processed; its 3D pose will be estimated
     * later on using PNP with the new features and features determined later on.
     *
     * @param base  the view whose world pose is already known
     * @param edge  the stereo motion connecting {@code base} to the other view
     * @param scale scale factor applied to the edge translation and its triangulated points
     */
    void addTriangulatedStereoFeatures(View base, Motion edge, double scale) {
        View viewA = edge.viewSrc;
        View viewB = edge.viewDst;
        boolean baseIsA = base == viewA;
        View other = baseIsA ? viewB : viewA;

        // Determine transform from other to world
        edge.a_to_b.T.scale(scale);
        Se3_F64 otherToBase = baseIsA ? edge.a_to_b.invert(null) : edge.a_to_b.copy();
        otherToBase.concat(base.viewToWorld, other.viewToWorld);

        // Convert already computed stereo 3D features and turn them into real features
        for (int i = 0; i < edge.stereoTriangulations.size(); i++) {
            Feature3D edge3D = edge.stereoTriangulations.get(i);

            int indexSrc = edge3D.obsIdx.get(0);
            int indexDst = edge3D.obsIdx.get(1);

            // Feature already associated with this observation in the base view, if any
            Feature3D world3D = baseIsA ? viewA.features3D[indexSrc] : viewB.features3D[indexDst];

            // find the 3D location of the point in world frame
            edge3D.worldPt.scale(scale);
            if (baseIsA) {
                viewA.viewToWorld.transform(edge3D.worldPt, edge3D.worldPt);
            } else {
                // Point is in A's frame: move it into B's frame first, then into world
                edge.a_to_b.transform(edge3D.worldPt, edge3D.worldPt);
                viewB.viewToWorld.transform(edge3D.worldPt, edge3D.worldPt);
            }

            // See if the feature is already known
            if (world3D != null) {
                // Add the other view if another feature in the other view was not already
                // associated with this feature
                if (!world3D.views.contains(other)) {
                    world3D.views.add(other);
                    world3D.obsIdx.add(baseIsA ? indexDst : indexSrc);
                }
                // Retriangulate the point if it appears that this stereo pair is better than
                // the one which originally computed it
                if (world3D.triangulationAngle >= edge3D.triangulationAngle) {
                    continue;
                }
                world3D.worldPt.set(edge3D.worldPt);
                world3D.triangulationAngle = edge3D.triangulationAngle;
                other.features3D[baseIsA ? indexDst : indexSrc] = edge3D;
            } else {
                // Brand-new feature: register it globally and in both views
                graph.features3D.add(edge3D);
                viewA.features3D[indexSrc] = edge3D;
                viewB.features3D[indexDst] = edge3D;
            }
        }

        // free memory
        edge.stereoTriangulations = new ArrayList<>();
    }
}
public class JpaNamingStrategy {
    /**
     * Derives a request name for a JPA invocation by dispatching on the kind of JPA method.
     * CHECKSTYLE:OFF
     */
    public String getRequestName(JpaMethod jpaMethod, Method javaMethod, Object[] args, Query query) {
        // CHECKSTYLE:ON
        final String requestName;
        switch (jpaMethod) {
            // Query factories: name the request after the query being created.
            case CREATE_QUERY:
            case CREATE_NAMED_QUERY:
            case CREATE_NATIVE_QUERY:
            case CREATE_STORED_PROCEDURE_QUERY:
            case CREATE_NAMED_STORED_PROCEDURE_QUERY:
                requestName = getQueryRequestName(javaMethod, args, query);
                break;
            // find(entityClass, key): name the request after the class argument.
            case FIND:
                requestName = getMethodWithClassArgRequestName(javaMethod, args);
                break;
            // Entity lifecycle operations: name the request after the entity argument.
            case MERGE:
            case PERSIST:
            case REFRESH:
            case REMOVE:
            case DETACH:
            case LOCK:
                requestName = getMethodWithEntityArgRequestName(javaMethod, args);
                break;
            // flush() has no arguments worth encoding in the name.
            case FLUSH:
                requestName = getNoArgsRequestName(javaMethod);
                break;
            // Everything else gets the generic fallback naming.
            case OTHER:
            default:
                requestName = getOtherRequestName(javaMethod, args);
                break;
        }
        return requestName;
    }
}
public class PayCloseRequest { /** * 扩展信息 */ @ Override public void checkVaild ( ) { } }
super . checkVaild ( ) ; if ( this . paymoney < 1 ) throw new RuntimeException ( "paymoney is illegal" ) ; if ( this . thirdpayno == null || this . thirdpayno . isEmpty ( ) ) throw new RuntimeException ( "thirdpayno is illegal" ) ;
public class RDBMServices {
    /**
     * Gets a named DataSource, with special handling for the PORTAL_DB datasource: the core
     * uPortal database is resolved through its dedicated locator, every other name is resolved
     * as a Spring bean of type DataSource.
     *
     * @param name the name of the DataSource to get
     * @return a named DataSource or <code>null</code> if one cannot be found
     * @deprecated Where possible code should be injected with a {@link DataSource} object via
     *             the Spring application context
     */
    @Deprecated
    public static DataSource getDataSource(String name) {
        // The core uPortal database has its own locator with dedicated handling.
        if (PORTAL_DB.equals(name)) {
            return PortalDbLocator.getPortalDb();
        }
        // All other data sources are looked up as Spring beans by name.
        final ApplicationContext applicationContext = PortalApplicationContextLocator.getApplicationContext();
        // NOTE(review): the documented contract promises null when no DataSource is found,
        // but getBean throws when the bean is missing — confirm what callers expect.
        final DataSource dataSource = (DataSource) applicationContext.getBean(name, DataSource.class);
        return dataSource;
    }
}
public class LineInput { public synchronized int read ( byte b [ ] , int off , int len ) throws IOException { } }
int avail = _avail - _pos ; if ( avail <= 0 ) { fill ( ) ; avail = _avail - _pos ; } if ( avail <= 0 ) len = - 1 ; else { len = ( avail < len ) ? avail : len ; System . arraycopy ( _buf , _pos , b , off , len ) ; _pos += len ; } return len ;
public class WebUtil { /** * find cookie from request * @ param request current request * @ param name cookie name * @ return cookie value or null */ public static Cookie findCookie ( HttpServletRequest request , String name ) { } }
if ( request != null ) { Cookie [ ] cookies = request . getCookies ( ) ; if ( cookies != null && cookies . length > 0 ) { for ( Cookie cookie : cookies ) { if ( cookie . getName ( ) . equals ( name ) ) { return cookie ; } } } } return null ;
public class TimeZoneNamesImpl {
    /**
     * Initialize the transient fields, called from the constructor and readObject.
     *
     * @param locale The locale
     */
    private void initialize(ULocale locale) {
        // Load the zoneStrings resource bundle for the requested locale.
        ICUResourceBundle bundle = (ICUResourceBundle) ICUResourceBundle.getBundleInstance(ICUData.ICU_ZONE_BASE_NAME, locale);
        _zoneStrings = (ICUResourceBundle) bundle.get(ZONE_STRINGS_BUNDLE);

        // TODO: Access is synchronized, can we use a non-concurrent map?
        _tzNamesMap = new ConcurrentHashMap<String, ZNames>();
        _mzNamesMap = new ConcurrentHashMap<String, ZNames>();
        _namesFullyLoaded = false;

        _namesTrie = new TextTrieMap<NameInfo>(true);
        _namesTrieFullyLoaded = false;

        // Preload zone strings for the default time zone
        TimeZone tz = TimeZone.getDefault();
        String tzCanonicalID = ZoneMeta.getCanonicalCLDRID(tz);
        if (tzCanonicalID != null) {
            loadStrings(tzCanonicalID);
        }
    }
}
public class RankMap { /** * This method gets all elements with a { @ link # getRank ( Object ) rank } greater or equal to the given { @ code threshold } . * @ param threshold is the minimum accepted { @ link # getRank ( Object ) rank } . * @ return the list with all elements better or equal to the given { @ code threshold } . */ public List < E > getBetterOrEqual ( int threshold ) { } }
List < E > bests = new ArrayList < > ( ) ; for ( E element : this . map . keySet ( ) ) { Ranking ranking = this . map . get ( element ) ; if ( ( ranking != null ) && ( ranking . rank >= threshold ) ) { bests . add ( element ) ; } } return bests ;
public class GeoLocationUtil { /** * Replies if the specified distances are approximatively equal , * less or greater than . * @ param distance1 the first distance . * @ param distance2 the second distance . * @ return a negative value if the parameter < var > distance1 < / var > is * lower than < var > distance2 < / var > , a positive if < var > distance1 < / var > * is greater than < var > distance2 < / var > , zero if the two parameters * are approximatively equal . */ @ Pure public static int epsilonCompareToDistance ( double distance1 , double distance2 ) { } }
final double min = distance2 - distancePrecision ; final double max = distance2 + distancePrecision ; if ( distance1 >= min && distance1 <= max ) { return 0 ; } if ( distance1 < min ) { return - 1 ; } return 1 ;
public class FileUtils {
    /**
     * Verify that the file is within the base directory.
     * {@link org.mapfish.print.IllegalFileAccessException} will be thrown if the assertion
     * does not hold.
     *
     * @param descriptorOfBase a simple description of the base file, for example: configuration
     * @param child the file to test that it is a child of base
     * @param baseFiles the directories that can legally contain the child
     * @return true when the child lies inside one of the base directories
     */
    public static boolean assertIsSubDirectory(final String descriptorOfBase, final File child, final File... baseFiles) {
        // Canonicalize to resolve ".." segments and symlinks before comparing paths.
        File canonicalChild;
        try {
            canonicalChild = child.getCanonicalFile();
        } catch (IOException e) {
            throw new Error("Unable to get the canonical file of '" + child + "'. Therefore it is not possible to verify if it is a " + "child of '" + Arrays.toString(baseFiles) + "'.");
        }
        for (File base : baseFiles) {
            File canonicalBase;
            try {
                canonicalBase = base.getCanonicalFile();
            } catch (IOException e) {
                throw new Error("Unable to get the canonical file of '" + base + "'. Therefore it is not possible to verify if '" + child + "' is a child of it.");
            }
            // Walk up from the child toward the filesystem root; hitting the base proves containment.
            File parentFile = canonicalChild;
            while (parentFile != null) {
                if (canonicalBase.equals(parentFile)) {
                    return true;
                }
                parentFile = parentFile.getParentFile();
            }
        }
        // No base directory contained the child: log the attempt and refuse access.
        LOGGER.warn("A user attempted to access a file not within the '{}' directories ({}). Attempted access " + "to: {}", descriptorOfBase, Arrays.toString(baseFiles), canonicalChild);
        throw new IllegalFileAccessException("'" + canonicalChild + "' identifies a file that is not within the '" + descriptorOfBase + "' directories: " + Arrays.toString(baseFiles));
    }
}
public class NeighborhoodQuery {
    /**
     * Executes the query.
     *
     * @return the neighborhood: every graph object reachable by BFS within the configured
     *         limit, in the selected direction(s)
     */
    public Set<GraphObject> run() {
        Set<GraphObject> neighborhood = new HashSet<GraphObject>();
        // Collect the upstream reach when UPSTREAM or BOTHSTREAM was requested.
        if (direction == Direction.UPSTREAM || direction == Direction.BOTHSTREAM) {
            BFS upstreamSearch = new BFS(sourceNodes, null, Direction.UPSTREAM, this.limit);
            neighborhood.addAll(upstreamSearch.run().keySet());
        }
        // Collect the downstream reach when DOWNSTREAM or BOTHSTREAM was requested.
        if (direction == Direction.DOWNSTREAM || direction == Direction.BOTHSTREAM) {
            BFS downstreamSearch = new BFS(sourceNodes, null, Direction.DOWNSTREAM, this.limit);
            neighborhood.addAll(downstreamSearch.run().keySet());
        }
        return neighborhood;
    }
}
public class Jenkins {
    /**
     * Accepts submission from the configuration page.
     *
     * @param req the submitted configuration form request
     * @param rsp the response the redirect is rendered into
     * @throws FormException when a descriptor rejects its portion of the form
     */
    @RequirePOST
    public synchronized void doConfigSubmit(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException, FormException {
        // Batch all save operations so configuration is persisted once, in commit().
        BulkChange bc = new BulkChange(this);
        try {
            checkPermission(ADMINISTER);
            JSONObject json = req.getSubmittedForm();
            systemMessage = Util.nullify(req.getParameter("system_message"));
            // Let every global-config descriptor consume its slice of the form;
            // result stays true only if all of them succeed.
            boolean result = true;
            for (Descriptor<?> d : Functions.getSortedDescriptorsForGlobalConfigUnclassified())
                result &= configureDescriptor(req, json, d);
            save();
            updateComputerList();
            // On success redirect to the root page; otherwise send the user back to the form.
            if (result)
                FormApply.success(req.getContextPath() + '/').generateResponse(req, rsp, null);
            else
                FormApply.success("configure").generateResponse(req, rsp, null); // back to config
        } finally {
            // Commit even on failure so the BulkChange scope is always closed.
            bc.commit();
        }
    }
}
public class AsyncLibrary {
    /**
     * Creates a new AIO completion port via the native layer and records its handle.
     *
     * @see com.ibm.io.async.IAsyncProvider#getNewCompletionPort()
     * @return the handle of the newly created completion port
     * @throws AsyncException if the native call fails
     */
    @Override
    public synchronized long getNewCompletionPort() throws AsyncException {
        long port = aio_newCompletionPort();
        // Track the handle — presumably so all ports can be cleaned up later; confirm
        // against the code that consumes completionPorts.
        completionPorts.add(Long.valueOf(port));
        return port;
    }
}
public class TableWriterServiceImpl {
    /**
     * Writes a page to the current segment.
     *
     * @param page the page to write
     * @param sOut the segment stream to write into
     * @param oldSequence previous sequence number (unused here; kept for the service contract)
     * @param saveLength length to persist
     * @param saveTail tail position to persist
     * @param sequenceWrite page sequence to correlate write requests with flush completions
     * @param result completion callback for the write
     */
    @InService(TableWriterService.class)
    public void writePage(Page page, SegmentStream sOut, long oldSequence, int saveLength, int saveTail, int sequenceWrite, Result<Integer> result) {
        try {
            sOut.writePage(this, page, saveLength, saveTail, sequenceWrite, result);
        } catch (Exception e) {
            // NOTE(review): failures are only printed and `result` is never completed on
            // this path — confirm callers don't block forever waiting on the result.
            e.printStackTrace();
        }
    }
}
public class References {
    /**
     * Gets a required component reference that matches specified locator.
     *
     * @param locator the locator to find a reference by
     * @return a matching component reference
     * @throws ReferenceException when no references found
     */
    public Object getOneRequired(Object locator) throws ReferenceException {
        // The `true` flag marks the lookup as required; presumably find() itself throws
        // ReferenceException when nothing matches, making the null branch defensive only
        // — TODO confirm, otherwise this can return null despite the documented contract.
        List<Object> components = find(Object.class, locator, true);
        return components.size() > 0 ? components.get(0) : null;
    }
}
public class EvaluationContext {
    /**
     * Tries to resolve variables embedded in the given string.
     *
     * @param value the raw string possibly containing variable references
     * @param ignoreWarnings whether resolution warnings should be suppressed
     * @return the string with variables substituted
     * @throws ConfigEvaluatorException if variable resolution fails
     */
    public String resolveString(String value, boolean ignoreWarnings) throws ConfigEvaluatorException {
        // Delegate substitution to the evaluator, passing this context for variable lookups.
        value = variableEvaluator.resolveVariables(value, this, ignoreWarnings);
        return value;
    }
}
public class Digester { /** * 初始化 * @ param algorithm 算法 * @ param provider 算法提供者 , null表示JDK默认 , 可以引入Bouncy Castle等来提供更多算法支持 * @ return { @ link Digester } * @ throws CryptoException Cause by IOException */ public Digester init ( String algorithm , Provider provider ) { } }
if ( null == provider ) { this . digest = SecureUtil . createMessageDigest ( algorithm ) ; } else { try { this . digest = MessageDigest . getInstance ( algorithm , provider ) ; } catch ( NoSuchAlgorithmException e ) { throw new CryptoException ( e ) ; } } return this ;
public class ComputeFunction {
    /**
     * Sends the given message to the vertex identified by the given key. If the target vertex
     * does not exist, the next superstep will cause an exception due to a non-deliverable message.
     *
     * @param target The key (id) of the target vertex to message.
     * @param m The message.
     */
    public final void sendMessageTo(K target, Message m) {
        // Reuse the shared output tuple to avoid per-message allocation.
        outMsg.f0 = target;
        outMsg.f1 = m;
        // Messages are emitted as the Right variant of the Either output stream.
        out.collect(Either.Right(outMsg));
    }
}
public class TempByteHolder {
    /**
     * Override directory to create temporary file in. Does not affect already open temp file.
     *
     * @param dir File object representing temporary directory. May be null which means that
     *            system default (java.io.tmpdir system property) should be used.
     * @throws IOException if the canonical path cannot be resolved
     */
    public void setTempDirectory(File dir) throws IOException {
        // Resolve symlinks and relative segments before storing the directory.
        File td = dir.getCanonicalFile();
        // NOTE(review): a non-directory argument is silently ignored rather than rejected
        // — confirm this is the intended contract.
        if (td.isDirectory()) {
            _temp_directory = td;
        }
    }
}
public class Vertex {
    /**
     * Creates a transliterated person-name vertex instance.
     *
     * @param realWord  the actual word observed in the text
     * @param frequency the frequency associated with the transliterated-name attribute
     * @return a new Vertex tagged as a person name
     */
    public static Vertex newTranslatedPersonInstance(String realWord, int frequency) {
        // Nature.nrf is the part-of-speech tag used for transliterated person names.
        return new Vertex(Predefine.TAG_PEOPLE, realWord, new CoreDictionary.Attribute(Nature.nrf, frequency));
    }
}
public class WorkerPool {
    /**
     * {@inheritDoc}
     *
     * Propagates the given exception handler to every worker currently in the pool.
     */
    @Override
    public void setExceptionHandler(final ExceptionHandler exceptionHandler) {
        // Each worker holds its own handler reference, so update them all.
        for (final Worker worker : this.workers) {
            worker.setExceptionHandler(exceptionHandler);
        }
    }
}
public class ThriftConnectionHandle {
    /**
     * Returns all thrift clients when this handle runs in multi-service mode.
     *
     * @return map of all thrift clients in multi-service mode
     * @throws IllegalStateException when the underlying connection is single-service
     *         (message text is in Chinese: "calling this method is not allowed in
     *         single-service mode")
     */
    public Map<String, T> getMuiltServiceClients() {
        // Only multi-service connections can expose a client map.
        if (this.thriftConnection instanceof MulitServiceThriftConnecion) {
            MulitServiceThriftConnecion<T> connection = (MulitServiceThriftConnecion<T>) thriftConnection;
            return connection.getMuiltServiceClients();
        }
        throw new IllegalStateException("单服务运行模式下不允许调用该方法");
    }
}
public class CommerceOrderNotePersistenceImpl {
    /**
     * Returns the last commerce order note in the ordered set where commerceOrderId = &#63;
     * and restricted = &#63;.
     *
     * @param commerceOrderId the commerce order ID
     * @param restricted the restricted
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce order note
     * @throws NoSuchOrderNoteException if a matching commerce order note could not be found
     */
    @Override
    public CommerceOrderNote findByC_R_Last(long commerceOrderId, boolean restricted, OrderByComparator<CommerceOrderNote> orderByComparator) throws NoSuchOrderNoteException {
        // Delegate to the non-throwing fetch variant first.
        CommerceOrderNote commerceOrderNote = fetchByC_R_Last(commerceOrderId, restricted, orderByComparator);
        if (commerceOrderNote != null) {
            return commerceOrderNote;
        }
        // Not found: build the standard "no such entity" message including the lookup keys.
        StringBundler msg = new StringBundler(6);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("commerceOrderId=");
        msg.append(commerceOrderId);
        msg.append(", restricted=");
        msg.append(restricted);
        msg.append("}");
        throw new NoSuchOrderNoteException(msg.toString());
    }
}
public class HelloCLR {
    /**
     * Start Hello REEF job. Runs method runHelloCLR().
     *
     * @param args command line parameters; args[0] is the folder containing the .NET assemblies
     * @throws org.apache.reef.tang.exceptions.BindException configuration error
     * @throws org.apache.reef.tang.exceptions.InjectionException configuration error
     */
    public static void main(final String[] args) throws BindException, InjectionException {
        // Local runtime capped at two evaluators — sufficient for a hello-world job.
        final Configuration runtimeConfiguration = LocalRuntimeConfiguration.CONF.set(LocalRuntimeConfiguration.MAX_NUMBER_OF_EVALUATORS, 2).build();
        final File dotNetFolder = new File(args[0]).getAbsoluteFile();
        final LauncherStatus status = runHelloCLR(runtimeConfiguration, JOB_TIMEOUT, dotNetFolder);
        LOG.log(Level.INFO, "REEF job completed: {0}", status);
    }
}
public class FilePathUtil { /** * 获得上层目录的路径 */ public static String getParentPath ( String path ) { } }
String parentPath = path ; if ( Platforms . FILE_PATH_SEPARATOR . equals ( parentPath ) ) { return parentPath ; } parentPath = MoreStringUtil . removeEnd ( parentPath , Platforms . FILE_PATH_SEPARATOR_CHAR ) ; int idx = parentPath . lastIndexOf ( Platforms . FILE_PATH_SEPARATOR_CHAR ) ; if ( idx >= 0 ) { parentPath = parentPath . substring ( 0 , idx + 1 ) ; } else { parentPath = Platforms . FILE_PATH_SEPARATOR ; } return parentPath ;
public class Settings {
    /**
     * Value is split by comma and trimmed. Never returns null.
     * <br>
     * Examples:
     * <ul>
     * <li>"one,two,three" -&gt; ["one", "two", "three"]</li>
     * <li>"  one, two, three  " -&gt; ["one", "two", "three"]</li>
     * <li>"one, , three" -&gt; ["one", "", "three"]</li>
     * </ul>
     */
    public String[] getStringArray(String key) {
        // Resolve aliased/deprecated keys to the effective property key.
        String effectiveKey = definitions.validKey(key);
        Optional<PropertyDefinition> def = getDefinition(effectiveKey);
        if ((def.isPresent()) && (def.get().multiValues())) {
            String value = getString(key);
            if (value == null) {
                return ArrayUtils.EMPTY_STRING_ARRAY;
            }
            List<String> values = new ArrayList<>();
            // Multi-value properties use "%2C" as the escape for a literal comma.
            for (String v : Splitter.on(",").trimResults().split(value)) {
                values.add(v.replace("%2C", ","));
            }
            return values.toArray(new String[values.size()]);
        }
        // Non-multi-value properties fall back to plain comma splitting.
        return getStringArrayBySeparator(key, ",");
    }
}
public class Strings { /** * Returns the joined { @ code properties } with a < code > . < / code > delimiter , including a trailing * delimiter . */ public static String join ( List < ? extends PropertyInfo > properties ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( PropertyInfo info : properties ) sb . append ( info . getName ( ) ) . append ( '.' ) ; return sb . toString ( ) ;
public class LocationFormatter {
    /**
     * Formats the given location elements using {@code this} formatter.
     *
     * @see #format(Location)
     * @param lat the latitude part of the location
     * @param lon the longitude part of the location
     * @param ele the elevation part of the location
     * @return the format string
     * @throws FormatterException if the formatter tries to format a non-existing,
     *         non-optional location field
     */
    public String format(final Latitude lat, final Longitude lon, final Length ele) {
        // Wrap the parts into a Location and reuse the main formatting entry point.
        return format(Location.of(lat, lon, ele));
    }
}
public class Equation { /** * Removes all commas from the token list */ private void stripCommas ( TokenList tokens ) { } }
TokenList . Token t = tokens . getFirst ( ) ; while ( t != null ) { TokenList . Token next = t . next ; if ( t . getSymbol ( ) == Symbol . COMMA ) { tokens . remove ( t ) ; } t = next ; }
public class Consumers {
    /**
     * Yields the only element.
     *
     * @param <E> the element type parameter
     * @param iterable the iterable to be searched
     * @throws IllegalStateException if more than one element is found
     * @throws IllegalArgumentException if no element matches
     * @return the found element
     */
    public static <E> E one(Iterable<E> iterable) {
        dbc.precondition(iterable != null, "cannot call one with a null iterable");
        // OneElement enforces the exactly-one-element contract documented above.
        return new OneElement<E>().apply(iterable.iterator());
    }
}
public class CommerceAccountOrganizationRelPersistenceImpl {
    /**
     * Clears the cache for all commerce account organization rels.
     * The {@link EntityCache} and {@link FinderCache} are both cleared by this method.
     */
    @Override
    public void clearCache() {
        // Evict all cached entity instances of this model...
        entityCache.clearCache(CommerceAccountOrganizationRelImpl.class);
        // ...and every flavor of cached finder result.
        finderCache.clearCache(FINDER_CLASS_NAME_ENTITY);
        finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITH_PAGINATION);
        finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION);
    }
}
public class Header {
    /**
     * Setter for citationStatus - sets the value indicating the status of citation of a
     * PubMed document.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setCitationStatus(String v) {
        // Generated UIMA accessor: verify the feature is present before writing it.
        if (Header_Type.featOkTst && ((Header_Type) jcasType).casFeat_citationStatus == null)
            jcasType.jcas.throwFeatMissing("citationStatus", "de.julielab.jules.types.pubmed.Header");
        jcasType.ll_cas.ll_setStringValue(addr, ((Header_Type) jcasType).casFeatCode_citationStatus, v);
    }
}
public class NFAppenderAttachableImpl { /** * ( non - Javadoc ) * @ see * org . apache . log4j . helpers . AppenderAttachableImpl # appendLoopOnAppenders * ( org . apache . log4j . spi . LoggingEvent ) */ @ Override public int appendLoopOnAppenders ( LoggingEvent event ) { } }
int size = 0 ; Appender appender ; if ( appenderList != null ) { size = appenderList . size ( ) ; Iterator < Appender > it = appenderList . iterator ( ) ; while ( it . hasNext ( ) ) { appender = ( Appender ) it . next ( ) ; appender . doAppend ( event ) ; } } return size ;
public class ContactValidator {
    /**
     * {@inheritDoc}
     *
     * Validates the contact object's URL and email syntax, reporting an ERROR validation
     * event for each invalid field. A null contact is accepted silently.
     */
    @Override
    public void validate(ValidationHelper helper, Context context, String key, Contact t) {
        if (t != null) {
            // Report a localized error event for a syntactically invalid contact URL.
            String url = t.getUrl();
            if (url != null) {
                if (!ValidatorUtils.isValidURI(url)) {
                    final String message = Tr.formatMessage(tc, "contactInvalidURL", url);
                    helper.addValidationEvent(new ValidationEvent(ValidationEvent.Severity.ERROR, context.getLocation("url"), message));
                }
            }
            // Report a localized error event for a syntactically invalid email address.
            String email = t.getEmail();
            if (email != null) {
                if (!ValidatorUtils.isValidEmailAddress(email)) {
                    final String message = Tr.formatMessage(tc, "contactInvalidEmail", email);
                    helper.addValidationEvent(new ValidationEvent(ValidationEvent.Severity.ERROR, context.getLocation("email"), message));
                }
            }
        }
    }
}
public class DatabaseMetaData {
    /**
     * {@inheritDoc}
     *
     * Super-table hierarchies are not supported: this always returns an empty result set
     * carrying the four JDBC-mandated columns.
     */
    public ResultSet getSuperTables(final String catalog, final String schemaPattern, final String tableNamePattern) throws SQLException {
        // Build an empty 4-column row list with the standard getSuperTables column labels.
        return RowLists.rowList4(String.class, String.class, String.class, String.class).withLabel(1, "TABLE_CAT").withLabel(2, "TABLE_SCHEM").withLabel(3, "TABLE_NAME").withLabel(4, "SUPERTABLE_NAME").resultSet();
    }
}
public class Channel {
    /**
     * Register a block listener.
     *
     * @param listener function with single argument with type {@link BlockEvent}
     * @return The handle of the registered block listener.
     * @throws InvalidArgumentException if the channel is shutdown or the listener is null.
     */
    public String registerBlockListener(BlockListener listener) throws InvalidArgumentException {
        if (shutdown) {
            throw new InvalidArgumentException(format("Channel %s has been shutdown.", name));
        }
        if (null == listener) {
            throw new InvalidArgumentException("Listener parameter is null.");
        }
        // BL wraps the listener and registers it; the handle identifies the registration
        // for later unregistering.
        String handle = new BL(listener).getHandle();
        logger.trace(format("Register event BlockEvent listener %s", handle));
        return handle;
    }
}
public class CoreUtils { /** * Spin on a ReentrantLock before blocking . Default behavior is not to spin . */ public static void spinLock ( ReentrantLock lock ) { } }
if ( LOCK_SPIN_MICROSECONDS > 0 ) { long nanos = - 1 ; for ( ; ; ) { if ( lock . tryLock ( ) ) return ; if ( nanos == - 1 ) { nanos = System . nanoTime ( ) ; } else if ( System . nanoTime ( ) - nanos > LOCK_SPIN_MICROSECONDS ) { lock . lock ( ) ; return ; } } } else { lock . lock ( ) ; }
public class TrajectoryEnvelopeSolver { /** * Get all { @ link TrajectoryEnvelope } s that have no super - envelopes . * @ return All { @ link TrajectoryEnvelope } s that have no super - envelopes . */ public TrajectoryEnvelope [ ] getRootTrajectoryEnvelopes ( ) { } }
ArrayList < TrajectoryEnvelope > ret = new ArrayList < TrajectoryEnvelope > ( ) ; for ( Variable var : this . getVariables ( ) ) { TrajectoryEnvelope te = ( TrajectoryEnvelope ) var ; if ( ! te . hasSuperEnvelope ( ) ) ret . add ( te ) ; } return ret . toArray ( new TrajectoryEnvelope [ ret . size ( ) ] ) ;
public class PdfLine {
    /**
     * Gets the biggest descender for all the fonts used in this line.
     * Note that this is a negative number.
     *
     * @return maximum size of all the descenders used in this line
     */
    public float getDescender() {
        float descender = 0;
        for (int k = 0; k < line.size(); ++k) {
            PdfChunk ck = (PdfChunk) line.get(k);
            // Images contribute their vertical offset; text chunks contribute the
            // font's DESCENT metric scaled to the chunk's font size.
            if (ck.isImage())
                descender = Math.min(descender, ck.getImageOffsetY());
            else {
                PdfFont font = ck.font();
                descender = Math.min(descender, font.getFont().getFontDescriptor(BaseFont.DESCENT, font.size()));
            }
        }
        return descender;
    }
}
public class ReflectiveInterceptor {
    /**
     * Performs all necessary checks that need to be done before a field set should be allowed.
     *
     * @param field the field being written
     * @param target the object instance the field is written on
     * @param valueType the declared type of the value being assigned
     * @param value the value being assigned
     * @param makeAccessibleCopy whether an accessible copy of the field should be returned
     * @return an accessible copy of the field when makeAccessibleCopy is set, otherwise null
     * @throws IllegalAccessException when the caller may not write the field
     */
    private static Field asSetableField(Field field, Object target, Class<?> valueType, Object value, boolean makeAccessibleCopy) throws IllegalAccessException {
        // Must do the checks exactly in the same order as JVM if we want identical error messages.
        // JVM doesn't do this, since it cannot happen without reloading, we do it first of all.
        if (isDeleted(field)) {
            throw Exceptions.noSuchFieldError(field);
        }
        Class<?> clazz = field.getDeclaringClass();
        int mods = field.getModifiers();
        if (field.isAccessible() || Modifier.isPublic(mods & jlClassGetModifiers(clazz))) {
            // More expensive check not required / copy not required
        } else {
            // More expensive check required
            Class<?> callerClass = getCallerClass();
            JVM.ensureMemberAccess(callerClass, clazz, target, mods);
            if (makeAccessibleCopy) {
                // TODO: This code is not covered by a test. It needs a non-reloadable type with
                // non-public field, being accessed reflectively from a context that is
                // "privileged" to access it without setting the access flag.
                // copy: we must not change accessible flag on original field!
                field = JVM.copyField(field);
                field.setAccessible(true);
            }
        }
        if (isPrimitive(valueType)) {
            // It seems for primitive types, the order of the checks (in Sun JVM) is different!
            typeCheckFieldSet(field, valueType, value);
            if (!field.isAccessible() && Modifier.isFinal(mods)) {
                throw Exceptions.illegalSetFinalFieldException(field, field.getType(), coerce(value, field.getType()));
            }
        } else {
            if (!field.isAccessible() && Modifier.isFinal(mods)) {
                throw Exceptions.illegalSetFinalFieldException(field, valueType, value);
            }
            typeCheckFieldSet(field, valueType, value);
        }
        return makeAccessibleCopy ? field : null;
    }
}
public class JinxUtils { /** * Convert a Flickr geo context id to the corresponding { @ link net . jeremybrooks . jinx . JinxConstants . GeoContext } enum . * @ param contextId Flickr geo context id to convert . * @ return corresponding GeoContext enum , or null if the parameter is not a valid Flickr geo context id . */ public static JinxConstants . GeoContext flickrContextIdToGeoContext ( int contextId ) { } }
JinxConstants . GeoContext ret ; switch ( contextId ) { case 0 : ret = JinxConstants . GeoContext . not_defined ; break ; case 1 : ret = JinxConstants . GeoContext . indoors ; break ; case 2 : ret = JinxConstants . GeoContext . outdoors ; break ; default : ret = null ; break ; } return ret ;
public class SMPPOutboundServerSession { /** * Wait for outbind request . * @ param timeout is the timeout . * @ return the { @ link OutbindRequest } . * @ throws IllegalStateException if this invocation of this method has been made or invoke when state is not OPEN . * @ throws TimeoutException if the timeout has been reach and { @ link SMPPOutboundServerSession } are no more valid because * the connection will be close automatically . */ public OutbindRequest waitForOutbind ( long timeout ) throws IllegalStateException , TimeoutException { } }
// Only valid in OPEN state, and only once: starts the PDU reader worker thread,
// then blocks until an outbind request arrives or the timeout elapses.
// A repeated invocation surfaces as the receiver's IllegalStateException, which is
// re-wrapped with a clearer message. On timeout the session is closed first
// (the connection is no longer usable) before the TimeoutException is rethrown.
SessionState currentSessionState = getSessionState ( ) ; if ( currentSessionState . equals ( SessionState . OPEN ) ) { new SMPPOutboundServerSession . PDUReaderWorker ( ) . start ( ) ; try { return outbindRequestReceiver . waitForRequest ( timeout ) ; } catch ( IllegalStateException e ) { throw new IllegalStateException ( "Invocation of waitForOutbind() has been made" , e ) ; } catch ( TimeoutException e ) { close ( ) ; throw e ; } } else { throw new IllegalStateException ( "waitForOutbind() should be invoked on OPEN state, actual state is " + currentSessionState ) ; }
public class CmsSearchWorkplaceBean { /** * Sets the fields parameter value . < p > * @ param fields the fields parameter value to set */ public void setFields ( String fields ) { } }
if ( CmsStringUtil . isEmptyOrWhitespaceOnly ( fields ) ) { throw new CmsIllegalStateException ( Messages . get ( ) . container ( Messages . ERR_VALIDATE_SEARCH_PARAMS_0 ) ) ; } m_fields = fields ;
public class CheckAccessControls { /** * Determines whether the given OBJECTLIT property visibility violates the coding convention . * @ param key The objectlit key node ( STRING _ KEY , GETTER _ DEF , SETTER _ DEF , MEMBER _ FUNCTION _ DEF ) . */ private void checkKeyVisibilityConvention ( Node key , Node parent ) { } }
// Reports a CONVENTION_MISMATCH when an object-literal key whose NAME is
// private-by-convention carries JSDoc declaring a visibility other than
// PRIVATE or INHERITED. Early bails: no JSDoc on the key, the name is not
// private by convention, or the literal is not the right-hand side of an
// assignment to a ".prototype" property (checked via the grandparent ASSIGN
// node and its GETPROP target).
JSDocInfo info = key . getJSDocInfo ( ) ; if ( info == null ) { return ; } if ( ! isPrivateByConvention ( key . getString ( ) ) ) { return ; } Node assign = parent . getParent ( ) ; if ( assign == null || ! assign . isAssign ( ) ) { return ; } Node left = assign . getFirstChild ( ) ; if ( ! left . isGetProp ( ) || ! left . getLastChild ( ) . getString ( ) . equals ( "prototype" ) ) { return ; } Visibility declaredVisibility = info . getVisibility ( ) ; // Visibility is declared to be something other than private . if ( declaredVisibility != Visibility . INHERITED && declaredVisibility != Visibility . PRIVATE ) { compiler . report ( JSError . make ( key , CONVENTION_MISMATCH ) ) ; }
public class RedisClient { /** * Initialize indexer . */ private void initializeIndexer ( ) { } }
if ( this . indexManager . getIndexer ( ) != null && this . indexManager . getIndexer ( ) . getClass ( ) . getSimpleName ( ) . equals ( "RedisIndexer" ) ) { ( ( RedisIndexer ) this . indexManager . getIndexer ( ) ) . assignConnection ( getConnection ( ) ) ; }
public class AssetRenditionContentDispositionFilter { /** * This filter only processes GET requests that targets a nt : file resource . * @ param request * @ return true if the filter accepts the given request */ @ SuppressWarnings ( "null" ) private boolean accepts ( SlingHttpServletRequest request ) { } }
return request . getMethod ( ) . equalsIgnoreCase ( METHOD_GET ) && request . getResource ( ) != null && StringUtils . equals ( request . getResource ( ) . getValueMap ( ) . get ( JCR_PRIMARYTYPE , String . class ) , NT_FILE ) ;
public class GedLinkDocumentMongoToGedObjectConverterVisitor { /** * { @ inheritDoc } */ @ Override public final void visit ( final SubmissionLinkDocumentMongo document ) { } }
// Converts a Mongo submission-link document into a GedObject SubmissionLink
// attached to the current parent, using the document's string value as the
// target ObjectId and "Submission" as the link tag.
setGedObject ( new SubmissionLink ( getParent ( ) , "Submission" , new ObjectId ( document . getString ( ) ) ) ) ;
public class DigestHelper { /** * @ param key * @ param text * @ return * @ throws IllegalArgumentException */ public static byte [ ] sha1hmac ( byte [ ] key , byte [ ] text ) throws IllegalArgumentException { } }
try { SecretKey sk = new SecretKeySpec ( key , "HMACSHA1" ) ; Mac m = Mac . getInstance ( sk . getAlgorithm ( ) ) ; m . init ( sk ) ; return m . doFinal ( text ) ; } catch ( InvalidKeyException e ) { throw new IllegalArgumentException ( e ) ; } catch ( NoSuchAlgorithmException e ) { throw new IllegalArgumentException ( e ) ; }
public class NovaException { /** * / / { " badRequest " : { " message " : " AddressLimitExceeded : Address quota exceeded . You cannot allocate any more addresses " , " code " : 400 } } */ static public ExceptionItems parseException ( int code , String json ) { } }
// Maps an OpenStack/Nova error response onto an ExceptionItems descriptor.
// Steps, as the code shows:
//  - defaults: type GENERAL, message "unknown"; 401 is pre-classified AUTHENTICATION;
//  - unwraps the "badRequest" (400) or "overLimit" (413) envelope when present;
//  - pulls "message" from the top level, else scans each nested object for one;
//  - appends "details" (or "[code] message" when absent) to the details prefix;
//  - classifies by code/message prefix: 413 -> THROTTLING, address/ram limit ->
//    QUOTA, "unauthorized" -> AUTHENTICATION, "serviceunavailable" /
//    "servercapacityunavailable" -> CAPACITY, bad-request family ->
//    COMMUNICATION, "overlimit" -> QUOTA;
//  - returns null for "itemnotfound" so callers can treat it as plain not-found;
//  - on malformed JSON, logs a warning and appends the raw body to details.
ExceptionItems items = new ExceptionItems ( ) ; items . code = code ; items . type = CloudErrorType . GENERAL ; items . message = "unknown" ; items . details = "The cloud returned an error code with explanation: " ; if ( items . code == HttpStatus . SC_UNAUTHORIZED ) { items . type = CloudErrorType . AUTHENTICATION ; } if ( json != null ) { try { JSONObject ob = new JSONObject ( json ) ; if ( code == 400 && ob . has ( "badRequest" ) ) { ob = ob . getJSONObject ( "badRequest" ) ; } if ( code == 413 && ob . has ( "overLimit" ) ) { ob = ob . getJSONObject ( "overLimit" ) ; } if ( ob . has ( "message" ) ) { items . message = ob . getString ( "message" ) ; if ( items . message == null ) { items . message = "unknown" ; } else { items . message = items . message . trim ( ) ; } } if ( items . message . equals ( "unknown" ) ) { String [ ] names = JSONObject . getNames ( ob ) ; for ( String key : names ) { try { JSONObject msg = ob . getJSONObject ( key ) ; if ( msg . has ( "message" ) && ! msg . isNull ( "message" ) ) { items . message = msg . getString ( "message" ) ; } } catch ( JSONException e ) { items . message = ob . getString ( key ) ; } } } if ( ob . has ( "details" ) ) { items . details = items . details + ob . getString ( "details" ) ; } else { items . details = items . details + "[" + code + "] " + items . message ; } String t = items . message . toLowerCase ( ) . trim ( ) ; if ( code == 413 ) { items . type = CloudErrorType . THROTTLING ; } else if ( t . startsWith ( "addresslimitexceeded" ) || t . startsWith ( "ramlimitexceeded" ) ) { items . type = CloudErrorType . QUOTA ; } else if ( t . equals ( "unauthorized" ) ) { items . type = CloudErrorType . AUTHENTICATION ; } else if ( t . equals ( "serviceunavailable" ) ) { items . type = CloudErrorType . CAPACITY ; } else if ( t . equals ( "badrequest" ) || t . equals ( "badmediatype" ) || t . equals ( "badmethod" ) || t . equals ( "notimplemented" ) ) { items . type = CloudErrorType . 
COMMUNICATION ; } else if ( t . equals ( "overlimit" ) ) { items . type = CloudErrorType . QUOTA ; } else if ( t . equals ( "servercapacityunavailable" ) ) { items . type = CloudErrorType . CAPACITY ; } else if ( t . equals ( "itemnotfound" ) ) { return null ; } } catch ( JSONException e ) { NovaOpenStack . getLogger ( NovaException . class , "std" ) . warn ( "parseException(): Invalid JSON in cloud response: " + json ) ; items . details = items . details + " " + json ; } } return items ;
public class CmsModule { /** * Adjusts the site root and returns a cloned CmsObject , iff the module has set an import site that differs * from the site root of the CmsObject provided as argument . Otherwise returns the provided CmsObject unchanged . * @ param cms The original CmsObject . * @ param module The module where the import site is read from . * @ return The original CmsObject , or , if necessary , a clone with adjusted site root * @ throws CmsException see { @ link OpenCms # initCmsObject ( CmsObject ) } */ private static CmsObject adjustSiteRootIfNecessary ( final CmsObject cms , final CmsModule module ) throws CmsException { } }
CmsObject cmsClone ; if ( ( null == module . getSite ( ) ) || cms . getRequestContext ( ) . getSiteRoot ( ) . equals ( module . getSite ( ) ) ) { cmsClone = cms ; } else { cmsClone = OpenCms . initCmsObject ( cms ) ; cmsClone . getRequestContext ( ) . setSiteRoot ( module . getSite ( ) ) ; } return cmsClone ;
public class JettyBootstrap { /** * Blocks the calling thread until the server stops . * @ return this instance * @ throws JettyBootstrapException * if an exception occurs while blocking the thread */ public JettyBootstrap joinServer ( ) throws JettyBootstrapException { } }
try { if ( isServerStarted ( ) ) { LOG . debug ( "Joining Server..." ) ; server . join ( ) ; } else { LOG . warn ( "Can't join Server. Not started" ) ; } } catch ( InterruptedException e ) { throw new JettyBootstrapException ( e ) ; } return this ;
public class StringUtils { /** * Converts an object into a tab delimited string with given fields * Requires the object has public access for the specified fields * @ param object Object to convert * @ param delimiter delimiter * @ param fieldNames fieldnames * @ return String representing object */ public static String objectToColumnString ( Object object , String delimiter , String [ ] fieldNames ) throws IllegalAccessException , NoSuchFieldException , NoSuchMethodException , InvocationTargetException { } }
StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < fieldNames . length ; i ++ ) { if ( sb . length ( ) > 0 ) { sb . append ( delimiter ) ; } try { Field field = object . getClass ( ) . getDeclaredField ( fieldNames [ i ] ) ; sb . append ( field . get ( object ) ) ; } catch ( IllegalAccessException ex ) { Method method = object . getClass ( ) . getDeclaredMethod ( "get" + StringUtils . capitalize ( fieldNames [ i ] ) ) ; sb . append ( method . invoke ( object ) ) ; } } return sb . toString ( ) ;
public class NumericComparator { /** * Converts the lhs and rhs JsonNodes to the numeric values * and delegates to the matches method that operates on the * numeric values alone . * @ param lhs Lhs expression * @ param rhs Rhs expression * @ return Boolean result of the matches method of the * corresponding comparison operator */ @ Override public final boolean matches ( JsonNode lhs , JsonNode rhs ) { } }
// Extracts both sides as BigDecimal (Jackson's decimalValue()) and delegates to
// the numeric overload of matches supplied by the concrete comparison operator.
return matches ( lhs . decimalValue ( ) , rhs . decimalValue ( ) ) ;
public class TagUtils { /** * String - - > long * @ param value * @ return */ public static long getLong ( Object value ) { } }
if ( value == null ) { return 0 ; } return Long . valueOf ( value . toString ( ) ) . longValue ( ) ;
public class ASTTransformationVisitor { /** * Main loop entry . * First , it delegates to the super visitClass so we can collect the * relevant annotations in an AST tree walk . * Second , it calls the visit method on the transformation for each relevant * annotation found . * @ param classNode the class to visit */ public void visitClass ( ClassNode classNode ) { } }
// Skips the class entirely unless transforms are registered for this phase.
// Otherwise:
//  1. instantiates one ASTTransformation per registered transform class
//     (instantiation failures are reported as compile errors, not thrown);
//  2. inverts the class->nodes map into a node->transform-instances map;
//  3. first pass: super.visitClass walks the tree and fills targetNodes;
//  4. second pass: invokes each collected transform on its node pair, wiring
//     in the CompilationUnit for transforms that are CompilationUnitAware.
// only descend if we have annotations to look for Map < Class < ? extends ASTTransformation > , Set < ASTNode > > baseTransforms = classNode . getTransforms ( phase ) ; if ( ! baseTransforms . isEmpty ( ) ) { final Map < Class < ? extends ASTTransformation > , ASTTransformation > transformInstances = new HashMap < Class < ? extends ASTTransformation > , ASTTransformation > ( ) ; for ( Class < ? extends ASTTransformation > transformClass : baseTransforms . keySet ( ) ) { try { transformInstances . put ( transformClass , transformClass . newInstance ( ) ) ; } catch ( InstantiationException e ) { source . getErrorCollector ( ) . addError ( new SimpleMessage ( "Could not instantiate Transformation Processor " + transformClass , // + " declared by " + annotation . getClassNode ( ) . getName ( ) , source ) ) ; } catch ( IllegalAccessException e ) { source . getErrorCollector ( ) . addError ( new SimpleMessage ( "Could not instantiate Transformation Processor " + transformClass , // + " declared by " + annotation . getClassNode ( ) . getName ( ) , source ) ) ; } } // invert the map , is now one to many transforms = new HashMap < ASTNode , List < ASTTransformation > > ( ) ; for ( Map . Entry < Class < ? extends ASTTransformation > , Set < ASTNode > > entry : baseTransforms . entrySet ( ) ) { for ( ASTNode node : entry . getValue ( ) ) { List < ASTTransformation > list = transforms . get ( node ) ; if ( list == null ) { list = new ArrayList < ASTTransformation > ( ) ; transforms . put ( node , list ) ; } list . add ( transformInstances . get ( entry . getKey ( ) ) ) ; } } targetNodes = new LinkedList < ASTNode [ ] > ( ) ; // first pass , collect nodes super . visitClass ( classNode ) ; // second pass , call visit on all of the collected nodes for ( ASTNode [ ] node : targetNodes ) { for ( ASTTransformation snt : transforms . get ( node [ 0 ] ) ) { if ( snt instanceof CompilationUnitAware ) { ( ( CompilationUnitAware ) snt ) . setCompilationUnit ( context . 
getCompilationUnit ( ) ) ; } snt . visit ( node , source ) ; } } }
public class Resolver { /** * Implementation of { @ link URIResolver # resolve ( String , String ) } . */ public Source resolve ( String pHref , String pBase ) throws TransformerException { } }
// Delegates to the wrapped URIResolver first. When XInclude support is enabled,
// the delegate's resolved systemId is re-parsed into a URL so an XInclude-aware
// SAX source can be built from it; otherwise the delegate's Source is returned
// untouched. If the delegate yields nothing, falls back to local resolution of
// pHref alone (the in-code comment notes this ignores pBase for legacy reasons).
// Any URL parse, I/O, SAX, or parser-configuration failure is wrapped in a
// TransformerException; an unresolvable href yields null.
URL url = null ; final Source source = resolver . resolve ( pHref , pBase ) ; if ( source != null ) { if ( xincludeAware ) { /* * Avoid risky cast use correct resolved systemid * to configure a xinclude aware source . */ try { url = new URI ( source . getSystemId ( ) ) . toURL ( ) ; } catch ( Exception e ) { throw new TransformerException ( e ) ; } } else { return source ; } } if ( null == url ) { url = resolve ( pHref ) ; // probably should call new method resolve ( String , URI ) but left alone for legacy reasons . } if ( url != null ) { try { return asSaxSource ( asInputSource ( url ) ) ; } catch ( IOException e ) { throw new TransformerException ( e ) ; } catch ( SAXException e ) { throw new TransformerException ( e ) ; } catch ( ParserConfigurationException e ) { throw new TransformerException ( e ) ; } } return null ;
public class LimitHelper { /** * Retrieve the indicated first row for pagination * @ param selection The row selection options * @ return The first row */ public static int getFirstRow ( RowSelection selection ) { } }
return ( selection == null || selection . getFirstRow ( ) == null ) ? 0 : selection . getFirstRow ( ) ;
public class TypicalFaicliApiFailureHook { protected FaicliUnifiedFailureResult createFailureResult ( FaicliUnifiedFailureType failureType , ApiFailureResource resource , RuntimeException cause ) { } }
// Builds the unified failure result in three steps: derive the
// property -> messages map from the resource and cause, convert that map into
// error parts, and wrap the parts together with the failure type.
final Map < String , List < String > > propertyMessageMap = extractPropertyMessageMap ( resource , cause ) ; final List < FaicliFailureErrorPart > errors = toErrors ( resource , propertyMessageMap ) ; return newUnifiedFailureResult ( failureType , errors ) ;
public class FileDownloadNotificationHelper { /** * Show the notification with indeterminate progress . * @ param id The download id . * @ param status { @ link FileDownloadStatus } */ public void showIndeterminate ( final int id , int status ) { } }
final BaseNotificationItem notification = get ( id ) ; if ( notification == null ) { return ; } notification . updateStatus ( status ) ; notification . show ( false ) ;
public class ZoomableDraweeView { /** * Sets a custom zoomable controller , instead of using the default one . */ public void setZoomableController ( ZoomableController zoomableController ) { } }
// Swap the controller: detach the shared listener from the old controller
// BEFORE replacing the reference, then attach it to the new one. The order
// guarantees the outgoing controller can no longer fire events into this view.
// The new controller must be non-null (precondition-checked).
Preconditions . checkNotNull ( zoomableController ) ; mZoomableController . setListener ( null ) ; mZoomableController = zoomableController ; mZoomableController . setListener ( mZoomableListener ) ;
public class JerseyConnector { /** * This method is used to lazy - initialize the connector on the first request . * The method uses the request object to obtain a reference to the client making the request . The configuration of * the client is then inspected to determine if a proxy is needed and how to handle invalid or expired certificates * when the connection is over HTTPS . The logger that was setup for the client is also used to set up the logging * filter and to log any messages from the connector . Additional configuration settings could be used in the future . * @ throws NoSuchAlgorithmException * @ throws KeyManagementException */ private synchronized < T > void connect ( OpenStackRequest < T > request ) throws KeyManagementException , NoSuchAlgorithmException { } }
// Lazy, synchronized, one-shot client setup (returns immediately once `client`
// is non-null). Visible steps:
//  1. read properties + logger from the request's OpenStackClient; wrap the
//     logger in a password-masking logging filter;
//  2. compile the comma-separated trusted-host list into regex Patterns,
//     translating glob characters in place: '*' -> ".*", '+' -> ".+",
//     '.' -> "\\." (used later by the hostname verifier);
//  3. parse the optional proxy host/port: missing port -> default proxy port,
//     non-numeric or out-of-range port -> logged error and direct connection;
//  4. for https endpoints with a non-empty trusted-host list, install the
//     custom hostname verifier and SSL context via HTTPSProperties;
//  5. register the Jackson/JAXB, OpenStack object-mapper, and String providers;
//  6. create the Jersey client — through an HttpURLConnection factory that
//     lazily builds the Proxy when proxying, or a plain direct client
//     otherwise (proxyHost is cleared in the direct case).
/* * If we ' ve already set up the client , return it . Otherwise , set it up and cache the client for subsequent use . */ if ( client != null ) { return ; } OpenStackClient osClient = request . getOpenStackClient ( ) ; Properties properties = osClient . getProperties ( ) ; logger = osClient . getLogger ( ) ; filter = new LoggingFilter ( new PasswordFilter ( logger ) ) ; ClientConfig clientConfig = new DefaultClientConfig ( ) ; /* * Process the trusted hosts list if provided . In this case , we convert each entry in the comma - delimited list * to a regular expression pattern and cache the patterns . We will use the patterns in the host name verifier to * determine if the host is trusted or not . */ String temp = properties . getProperty ( Constants . TRUST_HOST_LIST ) ; if ( temp != null && temp . length ( ) > 0 ) { String [ ] tokens = temp . split ( "," ) ; for ( String token : tokens ) { if ( token != null && token . length ( ) > 0 ) { StringBuffer buffer = new StringBuffer ( token ) ; for ( int index = 0 ; index < buffer . length ( ) ; index ++ ) { switch ( buffer . charAt ( index ) ) { // Convert * to . * case '*' : buffer . insert ( index , "." ) ; index ++ ; break ; // Convert + to . + case '+' : buffer . insert ( index , "." ) ; index ++ ; break ; // Convert . to \ . ( escaped period ) case '.' : buffer . replace ( index , index + 1 , "\\." ) ; index ++ ; break ; } } trustedHostPatterns . add ( Pattern . compile ( buffer . toString ( ) ) ) ; } } } /* * Process the proxy if defined . */ proxyHost = properties . getProperty ( Constants . PROXY_HOST ) ; if ( proxyHost != null && proxyHost . length ( ) > 0 ) { temp = properties . getProperty ( Constants . PROXY_PORT ) ; if ( temp != null ) { temp = temp . trim ( ) ; } if ( temp == null || temp . length ( ) == 0 ) { proxyPort = DEFAULT_PROXY_PORT ; proxyConnection = true ; } else { try { proxyPort = Integer . valueOf ( temp ) ; } catch ( NumberFormatException e ) { logger . error ( String . 
format ( "Invalid port \"%s\" specified for proxy host \"%s\", a direct " + "connection will be used" , temp , proxyHost ) ) ; } if ( proxyPort > 0 && proxyPort < 65536 ) { proxyConnection = true ; } else { logger . error ( String . format ( "Invalid port \"%d\" specified for proxy host \"%s\", a direct " + "connection will be used" , proxyPort , proxyHost ) ) ; } } } /* * If this is an HTTPS connection , and a trusted hosts list has been provided , then we also need to set up the * host name verifier and allow all certificates and just allow or disallow based on trusted hosts */ if ( request . endpoint ( ) . startsWith ( "https" ) && ! trustedHostPatterns . isEmpty ( ) ) { clientConfig . getProperties ( ) . put ( HTTPSProperties . PROPERTY_HTTPS_PROPERTIES , new HTTPSProperties ( getHostnameVerifier ( ) , getSSLContext ( ) ) ) ; } /* * Finally , add the object mapper classes */ clientConfig . getClasses ( ) . add ( JacksonJaxbJsonProvider . class ) ; clientConfig . getClasses ( ) . add ( OpenStackObjectMapper . class ) ; clientConfig . getClasses ( ) . add ( StringProvider . class ) ; /* * And create the client either with or without proxy */ if ( proxyConnection ) { logger . debug ( String . format ( "Connecting using proxy %s:%d" , proxyHost , proxyPort ) ) ; client = new Client ( new URLConnectionClientHandler ( new HttpURLConnectionFactory ( ) { Proxy p = null ; @ Override public HttpURLConnection getHttpURLConnection ( URL url ) throws IOException { if ( p == null ) { if ( proxyHost != null && proxyPort != 0 ) { p = new Proxy ( Proxy . Type . HTTP , new InetSocketAddress ( proxyHost , proxyPort ) ) ; } else { p = Proxy . NO_PROXY ; } } return ( HttpURLConnection ) url . openConnection ( p ) ; } } ) , clientConfig ) ; } else { logger . debug ( String . format ( "Direct connection to %s" , request . endpoint ( ) ) ) ; proxyHost = null ; client = Client . create ( clientConfig ) ; }
public class AnalysisScreen { /** * Get the matching basis field given the summary field . * Override this if you don ' t want the defaults . * @ param fldSummary The summary field to match . * @ param recBasic The basis record . * @ param iSummarySeq The position in the summary record . * @ return The basis field . */ public BaseField getBasisField ( BaseField fldSummary , Record recBasis , int iSummarySeq ) { } }
// Resolves the basis field for a summary field by name. Qualified names
// ("record.field") look the field up on the named record — except that an
// empty prefix (".field") or the literal prefix "ScreenRecord" redirects the
// lookup to the screen record. Unqualified names are looked up directly on the
// basis record. iSummarySeq is unused in this default implementation.
BaseField fldBasis = null ; String strFieldName = fldSummary . getFieldName ( ) ; if ( ( strFieldName != null ) && ( strFieldName . indexOf ( '.' ) != - 1 ) ) { Record record = this . getRecord ( strFieldName . substring ( 0 , strFieldName . indexOf ( '.' ) ) ) ; if ( ( strFieldName . indexOf ( '.' ) == 0 ) || ( "ScreenRecord" . equalsIgnoreCase ( strFieldName . substring ( 0 , strFieldName . indexOf ( '.' ) ) ) ) ) record = this . getScreenRecord ( ) ; fldBasis = record . getField ( strFieldName . substring ( strFieldName . indexOf ( '.' ) + 1 ) ) ; } else fldBasis = recBasis . getField ( strFieldName ) ; return fldBasis ;
public class PasswordEditText { /** * Returns the helper text , which corresponds to a specific password strength . * @ param score * The password strength as a { @ link Float } value between 0.0 and 1.0 , which represents * the fraction of constraints , which are satisfied * @ return The helper text as an instance of the type { @ link CharSequence } or null , if no helper * text for the given password strength is available */ private CharSequence getHelperText ( final float score ) { } }
if ( ! helperTexts . isEmpty ( ) ) { float interval = 1.0f / helperTexts . size ( ) ; int index = ( int ) Math . floor ( score / interval ) - 1 ; index = Math . max ( index , 0 ) ; index = Math . min ( index , helperTexts . size ( ) - 1 ) ; return helperTexts . get ( index ) ; } return null ;
public class BoxDeveloperEditionAPIConnection { /** * Creates a new Box Developer Edition connection with App User token . * @ param userId the user ID to use for an App User . * @ param clientId the client ID to use when exchanging the JWT assertion for an access token . * @ param clientSecret the client secret to use when exchanging the JWT assertion for an access token . * @ param encryptionPref the encryption preferences for signing the JWT . * @ return a new instance of BoxAPIConnection . * @ deprecated Use the version of this method that accepts an IAccessTokenCache to prevent unneeded * requests to Box for access tokens . */ @ Deprecated public static BoxDeveloperEditionAPIConnection getAppUserConnection ( String userId , String clientId , String clientSecret , JWTEncryptionPreferences encryptionPref ) { } }
// Builds an App User (USER entity) connection and authenticates it immediately,
// so the returned connection is ready to use. Deprecated in favor of the
// IAccessTokenCache overload, which avoids re-requesting tokens from Box.
BoxDeveloperEditionAPIConnection connection = new BoxDeveloperEditionAPIConnection ( userId , DeveloperEditionEntityType . USER , clientId , clientSecret , encryptionPref ) ; connection . authenticate ( ) ; return connection ;
public class RoundLcdClockSkin { /** * * * * * * Canvas * * * * * */ private void drawForeground ( final ZonedDateTime TIME ) { } }
// Redraws the foreground canvas for the given time: clears the whole canvas,
// then — each part only when enabled on the clock model —
//  - the digital time, centered: the US locale uses the AM/PM formatter with a
//    smaller font (0.17 * size), every other locale the 24h formatter (0.2 * size);
//  - the date, below center (y = 0.65 * size) in the date color;
//  - the alarm icon, when alarms are enabled and at least one alarm exists.
foregroundCtx . clearRect ( 0 , 0 , size , size ) ; Locale locale = clock . getLocale ( ) ; // draw the time if ( clock . isTextVisible ( ) ) { foregroundCtx . setFill ( textColor ) ; foregroundCtx . setTextBaseline ( VPos . CENTER ) ; foregroundCtx . setTextAlign ( TextAlignment . CENTER ) ; if ( Locale . US == locale ) { foregroundCtx . setFont ( Fonts . digital ( 0.17 * size ) ) ; foregroundCtx . fillText ( AMPM_TIME_FORMATTER . format ( TIME ) , center , center ) ; } else { foregroundCtx . setFont ( Fonts . digital ( 0.2 * size ) ) ; foregroundCtx . fillText ( TIME_FORMATTER . format ( TIME ) , center , center ) ; } } // draw the date if ( clock . isDateVisible ( ) ) { foregroundCtx . setFill ( dateColor ) ; foregroundCtx . setFont ( Fonts . digital ( 0.09 * size ) ) ; foregroundCtx . fillText ( dateFormat . format ( TIME ) , center , size * 0.65 ) ; } // draw the alarmOn icon if ( clock . isAlarmsEnabled ( ) && clock . getAlarms ( ) . size ( ) > 0 ) { foregroundCtx . setFill ( alarmColor ) ; drawAlarmIcon ( foregroundCtx , foregroundCtx . getFill ( ) ) ; }
public class ClassInfo { /** * Returns information on all visible fields declared by this class , or by its superclasses . See also : * < ul > * < li > { @ link # getFieldInfo ( String ) } * < li > { @ link # getDeclaredFieldInfo ( String ) } * < li > { @ link # getDeclaredFieldInfo ( ) } * < / ul > * Requires that { @ link ClassGraph # enableFieldInfo ( ) } be called before scanning , otherwise throws * { @ link IllegalArgumentException } . * By default only returns information for public methods , unless { @ link ClassGraph # ignoreFieldVisibility ( ) } was * called before the scan . * @ return the list of FieldInfo objects for visible fields of this class or its superclases , or the empty list * if no fields were found or visible . * @ throws IllegalArgumentException * if { @ link ClassGraph # enableFieldInfo ( ) } was not called prior to initiating the scan . */ public FieldInfoList getFieldInfo ( ) { } }
if ( ! scanResult . scanSpec . enableFieldInfo ) { throw new IllegalArgumentException ( "Please call ClassGraph#enableFieldInfo() before #scan()" ) ; } // Implement field overriding final FieldInfoList fieldInfoList = new FieldInfoList ( ) ; final Set < String > fieldNameSet = new HashSet < > ( ) ; for ( final ClassInfo ci : getOverrideOrder ( ) ) { for ( final FieldInfo fi : ci . getDeclaredFieldInfo ( ) ) { // If field has not been overridden by field of same name if ( fieldNameSet . add ( fi . getName ( ) ) ) { // Add field to output order fieldInfoList . add ( fi ) ; } } } return fieldInfoList ;
public class Apptentive { /** * Add a custom data String to the Device . Custom data will be sent to the server , is displayed * in the Conversation view , and can be used in Interaction targeting . Calls to this method are * idempotent . * @ param key The key to store the data under . * @ param value A String value . */ public static void addCustomDeviceData ( final String key , final String value ) { } }
// Asynchronous: queues the write on the conversation dispatch queue (tagged
// "add custom device data" for diagnostics). The value is trimmed before being
// stored under the key in the device's custom-data map; the task reports success.
dispatchConversationTask ( new ConversationDispatchTask ( ) { @ Override protected boolean execute ( Conversation conversation ) { conversation . getDevice ( ) . getCustomData ( ) . put ( key , trim ( value ) ) ; return true ; } } , "add custom device data" ) ;
public class DefaultRdfStream { /** * Create an RdfStream from an existing Model . * @ param node The subject node * @ param model An input Model * @ return a new RdfStream object */ public static RdfStream fromModel ( final Node node , final Model model ) { } }
// Wraps the model's statement iterator in a sequential, unknown-size,
// IMMUTABLE-characteristic stream and maps each Statement to its Triple form.
// The statements are consumed lazily as the returned stream is traversed.
return new DefaultRdfStream ( node , stream ( spliteratorUnknownSize ( model . listStatements ( ) , IMMUTABLE ) , false ) . map ( Statement :: asTriple ) ) ;
public class AdminListUserAuthEventsResult { /** * The response object . It includes the < code > EventID < / code > , < code > EventType < / code > , < code > CreationDate < / code > , * < code > EventRisk < / code > , and < code > EventResponse < / code > . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAuthEvents ( java . util . Collection ) } or { @ link # withAuthEvents ( java . util . Collection ) } if you want to * override the existing values . * @ param authEvents * The response object . It includes the < code > EventID < / code > , < code > EventType < / code > , * < code > CreationDate < / code > , < code > EventRisk < / code > , and < code > EventResponse < / code > . * @ return Returns a reference to this object so that method calls can be chained together . */ public AdminListUserAuthEventsResult withAuthEvents ( AuthEventType ... authEvents ) { } }
if ( this . authEvents == null ) { setAuthEvents ( new java . util . ArrayList < AuthEventType > ( authEvents . length ) ) ; } for ( AuthEventType ele : authEvents ) { this . authEvents . add ( ele ) ; } return this ;
public class RowIndexSearcher { /** * { @ inheritDoc } */ @ Override public IndexExpression highestSelectivityPredicate ( List < IndexExpression > clause , boolean trace ) { } }
for ( IndexExpression expression : clause ) { ByteBuffer columnName = expression . column ; boolean sameName = indexedColumnName . equals ( columnName ) ; if ( expression . operator . equals ( EQ ) && sameName ) { return expression ; } } return null ;
public class DOTranslationUtility { /** * Make URLs that are relative to the local Fedora repository ABSOLUTE URLs . * First , see if any URLs are expressed in relative URL syntax ( beginning * with " fedora / get " or " fedora / search " ) and convert these to the special * Fedora local URL syntax ( " http : / / local . fedora . server / . . . " ) . Then look for * all URLs that contain the special Fedora local URL syntax and replace * instances of this string with the actual host : port configured for the * repository . This ensures that all forms of relative repository URLs are * converted to proper absolute URLs that reference the hostname : port of the * local Fedora repository . Examples : * " http : / / local . fedora . server / fedora / get / demo : 1 / DS1 " is converted to * " http : / / myrepo . com : 8080 / fedora / get / demo : 1 / DS1 " " fedora / get / demo : 1 / DS1 " is * converted to " http : / / myrepo . com : 8080 / fedora / get / demo : 1 / DS1" * " http : / / local . fedora . server / fedora / get / demo : 1 / sdef : 1 / getFoo ? in = " * http : / / local . fedora . server / fedora / get / demo : 2 / DC " is converted to * " http : / / myrepo . com : 8080 / fedora / get / demo : 1 / sdef : 1 / getFoo ? in = " * http : / / myrepo . com : 8080 / fedora / get / demo : 2 / DC " * @ param input * @ return String with all relative repository URLs and Fedora local URLs * converted to absolute URL syntax . */ public String makeAbsoluteURLs ( String input ) { } }
// Two regex substitution passes over the whole input: the first replaces
// app-context-style "local.fedora.server" URLs with the configured host:port
// plus app context (m_hostContextInfo); the second replaces the remaining
// local-server URLs with the plain host:port (m_hostInfo). Pass order matters:
// the app-context pattern must run first so the generic pattern does not
// consume its matches. Logs input/output at debug level for traceability.
String output = input ; // First pass : convert fedora app context URLs via variable substitution output = m_fedoraLocalAppContextPattern . matcher ( output ) . replaceAll ( m_hostContextInfo ) ; // Second pass : convert non - fedora - app - context URLs via variable substitution output = m_fedoraLocalPattern . matcher ( output ) . replaceAll ( m_hostInfo ) ; logger . debug ( "makeAbsoluteURLs: input={}, output={}" , input , output ) ; return output ;
public class ListAcceptedPortfolioSharesResult { /** * Information about the portfolios . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPortfolioDetails ( java . util . Collection ) } or { @ link # withPortfolioDetails ( java . util . Collection ) } if you * want to override the existing values . * @ param portfolioDetails * Information about the portfolios . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListAcceptedPortfolioSharesResult withPortfolioDetails ( PortfolioDetail ... portfolioDetails ) { } }
if ( this . portfolioDetails == null ) { setPortfolioDetails ( new java . util . ArrayList < PortfolioDetail > ( portfolioDetails . length ) ) ; } for ( PortfolioDetail ele : portfolioDetails ) { this . portfolioDetails . add ( ele ) ; } return this ;
public class GroupImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setGuardCondition ( Condition newGuardCondition ) { } }
// EMF-generated containment setter — do not hand-edit (@generated).
// When the value actually changes: remove the inverse reference from the old
// guard condition, add it to the new one, perform the basic set, then dispatch
// the accumulated notification chain. When old == new, a SET notification is
// still emitted if any adapters are listening.
if ( newGuardCondition != guardCondition ) { NotificationChain msgs = null ; if ( guardCondition != null ) msgs = ( ( InternalEObject ) guardCondition ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - XtextPackage . GROUP__GUARD_CONDITION , null , msgs ) ; if ( newGuardCondition != null ) msgs = ( ( InternalEObject ) newGuardCondition ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - XtextPackage . GROUP__GUARD_CONDITION , null , msgs ) ; msgs = basicSetGuardCondition ( newGuardCondition , msgs ) ; if ( msgs != null ) msgs . dispatch ( ) ; } else if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , XtextPackage . GROUP__GUARD_CONDITION , newGuardCondition , newGuardCondition ) ) ;
public class WebApp { /** * createServlet only called when adding / creating programmatically */ @ Override public < T extends Servlet > T createServlet ( Class < T > classToCreate ) throws ServletException { } }
logger . logp ( Level . FINE , CLASS_NAME , "createServlet" , "called for class: " + classToCreate ) ; return super . createServlet ( classToCreate ) ;
public class Terminals {
    /**
     * Returns a {@link Terminals} object for lexing and parsing the operators with names specified in
     * {@code ops}, and for lexing and parsing the keywords case sensitively. Parsers for operators
     * and keywords can be obtained through {@link #token}; parsers for identifiers through
     * {@link #identifier}.
     *
     * <p>In detail, keywords and operators are lexed as {@link Tokens.Fragment} with
     * {@link Tag#RESERVED} tag. Words that are not among {@code keywords} are lexed as
     * {@code Fragment} with {@link Tag#IDENTIFIER} tag.
     *
     * @param wordScanner the scanner that returns a word in the language.
     * @param ops the operator names.
     * @param keywords the keyword names.
     * @return the Terminals instance.
     *
     * @deprecated Use {@code operators(ops).words(wordScanner).keywords(keywords).build()} instead.
     */
    @Deprecated
    public static Terminals caseSensitive(Parser<String> wordScanner, String[] ops, String[] keywords) {
        // Delegates straight to the builder chain this method is deprecated in favor of.
        return operators(ops).words(wordScanner).keywords(keywords).build();
    }
}
public class SFTrustManager { /** * Gets OCSP URLs associated with the certificate . * @ param bcCert Bouncy Castle Certificate * @ return a set of OCSP URLs */ private Set < String > getOcspUrls ( Certificate bcCert ) { } }
TBSCertificate bcTbsCert = bcCert . getTBSCertificate ( ) ; Extensions bcExts = bcTbsCert . getExtensions ( ) ; if ( bcExts == null ) { throw new RuntimeException ( "Failed to get Tbs Certificate." ) ; } Set < String > ocsp = new HashSet < > ( ) ; for ( Enumeration en = bcExts . oids ( ) ; en . hasMoreElements ( ) ; ) { ASN1ObjectIdentifier oid = ( ASN1ObjectIdentifier ) en . nextElement ( ) ; Extension bcExt = bcExts . getExtension ( oid ) ; if ( bcExt . getExtnId ( ) == Extension . authorityInfoAccess ) { // OCSP URLS are included in authorityInfoAccess DLSequence seq = ( DLSequence ) bcExt . getParsedValue ( ) ; for ( ASN1Encodable asn : seq ) { ASN1Encodable [ ] pairOfAsn = ( ( DLSequence ) asn ) . toArray ( ) ; if ( pairOfAsn . length == 2 ) { ASN1ObjectIdentifier key = ( ASN1ObjectIdentifier ) pairOfAsn [ 0 ] ; if ( key == OIDocsp ) { // ensure OCSP and not CRL GeneralName gn = GeneralName . getInstance ( pairOfAsn [ 1 ] ) ; ocsp . add ( gn . getName ( ) . toString ( ) ) ; } } } } } return ocsp ;
public class SpringAopHelper { /** * 获取 目标对象 * @ param proxy 代理对象 * @ return * @ throws Exception */ public static Object getTarget ( Object proxy ) throws Exception { } }
if ( ! AopUtils . isAopProxy ( proxy ) ) { return proxy ; // 不是代理对象 } if ( AopUtils . isJdkDynamicProxy ( proxy ) ) { Object object = getJdkDynamicProxyTargetObject ( proxy ) ; return getTarget ( object ) ; } else { // cglib Object object = getCglibProxyTargetObject ( proxy ) ; return getTarget ( object ) ; }
public class BigIntegerMapper { /** * { @ inheritDoc } */ @ Override protected String doBase ( String name , Object value ) { } }
// Parse big decimal String svalue = value . toString ( ) ; BigInteger bi ; try { bi = new BigInteger ( svalue ) ; } catch ( NumberFormatException e ) { throw new IndexException ( "Field '{}' requires a base 10 integer, but found '{}'" , name , svalue ) ; } // Check size if ( bi . abs ( ) . toString ( ) . length ( ) > digits ) { throw new IndexException ( "Field '{}' with value '{}' has more than %d digits" , name , value , digits ) ; } // Map bi = bi . add ( complement ) ; String bis = encode ( bi ) ; return StringUtils . leftPad ( bis , hexDigits + 1 , '0' ) ;
public class AmazonRoute53ResolverClient { /** * Associates a resolver rule with a VPC . When you associate a rule with a VPC , Resolver forwards all DNS queries * for the domain name that is specified in the rule and that originate in the VPC . The queries are forwarded to the * IP addresses for the DNS resolvers that are specified in the rule . For more information about rules , see * < a > CreateResolverRule < / a > . * @ param associateResolverRuleRequest * @ return Result of the AssociateResolverRule operation returned by the service . * @ throws ResourceNotFoundException * The specified resource doesn ' t exist . * @ throws InvalidRequestException * The request is invalid . * @ throws InvalidParameterException * One or more parameters in this request are not valid . * @ throws ResourceUnavailableException * The specified resource isn ' t available . * @ throws ResourceExistsException * The resource that you tried to create already exists . * @ throws InternalServiceErrorException * We encountered an unknown error . Try again in a few minutes . * @ throws ThrottlingException * The request was throttled . Try again in a few minutes . * @ sample AmazonRoute53Resolver . AssociateResolverRule * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / route53resolver - 2018-04-01 / AssociateResolverRule " * target = " _ top " > AWS API Documentation < / a > */ @ Override public AssociateResolverRuleResult associateResolverRule ( AssociateResolverRuleRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAssociateResolverRule ( request ) ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcRelVoidsElement ( ) { } }
if ( ifcRelVoidsElementEClass == null ) { ifcRelVoidsElementEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 563 ) ; } return ifcRelVoidsElementEClass ;
public class FullMappingPropertiesBasedBundlesHandlerFactory {
    /**
     * Returns the list of joinable resource bundles declared in the given
     * properties, then wires up the declared inter-bundle dependencies.
     *
     * @param properties the properties
     * @return the list of joinable resource bundles
     */
    public List<JoinableResourceBundle> getResourceBundles(Properties properties) {
        PropertiesConfigHelper props = new PropertiesConfigHelper(properties, resourceType);
        String fileExtension = "." + resourceType;
        // Initialize custom bundles
        List<JoinableResourceBundle> customBundles = new ArrayList<>();
        // Check if we should use the bundle names property or
        // find the bundle name using the bundle id declaration:
        // jawr.<type>.bundle.<name>.id
        if (null != props.getProperty(PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_NAMES)) {
            // Comma-separated explicit bundle name list.
            StringTokenizer tk = new StringTokenizer(props.getProperty(PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_NAMES), ",");
            while (tk.hasMoreTokens()) {
                customBundles.add(buildJoinableResourceBundle(props, tk.nextToken().trim(), fileExtension, rsReaderHandler));
            }
        } else {
            // Names discovered from per-bundle id declarations.
            Iterator<String> bundleNames = props.getPropertyBundleNameSet().iterator();
            while (bundleNames.hasNext()) {
                customBundles.add(buildJoinableResourceBundle(props, bundleNames.next(), fileExtension, rsReaderHandler));
            }
        }
        // Initialize the bundles dependencies — second pass, so that every
        // referenced bundle already exists in customBundles.
        Iterator<String> bundleNames = props.getPropertyBundleNameSet().iterator();
        while (bundleNames.hasNext()) {
            String bundleName = (String) bundleNames.next();
            List<String> bundleNameDependencies = props.getCustomBundlePropertyAsList(bundleName, PropertiesBundleConstant.BUNDLE_FACTORY_CUSTOM_DEPENDENCIES);
            if (!bundleNameDependencies.isEmpty()) {
                JoinableResourceBundle bundle = getBundleFromName(bundleName, customBundles);
                List<JoinableResourceBundle> bundleDependencies = getBundlesFromName(bundleNameDependencies, customBundles);
                bundle.setDependencies(bundleDependencies);
            }
        }
        return customBundles;
    }
}
public class DFAState { /** * Optimizes transition by merging ranges */ void optimizeTransitions ( ) { } }
HashMap < DFAState < T > , RangeSet > hml = new HashMap < > ( ) ; for ( Transition < DFAState < T > > t : transitions . values ( ) ) { RangeSet rs = hml . get ( t . getTo ( ) ) ; if ( rs == null ) { rs = new RangeSet ( ) ; hml . put ( t . getTo ( ) , rs ) ; } rs . add ( t . getCondition ( ) ) ; } transitions . clear ( ) ; for ( DFAState < T > dfa : hml . keySet ( ) ) { RangeSet rs = RangeSet . merge ( hml . get ( dfa ) ) ; for ( CharRange r : rs ) { addTransition ( r , dfa ) ; } }
public class Config { /** * Returns the ScheduledExecutorConfig for the given name , creating one * if necessary and adding it to the collection of known configurations . * The configuration is found by matching the configuration name * pattern to the provided { @ code name } without the partition qualifier * ( the part of the name after { @ code ' @ ' } ) . * If no configuration matches , it will create one by cloning the * { @ code " default " } configuration and add it to the configuration * collection . * This method is intended to easily and fluently create and add * configurations more specific than the default configuration without * explicitly adding it by invoking * { @ link # addScheduledExecutorConfig ( ScheduledExecutorConfig ) } . * Because it adds new configurations if they are not already present , * this method is intended to be used before this config is used to * create a hazelcast instance . Afterwards , newly added configurations * may be ignored . * @ param name name of the scheduled executor config * @ return the scheduled executor configuration * @ throws ConfigurationException if ambiguous configurations are found * @ see StringPartitioningStrategy # getBaseName ( java . lang . String ) * @ see # setConfigPatternMatcher ( ConfigPatternMatcher ) * @ see # getConfigPatternMatcher ( ) */ public ScheduledExecutorConfig getScheduledExecutorConfig ( String name ) { } }
return ConfigUtils . getConfig ( configPatternMatcher , scheduledExecutorConfigs , name , ScheduledExecutorConfig . class ) ;
public class SmartLdapGroupStore { /** * Return an UnsupportedOperationException ! * @ param entityType * @ return * @ throws GroupsException */ @ Override public IEntityGroup newInstance ( Class entityType ) throws GroupsException { } }
log . warn ( "Unsupported method accessed: SmartLdapGroupStore.newInstance" ) ; throw new UnsupportedOperationException ( UNSUPPORTED_MESSAGE ) ;
public class ServiceManagerSparql { /** * Obtains a list of service URIs with all the services known to the system * @ return list of URIs with all the services in the registry */ @ Override public Set < URI > listServices ( ) { } }
String queryStr = new StringBuilder ( ) . append ( "SELECT DISTINCT ?svc WHERE { \n" ) . append ( " GRAPH ?g { \n" ) . append ( "?svc " ) . append ( "<" ) . append ( RDF . type . getURI ( ) ) . append ( ">" ) . append ( " " ) . append ( "<" ) . append ( MSM . Service . getURI ( ) ) . append ( "> . \n" ) . append ( " } \n" ) . append ( "} \n" ) . toString ( ) ; return this . graphStoreManager . listResourcesByQuery ( queryStr , "svc" ) ;
public class Shutdown {
    /**
     * The actual shutdown sequence is defined here.
     *
     * If it weren't for runFinalizersOnExit, this would be simple -- we'd just
     * run the hooks and then halt. Instead we need to keep track of whether
     * we're running hooks or finalizers. In the latter case a finalizer could
     * invoke exit(1) to cause immediate termination, while in the former case
     * any further invocations of exit(n), for any n, simply stall. Note that
     * if on-exit finalizers are enabled they're run iff the shutdown is
     * initiated by an exit(0); they're never run on exit(n) for n != 0 or in
     * response to SIGINT, SIGTERM, etc.
     */
    private static void sequence() {
        synchronized (lock) {
            /* Guard against the possibility of a daemon thread invoking exit
             * after DestroyJavaVM initiates the shutdown sequence */
            if (state != HOOKS) return;
        }
        // Hooks run outside the lock so they may call exit(n) without deadlock.
        runHooks();
        boolean rfoe;
        synchronized (lock) {
            // Advance the state machine, then snapshot the flag under the lock.
            state = FINALIZERS;
            rfoe = runFinalizersOnExit;
        }
        if (rfoe) runAllFinalizers();
    }
}
public class CommerceDiscountPersistenceImpl { /** * Returns the commerce discount with the primary key or throws a { @ link com . liferay . portal . kernel . exception . NoSuchModelException } if it could not be found . * @ param primaryKey the primary key of the commerce discount * @ return the commerce discount * @ throws NoSuchDiscountException if a commerce discount with the primary key could not be found */ @ Override public CommerceDiscount findByPrimaryKey ( Serializable primaryKey ) throws NoSuchDiscountException { } }
CommerceDiscount commerceDiscount = fetchByPrimaryKey ( primaryKey ) ; if ( commerceDiscount == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchDiscountException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return commerceDiscount ;
public class FlattenMojo { /** * This method determines if the given { @ link Repository } section is identical to what is defined from the super * POM . * @ param repo is the { @ link Repository } section to check . * @ return < code > true < / code > if maven central default configuration , < code > false < / code > otherwise . */ private static boolean isCentralRepositoryFromSuperPom ( Repository repo ) { } }
if ( repo != null ) { if ( "central" . equals ( repo . getId ( ) ) ) { RepositoryPolicy snapshots = repo . getSnapshots ( ) ; if ( snapshots != null && ! snapshots . isEnabled ( ) ) { return true ; } } } return false ;
public class RPC {
    /**
     * (Re)fires this RPC; called initially and again on each timeout.
     * Handles the local fast path, completer bookkeeping, UDP-vs-TCP send
     * strategy, and exponential retry backoff.
     */
    public synchronized RPC<V> call() {
        ++_callCnt;
        // completer will not be carried over to remote
        // add it to the RPC call.
        if (_dt.getCompleter() != null) {
            CountedCompleter cc = _dt.getCompleter();
            assert cc instanceof H2OCountedCompleter;
            boolean alreadyIn = false;
            if (_fjtasks != null)
                for (H2OCountedCompleter hcc : _fjtasks)
                    if (hcc == cc) alreadyIn = true;
            if (!alreadyIn) addCompleter((H2OCountedCompleter) cc);
            _dt.setCompleter(null);
        }
        // If running on self, just submit to queues & do locally
        if (_target == H2O.SELF) {
            assert _dt.getCompleter() == null;
            _dt.setCompleter(new H2O.H2OCallback<DTask>() {
                @Override
                public void callback(DTask dt) {
                    assert dt == _dt;
                    synchronized (RPC.this) {
                        assert !_done; // F/J guarentees called once
                        _done = true;
                        RPC.this.notifyAll();
                    }
                    doAllCompletions();
                }
                @Override
                public boolean onExceptionalCompletion(Throwable ex, CountedCompleter dt) {
                    assert dt == _dt;
                    synchronized (RPC.this) { // Might be called several times
                        if (_done) return true; // Filter down to 1st exceptional completion
                        _dt.setException(ex);
                        _done = true; // must be set as the last thing before notify, the waiting thread can wake up any at any time!
                        RPC.this.notifyAll();
                    }
                    doAllCompletions();
                    return true;
                }
            });
            H2O.submitTask(_dt);
            return this;
        }
        // Keep a global record, for awhile
        if (_target != null) _target.taskPut(_tasknum, this);
        try {
            // We could be racing timeouts-vs-replies. Blow off timeout if we have an answer.
            if (isDone()) {
                if (_target != null) _target.taskRemove(_tasknum);
                return this;
            }
            // Default strategy: (re)fire the packet and (re)start the timeout. We
            // "count" exactly 1 failure: just whether or not we shipped via TCP ever
            // once. After that we fearlessly (re)send UDP-sized packets until the
            // server replies.
            // Pack classloader/class & the instance data into the outgoing
            // AutoBuffer. If it fits in a single UDP packet, ship it. If not,
            // finish off the current AutoBuffer (which is now going TCP style), and
            // make a new UDP-sized packet. On a re-send of a TCP-sized hunk, just
            // send the basic UDP control packet.
            if (!_sentTcp) {
                // Ship the UDP packet!
                while (true) { // Retry loop for broken TCP sends
                    AutoBuffer ab = new AutoBuffer(_target);
                    try {
                        ab.putTask(UDP.udp.exec, _tasknum).put1(CLIENT_UDP_SEND).put(_dt);
                        boolean t = ab.hasTCP();
                        assert sz_check(ab) : "Resend of " + _dt.getClass() + " changes size from " + _size + " to " + ab.size() + " for task#" + _tasknum;
                        ab.close(); // Then close; send final byte
                        _sentTcp = t; // Set after close (and any other possible fail)
                        break; // Break out of retry loop
                    } catch (AutoBuffer.AutoBufferException e) {
                        // Transient send failure: log, drain the buffer, back off, retry.
                        Log.info_no_DKV(Log.Tag.Sys.WATER, "IOException during RPC call: " + e._ioe.getMessage() + ", AB=" + ab + ", for task#" + _tasknum + ", waiting and retrying...");
                        ab.drainClose();
                        try { Thread.sleep(500); } catch (InterruptedException ignore) { }
                    }
                } // end of while(true)
            } else {
                // Else it was sent via TCP in a prior attempt, and we've timed out.
                // This means the caller's ACK/answer probably got dropped and we need
                // him to resend it (or else the caller is still processing our
                // request). Send a UDP reminder - but with the CLIENT_TCP_SEND flag
                // instead of the UDP send, and no DTask (since it previously went via
                // TCP, no need to resend it).
                AutoBuffer ab = new AutoBuffer(_target).putTask(UDP.udp.exec, _tasknum);
                ab.put1(CLIENT_TCP_SEND).close();
            }
            // Double retry until we exceed existing age. This is the time to delay
            // until we try again. Note that we come here immediately on creation,
            // so the first doubling happens before anybody does any waiting. Also
            // note the generous 5sec cap: ping at least every 5 sec.
            _retry += (_retry < 5000) ? _retry : 5000;
            // Put self on the "TBD" list of tasks awaiting Timeout.
            // So: dont really 'forget' but remember me in a little bit.
            UDPTimeOutThread.PENDING.add(this);
            return this;
        } catch (Error t) {
            throw Log.err(t);
        }
    }
}
public class SwaggerExtension { /** * Associate the InterceptorBinding annotation . */ public void processBeforeBeanDiscovery ( @ Observes BeforeBeanDiscovery event , BeanManager beanManager ) { } }
event . addInterceptorBinding ( beanManager . createAnnotatedType ( AddSwaggerResources . class ) ) ; event . addAnnotatedType ( beanManager . createAnnotatedType ( SwaggerRestApplicationInterceptor . class ) , SwaggerRestApplicationInterceptor . class . getName ( ) ) ;
public class NotificationRegistry {
    /**
     * Best-effort DELETE of this notification client on the server.
     * We don't throw any errors because the connector is about to be closed;
     * all failures are logged at FINE and swallowed deliberately.
     */
    private void sendClosingSignal() {
        URL clientURL = null;
        HttpsURLConnection connection = null;
        try {
            // Get the appropriate URL to delete notification client
            if (serverConnection.serverVersion >= 4) {
                // V4+ clients use /{clientID} to delete the notification client
                clientURL = getNotificationClientURL();
            } else {
                // Pre-V4 clients use /{clientID}/inbox to delete the notification client
                clientURL = getInboxURL();
            }
            if (logger.isLoggable(Level.FINEST)) {
                logger.logp(Level.FINEST, logger.getName(), "sendClosingSignal", "Making a call to delete inbox [" + clientURL + "] from [" + RESTClientMessagesUtil.getObjID(this) + "]");
            }
            // Get connection to server
            connection = serverConnection.getConnection(clientURL, HttpMethod.DELETE, true);
            connection.setReadTimeout(serverConnection.getConnector().getReadTimeout());
            // Check response code from server; a ConnectException here just means
            // the server is already gone, which is fine during shutdown.
            int responseCode = 0;
            try {
                responseCode = connection.getResponseCode();
            } catch (ConnectException ce) {
                logger.logp(Level.FINE, logger.getName(), "sendClosingSignal", ce.getMessage(), ce);
            }
            if (logger.isLoggable(Level.FINEST)) {
                logger.logp(Level.FINEST, logger.getName(), "sendClosingSignal", "Response code: " + responseCode);
            }
        } catch (IOException io) {
            // Swallowed on purpose: closing must not fail because of the server.
            logger.logp(Level.FINE, logger.getName(), "sendClosingSignal", io.getMessage(), io);
        }
    }
}
public class SharedTorrent {
    /**
     * Peer disconnection handler.
     *
     * When a peer disconnects, we need to mark in all of the pieces it had
     * available that they can't be reached through this peer anymore, and
     * release any outstanding piece requests it held.
     *
     * @param peer The peer we got this piece from.
     */
    @Override
    public synchronized void handlePeerDisconnected(SharingPeer peer) {
        // Un-record this peer as a source for every piece it advertised.
        BitSet availablePieces = peer.getAvailablePieces();
        for (int i = availablePieces.nextSetBit(0); i >= 0; i = availablePieces.nextSetBit(i + 1)) {
            this.pieces[i].noLongerAt(peer);
        }
        // Release the pieces this peer had been asked for, so they can be
        // re-requested from other peers.
        Set<Piece> requested = peer.getRequestedPieces();
        if (requested != null) {
            for (Piece piece : requested) {
                this.requestedPieces.set(piece.getIndex(), false);
            }
        }
        myDownloaders.remove(peer);
        try {
            // May close the backing file storage if no downloader needs it anymore.
            closeFileChannelIfNecessary();
        } catch (IOException e) {
            logger.info("I/O error on attempt to close file storage: " + e.toString());
        }
        logger.debug("Peer {} went away with {} piece(s) [{}/{}].", new Object[]{peer, availablePieces.cardinality(), this.completedPieces.cardinality(), this.pieces.length});
        logger.trace("We now have {} piece(s) and {} outstanding request(s): {}", new Object[]{this.completedPieces.cardinality(), this.requestedPieces.cardinality(), this.requestedPieces});
        // Notify listeners last, after all internal state is consistent.
        eventDispatcher.multicaster().peerDisconnected(peer);
    }
}
public class ObjectWriterImpl { /** * { @ inheritDoc } */ public void writeString ( String str ) throws IOException { } }
byte [ ] bytes = str . getBytes ( Constants . DEFAULT_ENCODING ) ; writeInt ( bytes . length ) ; write ( bytes ) ;
public class LinkedList { /** * Removes all of the elements from this list . * The list will be empty after this call returns . */ public void clear ( ) { } }
// Clearing all of the links between nodes is " unnecessary " , but : // - helps a generational GC if the discarded nodes inhabit // more than one generation // - is sure to free memory even if there is a reachable Iterator for ( Node < E > x = first ; x != null ; ) { Node < E > next = x . next ; x . item = null ; x . next = null ; x . prev = null ; x = next ; } first = last = null ; size = 0 ; modCount ++ ;
public class LinearSkin {
    /**
     * Builds and wires all JavaFX nodes for this skin: sizing, canvases,
     * bar/LCD/text nodes, and the root pane. Called once during skin setup.
     */
    private void initGraphics() {
        // Set initial size: fall back to the skin's preferred defaults when the
        // control has no usable size yet.
        if (Double.compare(gauge.getPrefWidth(), 0.0) <= 0 || Double.compare(gauge.getPrefHeight(), 0.0) <= 0 ||
            Double.compare(gauge.getWidth(), 0.0) <= 0 || Double.compare(gauge.getHeight(), 0.0) <= 0) {
            if (gauge.getPrefWidth() > 0 && gauge.getPrefHeight() > 0) {
                gauge.setPrefSize(gauge.getPrefWidth(), gauge.getPrefHeight());
            } else {
                gauge.setPrefSize(preferredWidth, preferredHeight);
            }
        }
        // Bar border lines and background.
        barBorder1 = new Line();
        barBorder2 = new Line();
        barBackground = new Rectangle();
        // Canvas for tick marks and sections.
        ticksAndSectionsCanvas = new Canvas(preferredWidth, preferredHeight);
        ticksAndSections = ticksAndSectionsCanvas.getGraphicsContext2D();
        // LED indicator canvas, only shown when enabled on the gauge.
        ledCanvas = new Canvas();
        led = ledCanvas.getGraphicsContext2D();
        Helper.enableNode(ledCanvas, gauge.isLedVisible());
        // LCD value display; proportions are relative to the preferred size.
        lcd = new Rectangle(0.3 * preferredWidth, 0.014 * preferredHeight);
        lcd.setArcWidth(0.0125 * preferredHeight);
        lcd.setArcHeight(0.0125 * preferredHeight);
        lcd.relocate((preferredWidth - lcd.getWidth()) * 0.5, 0.44 * preferredHeight);
        Helper.enableNode(lcd, gauge.isLcdVisible() && gauge.isValueVisible());
        // Value bar and its optional highlight effect.
        bar = new Rectangle();
        bar.setStroke(null);
        barHighlight = new Rectangle();
        barHighlight.setStroke(null);
        Helper.enableNode(barHighlight, gauge.isBarEffectEnabled());
        // Title, unit and value texts; hidden when their source string is empty.
        titleText = new Text(gauge.getTitle());
        titleText.setFill(gauge.getTitleColor());
        Helper.enableNode(titleText, !gauge.getTitle().isEmpty());
        unitText = new Text(gauge.getUnit());
        unitText.setFill(gauge.getUnitColor());
        Helper.enableNode(unitText, !gauge.getUnit().isEmpty());
        valueText = new Text(formatNumber(gauge.getLocale(), gauge.getFormatString(), gauge.getDecimals(), gauge.getCurrentValue()));
        valueText.setFill(gauge.getValueColor());
        Helper.enableNode(valueText, gauge.isValueVisible());
        // Assemble the scene graph; z-order follows the constructor argument order.
        pane = new Pane(barBorder1, barBorder2, barBackground, ticksAndSectionsCanvas, titleText, unitText, ledCanvas, lcd, valueText, bar, barHighlight);
        pane.setBorder(new Border(new BorderStroke(gauge.getBorderPaint(), BorderStrokeStyle.SOLID, CornerRadii.EMPTY, new BorderWidths(gauge.getBorderWidth()))));
        pane.setBackground(new Background(new BackgroundFill(gauge.getBackgroundPaint(), CornerRadii.EMPTY, Insets.EMPTY)));
        getChildren().setAll(pane);
    }
}
public class SimpleRandomSampling { /** * Calculates Variance for Pbar for a finite population size * @ param pbar * @ param sampleN * @ param populationN * @ return */ public static double pbarVariance ( double pbar , int sampleN , int populationN ) { } }
if ( populationN <= 0 || sampleN <= 0 || sampleN > populationN ) { throw new IllegalArgumentException ( "All the parameters must be positive and sampleN smaller than populationN." ) ; } double f = ( double ) sampleN / populationN ; double pbarVariance = ( ( 1.0 - f ) * pbar * ( 1.0 - pbar ) ) / ( sampleN - 1.0 ) ; return pbarVariance ;