signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class FreeVarCollector {
    /**
     * If tree refers to a qualified this or super expression
     * for anything but the current class, add the outer this
     * stack as a free variable.
     */
    public void visitSelect(JCFieldAccess tree) {
        // A select of the form "C.this" or "C.super" where C is not the class
        // currently being processed means an enclosing instance is referenced:
        // record the head of the outer-this stack as a free variable.
        // NOTE(review): assumes outerThisStack.head is the innermost captured
        // enclosing instance — confirm against the enclosing visitor's setup.
        if ((tree.name == names._this || tree.name == names._super)
                && tree.selected.type.tsym != clazz
                && outerThisStack.head != null)
            visitSymbol(outerThisStack.head);
        super.visitSelect(tree);
    }
}
public class DateUtils { /** * 字符串转换为日期java . util . Date * @ param dateString * 字符串 * @ param format * 日期格式 * @ return */ public static Date stringToDate ( String dateString , String format ) { } }
return stringToDate ( dateString , format , LENIENT_DATE ) ;
public class ConfigMgrImpl { /** * 主要用于邮箱发送 * @ return */ private String getConfigUrlHtml ( Config config ) { } }
return "<br/>点击<a href='http://" + applicationPropertyConfig . getDomain ( ) + "/modifyFile.html?configId=" + config . getId ( ) + "'> 这里 </a> 进入查看<br/>" ;
public class JSONNavi { /** * get the current object value as Long if the current Object can not be * cast as Long return null . */ public Long asLongObj ( ) { } }
if ( current == null ) return null ; if ( current instanceof Number ) { if ( current instanceof Long ) return ( Long ) current ; if ( current instanceof Integer ) return Long . valueOf ( ( ( Number ) current ) . longValue ( ) ) ; return null ; } return null ;
public class PatternRuleQueryBuilder { /** * Iterate over all elements , ignore those not supported , add the other ones to a BooleanQuery . * @ throws UnsupportedPatternRuleException if no query could be created for the rule */ public Query buildRelaxedQuery ( AbstractPatternRule rule ) throws UnsupportedPatternRuleException { } }
BooleanQuery . Builder builder = new BooleanQuery . Builder ( ) ; for ( PatternToken patternToken : rule . getPatternTokens ( ) ) { try { BooleanClause clause = makeQuery ( patternToken ) ; builder . add ( clause ) ; } catch ( UnsupportedPatternRuleException e ) { // System . out . println ( " Ignoring because it ' s not supported : " + element + " : " + e ) ; // cannot handle - okay to ignore , as we may return too broad matches } catch ( Exception e ) { throw new RuntimeException ( "Could not create query for rule " + rule . getId ( ) , e ) ; } } BooleanQuery query = builder . build ( ) ; if ( query . clauses ( ) . isEmpty ( ) ) { throw new UnsupportedPatternRuleException ( "No items found in rule that can be used to build a search query: " + rule ) ; } return query ;
public class SecondsDescriptor { /** * Provide a human readable description for And instance . * @ param and - And * @ return human readable description - String */ protected String describe ( final And and ) { } }
final List < FieldExpression > expressions = new ArrayList < > ( ) ; final List < FieldExpression > onExpressions = new ArrayList < > ( ) ; for ( final FieldExpression fieldExpression : and . getExpressions ( ) ) { if ( fieldExpression instanceof On ) { onExpressions . add ( fieldExpression ) ; } else { expressions . add ( fieldExpression ) ; } } final StringBuilder builder = new StringBuilder ( ) ; if ( ! onExpressions . isEmpty ( ) ) { builder . append ( bundle . getString ( "at" ) ) ; createAndDescription ( builder , onExpressions ) ; } if ( ! expressions . isEmpty ( ) ) { createAndDescription ( builder , expressions ) ; } return builder . toString ( ) ;
public class MoverModel { /** * Mover */ @ Override public void moveLocation ( double extrp , Direction direction , Direction ... directions ) { } }
double vx = direction . getDirectionHorizontal ( ) ; double vy = direction . getDirectionVertical ( ) ; for ( final Direction current : directions ) { vx += current . getDirectionHorizontal ( ) ; vy += current . getDirectionVertical ( ) ; } setLocation ( x + vx * extrp , y + vy * extrp ) ;
public class AbstractBeanDefinition {
    /**
     * Resolves a bean for the given {@link FieldInjectionPoint}.
     * @param resolutionContext The {@link BeanResolutionContext}
     * @param context The {@link BeanContext}
     * @param injectionPoint The {@link FieldInjectionPoint}
     * @return The resolved bean
     * @throws DependencyInjectionException If the bean cannot be resolved
     */
    @SuppressWarnings("WeakerAccess")
    @Internal
    @UsedByGeneratedCode
    protected final Object getBeanForField(BeanResolutionContext resolutionContext, BeanContext context, FieldInjectionPoint injectionPoint) {
        Class beanType = injectionPoint.getType();
        // Container-typed fields (array, Collection, Stream, Provider, Optional)
        // are resolved through their dedicated lookup helpers; all other types
        // fall through to a direct context lookup in the final branch.
        if (beanType.isArray()) {
            Collection beansOfType = getBeansOfTypeForField(resolutionContext, context, injectionPoint);
            return beansOfType.toArray((Object[]) Array.newInstance(beanType.getComponentType(), beansOfType.size()));
        } else if (Collection.class.isAssignableFrom(beanType)) {
            Collection beansOfType = getBeansOfTypeForField(resolutionContext, context, injectionPoint);
            if (beanType.isInstance(beansOfType)) {
                return beansOfType;
            } else {
                // Field declares a more specific collection type: convert.
                // noinspection unchecked
                return CollectionUtils.convertCollection(beanType, beansOfType).orElse(null);
            }
        } else if (Stream.class.isAssignableFrom(beanType)) {
            return getStreamOfTypeForField(resolutionContext, context, injectionPoint);
        } else if (Provider.class.isAssignableFrom(beanType)) {
            return getBeanProviderForField(resolutionContext, context, injectionPoint);
        } else if (Optional.class.isAssignableFrom(beanType)) {
            return findBeanForField(resolutionContext, context, injectionPoint);
        } else {
            BeanResolutionContext.Path path = resolutionContext.getPath();
            // Push this field onto the resolution path so failures can report
            // the full dependency chain; pop again on every exit route.
            path.pushFieldResolve(this, injectionPoint);
            try {
                Qualifier qualifier = resolveQualifier(resolutionContext, injectionPoint.asArgument());
                @SuppressWarnings("unchecked")
                Object bean = ((DefaultBeanContext) context).getBean(resolutionContext, beanType, qualifier);
                path.pop();
                return bean;
            } catch (NoSuchBeanException e) {
                // A missing bean is tolerated only when the field is declared
                // @Nullable; otherwise surface it as an injection failure.
                if (injectionPoint.isDeclaredAnnotationPresent(Nullable.class)) {
                    path.pop();
                    return null;
                }
                throw new DependencyInjectionException(resolutionContext, injectionPoint, e);
            }
        }
    }
}
public class FileValueStorage { /** * Prepare RootDir . * @ param rootDirPath * path * @ throws IOException * if error * @ throws RepositoryConfigurationException * if confog error */ protected void prepareRootDir ( String rootDirPath ) throws IOException , RepositoryConfigurationException { } }
this . rootDir = new File ( rootDirPath ) ; if ( ! rootDir . exists ( ) ) { if ( rootDir . mkdirs ( ) ) { LOG . info ( "Value storage directory created: " + rootDir . getAbsolutePath ( ) ) ; // create internal temp dir File tempDir = new File ( rootDir , TEMP_DIR_NAME ) ; tempDir . mkdirs ( ) ; if ( tempDir . exists ( ) && tempDir . isDirectory ( ) ) { // care about storage temp dir cleanup for ( File tmpf : tempDir . listFiles ( ) ) if ( ! tmpf . delete ( ) ) LOG . warn ( "Storage temporary directory contains un-deletable file " + tmpf . getAbsolutePath ( ) + ". It's recommended to leave this directory for JCR External Values Storage private use." ) ; } else throw new RepositoryConfigurationException ( "Cannot create " + TEMP_DIR_NAME + " directory under External Value Storage." ) ; } else { LOG . warn ( "Directory IS NOT created: " + rootDir . getAbsolutePath ( ) ) ; } } else { if ( ! rootDir . isDirectory ( ) ) { throw new RepositoryConfigurationException ( "File exists but is not a directory " + rootDirPath ) ; } }
public class Mimetypes { /** * Loads MIME type info from the file ' mime . types ' in the classpath , if it ' s available . */ public synchronized static Mimetypes getInstance ( ) { } }
if ( mimetypes != null ) return mimetypes ; mimetypes = new Mimetypes ( ) ; InputStream is = mimetypes . getClass ( ) . getResourceAsStream ( "/mime.types" ) ; if ( is != null ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Loading mime types from file in the classpath: mime.types" ) ; } try { mimetypes . loadAndReplaceMimetypes ( is ) ; } catch ( IOException e ) { if ( log . isErrorEnabled ( ) ) { log . error ( "Failed to load mime types from file in the classpath: mime.types" , e ) ; } } finally { try { is . close ( ) ; } catch ( IOException ex ) { log . debug ( "" , ex ) ; } } } else { if ( log . isWarnEnabled ( ) ) { log . warn ( "Unable to find 'mime.types' file in classpath" ) ; } } return mimetypes ;
public class XImportSectionImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
switch ( featureID ) { case XtypePackage . XIMPORT_SECTION__IMPORT_DECLARATIONS : return importDeclarations != null && ! importDeclarations . isEmpty ( ) ; } return super . eIsSet ( featureID ) ;
public class DerIndefLenConverter { /** * Parse the length and if it is an indefinite length then add * the current position to the < code > ndefsList < / code > vector . */ private int parseLength ( ) throws IOException { } }
int curLen = 0 ; if ( dataPos == dataSize ) return curLen ; int lenByte = data [ dataPos ++ ] & 0xff ; if ( isIndefinite ( lenByte ) ) { ndefsList . add ( new Integer ( dataPos ) ) ; unresolved ++ ; return curLen ; } if ( isLongForm ( lenByte ) ) { lenByte &= LEN_MASK ; if ( lenByte > 4 ) { throw new IOException ( "Too much data" ) ; } if ( ( dataSize - dataPos ) < ( lenByte + 1 ) ) { throw new IOException ( "Too little data" ) ; } for ( int i = 0 ; i < lenByte ; i ++ ) { curLen = ( curLen << 8 ) + ( data [ dataPos ++ ] & 0xff ) ; } if ( curLen < 0 ) { throw new IOException ( "Invalid length bytes" ) ; } } else { curLen = ( lenByte & LEN_MASK ) ; } return curLen ;
public class JTAXAResourceImpl {
    /**
     * Write information about the resource to the transaction log.
     *
     * <p>Packs the stoken, the recovery id and the XID sequence number into a
     * single byte array and adds it to the given recoverable unit section.</p>
     *
     * @exception SystemException if the data cannot be added to the log
     */
    @Override
    public void log(RecoverableUnitSection rus) throws SystemException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "log", new Object[] { this, rus });
        if (tc.isDebugEnabled()) {
            Tr.debug(tc, "about to log stoken " + Util.toHexString(((XidImpl) _xid).getStoken()));
            Tr.debug(tc, "about to log recoveryId " + getRecoveryId());
            Tr.debug(tc, "about to log seqNo " + ((XidImpl) _xid).getSequenceNumber());
            Tr.debug(tc, "ID from pld " + _recoveryData._recoveryId);
        }
        // Log the stoken, recoveryId and the sequence number
        final byte[] stoken = ((XidImpl) _xid).getStoken();
        final int recoveryId = (int) getRecoveryId();
        final int seqNo = ((XidImpl) _xid).getSequenceNumber();
        // Layout: stoken bytes, then 4 bytes of recoveryId, then 2 bytes of seqNo.
        final byte[] data = new byte[stoken.length + 6];
        System.arraycopy(stoken, 0, data, 0, stoken.length);
        Util.setBytesFromInt(data, stoken.length, 4, recoveryId);
        Util.setBytesFromInt(data, stoken.length + 4, 2, seqNo);
        if (tc.isDebugEnabled()) {
            Tr.debug(tc, "logging stoken " + Util.toHexString(stoken));
            Tr.debug(tc, "logging recoveryId " + recoveryId);
            Tr.debug(tc, "logging seqNo " + seqNo);
            Tr.debug(tc, "Actual data logged", Util.toHexString(data));
        }
        try {
            rus.addData(data);
        } catch (Exception exc) {
            // Record the failure for first-failure data capture, then surface
            // it to the caller as a SystemException.
            FFDCFilter.processException(exc, "com.ibm.ws.Transaction.JTA.JTAXAResourceImpl.log", "326", this);
            if (tc.isEventEnabled())
                Tr.event(tc, "Exception raised adding data to the transaction log", exc);
            throw new SystemException(exc.toString());
        } finally {
            if (tc.isEntryEnabled())
                Tr.exit(tc, "log");
        }
    }
}
public class AnnotationTypeOptionalMemberWriterImpl { /** * { @ inheritDoc } */ public void addSummaryAnchor ( TypeElement typeElement , Content memberTree ) { } }
memberTree . addContent ( writer . getMarkerAnchor ( SectionName . ANNOTATION_TYPE_OPTIONAL_ELEMENT_SUMMARY ) ) ;
public class IntrospectionLevelMember { /** * Make a string that describes this object * @ param value * The value needing a description * @ return The description */ private String makeDescription ( Object value ) { } }
String answer ; // Not initialized , so the compiler tells if we miss a // case if ( value == null ) { answer = "null" ; } else if ( value instanceof String ) { answer = "\"" + value + "\"" ; } else { Class < ? > objClass = value . getClass ( ) ; if ( ( objClass == Boolean . class ) || ( objClass == Character . class ) || ( objClass == Byte . class ) || ( objClass == Short . class ) || ( objClass == Integer . class ) || ( objClass == Long . class ) || ( objClass == Float . class ) || ( objClass == Double . class ) ) { answer = value . toString ( ) ; } else if ( objClass . isArray ( ) ) { if ( objClass . getComponentType ( ) . isPrimitive ( ) ) { answer = convertSimpleArrayToString ( value ) ; } else { answer = objClass . getComponentType ( ) + "[" + Array . getLength ( value ) + "]" ; } } else { answer = value . getClass ( ) . toString ( ) + "@" + Integer . toHexString ( System . identityHashCode ( value ) ) ; } } return answer ;
public class MacroTextDecorator { /** * Get the target text and execute each text decorator on its results * @ return the text results */ public String getText ( ) { } }
// loop thru text StringText stringText = new StringText ( ) ; stringText . setText ( this . target . getText ( ) ) ; TextDecorator < Textable > textDecorator = null ; // loop thru decorator and get results from each for ( Iterator < TextDecorator < Textable > > i = textables . iterator ( ) ; i . hasNext ( ) ; ) { textDecorator = i . next ( ) ; textDecorator . setTarget ( stringText ) ; stringText . setText ( textDecorator . getText ( ) ) ; } return stringText . getText ( ) ;
public class IntStreamEx { /** * Folds the elements of this stream using the provided accumulation * function , going left to right . This is equivalent to : * < pre > * { @ code * boolean foundAny = false ; * int result = 0; * for ( int element : this stream ) { * if ( ! foundAny ) { * foundAny = true ; * result = element ; * else * result = accumulator . apply ( result , element ) ; * return foundAny ? OptionalInt . of ( result ) : OptionalInt . empty ( ) ; * < / pre > * This is a terminal operation . * This method cannot take all the advantages of parallel streams as it must * process elements strictly left to right . If your accumulator function is * associative , consider using { @ link # reduce ( IntBinaryOperator ) } method . * For parallel stream it ' s not guaranteed that accumulator will always be * executed in the same thread . * @ param accumulator a * < a href = " package - summary . html # NonInterference " > non - interfering * < / a > , < a href = " package - summary . html # Statelessness " > stateless < / a > * function for incorporating an additional element into a result * @ return the result of the folding * @ see # foldLeft ( int , IntBinaryOperator ) * @ see # reduce ( IntBinaryOperator ) * @ since 0.4.0 */ public OptionalInt foldLeft ( IntBinaryOperator accumulator ) { } }
PrimitiveBox b = new PrimitiveBox ( ) ; forEachOrdered ( t -> { if ( b . b ) b . i = accumulator . applyAsInt ( b . i , t ) ; else { b . i = t ; b . b = true ; } } ) ; return b . asInt ( ) ;
public class FactoryBuilderSupport { /** * A hook before the factory creates the node . < br > * It will call any registered preInstantiateDelegates , if you override this * method be sure to call this impl somewhere in your code . * @ param name the name of the node * @ param attributes the attributes of the node * @ param value the value argument ( s ) of the node */ protected void preInstantiate ( Object name , Map attributes , Object value ) { } }
for ( Closure preInstantiateDelegate : getProxyBuilder ( ) . getPreInstantiateDelegates ( ) ) { ( preInstantiateDelegate ) . call ( new Object [ ] { this , attributes , value } ) ; }
public class ZeroMQNetworkService { /** * Build the byte array that may be used for the ZeroMQ filtering associated with { @ link Socket # subscribe ( byte [ ] ) } . For a given contextID * ( translated into a byte array with an { @ link EventSerializer } ) , this function must always reply the same sequence of bytes . * @ param contextID * the identifier of the context . * @ return the header of the ZeroMQ message that may be used for filtering . */ private static byte [ ] buildFilterableHeader ( byte [ ] contextID ) { } }
final byte [ ] header = new byte [ Ints . BYTES + contextID . length ] ; final byte [ ] length = Ints . toByteArray ( contextID . length ) ; System . arraycopy ( length , 0 , header , 0 , length . length ) ; System . arraycopy ( contextID , 0 , header , length . length , contextID . length ) ; return header ;
public class SocksSocketFactory { /** * Set the proxy of this socket factory as described in the string * parameter * @ param proxyStr the proxy address using the format " host : port " */ private void setProxy ( String proxyStr ) { } }
String [ ] strs = proxyStr . split ( ":" , 2 ) ; if ( strs . length != 2 ) throw new RuntimeException ( "Bad SOCKS proxy parameter: " + proxyStr ) ; String host = strs [ 0 ] ; int port = Integer . parseInt ( strs [ 1 ] ) ; this . proxy = new Proxy ( Proxy . Type . SOCKS , InetSocketAddress . createUnresolved ( host , port ) ) ;
public class POICellFormatter { /** * セルの値をフォーマットする 。 * @ param poiCell フォーマット対象のセル * @ param locale ロケール * @ return フォーマットした結果 */ private CellFormatResult getCellValue ( final POICell poiCell , final Locale locale ) { } }
final short formatIndex = poiCell . getFormatIndex ( ) ; final String formatPattern = poiCell . getFormatPattern ( ) ; if ( formatterResolver . canResolve ( formatIndex ) ) { final CellFormatter cellFormatter = formatterResolver . getFormatter ( formatIndex ) ; return cellFormatter . format ( poiCell , locale ) ; } else if ( formatterResolver . canResolve ( formatPattern ) ) { final CellFormatter cellFormatter = formatterResolver . getFormatter ( formatPattern ) ; return cellFormatter . format ( poiCell , locale ) ; } else { // キャッシュに存在しない場合 final CellFormatter cellFormatter = formatterResolver . createFormatter ( formatPattern ) ; if ( isCache ( ) ) { formatterResolver . registerFormatter ( formatPattern , cellFormatter ) ; } return cellFormatter . format ( poiCell , locale ) ; }
public class XMLElement { /** * Returns children that have the specified name as a XMLElement array . * @ param name The name to search . * @ return The children . */ public XMLElement [ ] getChildren ( String name ) { } }
java . util . List < XMLElement > list = new java . util . ArrayList < XMLElement > ( ) ; for ( XMLElement child : getChildren ( ) ) { if ( child . getName ( ) . equals ( name ) ) { list . add ( child ) ; } } return ( 0 < list . size ( ) ) ? list . toArray ( new XMLElement [ 0 ] ) : null ;
public class FeedbackTransformation { /** * Adds a feedback edge . The parallelism of the { @ code StreamTransformation } must match * the parallelism of the input { @ code StreamTransformation } of this * { @ code FeedbackTransformation } * @ param transform The new feedback { @ code StreamTransformation } . */ public void addFeedbackEdge ( StreamTransformation < T > transform ) { } }
if ( transform . getParallelism ( ) != this . getParallelism ( ) ) { throw new UnsupportedOperationException ( "Parallelism of the feedback stream must match the parallelism of the original" + " stream. Parallelism of original stream: " + this . getParallelism ( ) + "; parallelism of feedback stream: " + transform . getParallelism ( ) + ". Parallelism can be modified using DataStream#setParallelism() method" ) ; } feedbackEdges . add ( transform ) ;
public class LeaderState {
    /**
     * Commits the given configuration.
     *
     * <p>Appends a configuration entry for the given members to the log,
     * applies it to the local cluster state immediately, and replicates it to
     * the rest of the cluster.</p>
     *
     * @param members the cluster membership to commit
     * @return a future completed with the commit index of the configuration entry
     */
    protected CompletableFuture<Long> configure(Collection<Member> members) {
        final long index;
        // The entry is created and released via try-with-resources; only its
        // log index escapes the block.
        try (ConfigurationEntry entry = context.getLog().create(ConfigurationEntry.class)) {
            entry.setTerm(context.getTerm()).setTimestamp(System.currentTimeMillis()).setMembers(members);
            index = context.getLog().append(entry);
            LOGGER.trace("{} - Appended {}", context.getCluster().member().address(), entry);
            // Store the index of the configuration entry in order to prevent other configurations from
            // being logged and committed concurrently. This is an important safety property of Raft.
            configuring = index;
            // Apply the configuration locally before it is committed, per Raft
            // membership-change semantics.
            context.getClusterState().configure(new Configuration(entry.getIndex(), entry.getTerm(), entry.getTimestamp(), entry.getMembers()));
        }
        return appender.appendEntries(index).whenComplete((commitIndex, commitError) -> {
            context.checkThread();
            if (isOpen()) {
                // Reset the configuration index to allow new configuration changes to be committed.
                configuring = 0;
            }
        });
    }
}
public class MapperTemplate { /** * 重新设置SqlSource * @ param ms * @ param sqlSource */ protected void setSqlSource ( MappedStatement ms , SqlSource sqlSource ) { } }
MetaObject msObject = MetaObjectUtil . forObject ( ms ) ; msObject . setValue ( "sqlSource" , sqlSource ) ;
public class StringUtilities {
    /**
     * Tests whether a String is null or contains only whitespace.
     *
     * @param input The String to test
     * @return true if input is null or contains only whitespace, and false otherwise
     */
    public static boolean isStringNullOrEmpty(final String input) {
        // A single boolean expression replaces the if/return pair.
        return input == null || input.trim().isEmpty();
    }
}
public class AmazonDirectConnectClient { /** * Lists the attachments between your Direct Connect gateways and virtual interfaces . You must specify a Direct * Connect gateway , a virtual interface , or both . If you specify a Direct Connect gateway , the response contains all * virtual interfaces attached to the Direct Connect gateway . If you specify a virtual interface , the response * contains all Direct Connect gateways attached to the virtual interface . If you specify both , the response * contains the attachment between the Direct Connect gateway and the virtual interface . * @ param describeDirectConnectGatewayAttachmentsRequest * @ return Result of the DescribeDirectConnectGatewayAttachments operation returned by the service . * @ throws DirectConnectServerException * A server - side error occurred . * @ throws DirectConnectClientException * One or more parameters are not valid . * @ sample AmazonDirectConnect . DescribeDirectConnectGatewayAttachments * @ see < a * href = " http : / / docs . aws . amazon . com / goto / WebAPI / directconnect - 2012-10-25 / DescribeDirectConnectGatewayAttachments " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeDirectConnectGatewayAttachmentsResult describeDirectConnectGatewayAttachments ( DescribeDirectConnectGatewayAttachmentsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeDirectConnectGatewayAttachments ( request ) ;
public class SingleFileRepositoryConnection { /** * Create an empty single file Repository connection . * Repository data will be stored in { @ code jsonFile } which must not exist and will be created by this method . * @ param jsonFile the location for the repository file * @ return the repository connection * @ throws IOException if { @ code jsonFile } already exists or there is a problem creating the file */ public static SingleFileRepositoryConnection createEmptyRepository ( File jsonFile ) throws IOException { } }
if ( jsonFile . exists ( ) ) { throw new IOException ( "Cannot create empty repository as the file already exists: " + jsonFile . getAbsolutePath ( ) ) ; } OutputStreamWriter writer = null ; try { writer = new OutputStreamWriter ( new FileOutputStream ( jsonFile ) , "UTF-8" ) ; writer . write ( "[]" ) ; } finally { if ( writer != null ) { writer . close ( ) ; } } return new SingleFileRepositoryConnection ( jsonFile ) ;
public class FileEventStore { /** * Gets the cache directory for the given project . Optionally creates the directory if it * doesn ' t exist . * @ param projectId The project ID . * @ return The cache directory for the project . * @ throws IOException */ private File getProjectDir ( String projectId , boolean create ) throws IOException { } }
File projectDir = new File ( getKeenCacheDirectory ( ) , projectId ) ; if ( create && ! projectDir . exists ( ) ) { KeenLogging . log ( "Cache directory for project '" + projectId + "' doesn't exist. " + "Creating it." ) ; if ( ! projectDir . mkdirs ( ) ) { throw new IOException ( "Could not create project cache directory '" + projectDir . getAbsolutePath ( ) + "'" ) ; } } return projectDir ;
public class SizeBoundedQueue { /** * For testing only - should always be the same as size ( ) */ public int queueSize ( ) { } }
return queue . stream ( ) . map ( el -> el . size ) . reduce ( 0 , ( l , r ) -> l + r ) ;
public class RoundRobinPolicy {
    /**
     * The policy uses the first fetch of worker info list as the base, and visits each of them in a
     * round-robin manner in the subsequent calls. The policy doesn't assume the list of worker info
     * in the subsequent calls has the same order from the first, and it will skip the workers that
     * are no longer active.
     *
     * @param options options
     * @return the address of the worker to write to (may be null when no
     *         eligible worker has sufficient capacity)
     */
    @Override
    @Nullable
    public WorkerNetAddress getWorker(GetWorkerOptions options) {
        // Collect the addresses of currently active workers so stale cached
        // or remembered workers can be skipped.
        Set<WorkerNetAddress> eligibleAddresses = new HashSet<>();
        for (BlockWorkerInfo info : options.getBlockWorkerInfos()) {
            eligibleAddresses.add(info.getNetAddress());
        }
        // Reuse a previously chosen worker for this block if it is still active.
        WorkerNetAddress address = mBlockLocationCache.get(options.getBlockInfo().getBlockId());
        if (address != null && eligibleAddresses.contains(address)) {
            return address;
        } else {
            address = null;
        }
        // First call: snapshot and shuffle the worker list; this order is the
        // round-robin base for all subsequent calls.
        if (!mInitialized) {
            mWorkerInfoList = Lists.newArrayList(options.getBlockWorkerInfos());
            Collections.shuffle(mWorkerInfoList);
            mIndex = 0;
            mInitialized = true;
        }
        // at most try all the workers
        for (int i = 0; i < mWorkerInfoList.size(); i++) {
            WorkerNetAddress candidate = mWorkerInfoList.get(mIndex).getNetAddress();
            // Look the candidate up in the fresh list: it must still be active
            // and have enough capacity for this block.
            BlockWorkerInfo workerInfo = findBlockWorkerInfo(options.getBlockWorkerInfos(), candidate);
            mIndex = (mIndex + 1) % mWorkerInfoList.size();
            if (workerInfo != null && workerInfo.getCapacityBytes() >= options.getBlockInfo().getLength()
                    && eligibleAddresses.contains(candidate)) {
                address = candidate;
                break;
            }
        }
        // Cache the decision (possibly null) for this block id.
        mBlockLocationCache.put(options.getBlockInfo().getBlockId(), address);
        return address;
    }
}
public class BProgramJsProxy { /** * sync ( " bp . sync " ) related code */ @ Override public void sync ( NativeObject jsRWB , Object data ) { } }
synchronizationPoint ( jsRWB , null , data ) ;
public class ConfigConcatenation {
    /**
     * Add left and right, or their merger, to builder.
     *
     * <p>"Left" is the last element already in the builder; it is replaced by
     * the joined value when the two can be merged, otherwise {@code origRight}
     * is simply appended.</p>
     */
    private static void join(ArrayList<AbstractConfigValue> builder, AbstractConfigValue origRight) {
        AbstractConfigValue left = builder.get(builder.size() - 1);
        AbstractConfigValue right = origRight;
        // check for an object which can be converted to a list
        // (this will be an object with numeric keys, like foo.0, foo.1)
        if (left instanceof ConfigObject && right instanceof SimpleConfigList) {
            left = DefaultTransformer.transform(left, ConfigValueType.LIST);
        } else if (left instanceof SimpleConfigList && right instanceof ConfigObject) {
            right = DefaultTransformer.transform(right, ConfigValueType.LIST);
        }
        // Since this depends on the type of two instances, I couldn't think
        // of much alternative to an instanceof chain. Visitors are sometimes
        // used for multiple dispatch but seems like overkill.
        AbstractConfigValue joined = null;
        if (left instanceof ConfigObject && right instanceof ConfigObject) {
            // Object + object: later value wins on key collisions.
            joined = right.withFallback(left);
        } else if (left instanceof SimpleConfigList && right instanceof SimpleConfigList) {
            // List + list: plain concatenation.
            joined = ((SimpleConfigList) left).concatenate((SimpleConfigList) right);
        } else if (left instanceof ConfigConcatenation || right instanceof ConfigConcatenation) {
            throw new BugOrBroken("unflattened ConfigConcatenation");
        } else if (left instanceof Unmergeable || right instanceof Unmergeable) {
            // leave joined = null, cannot join
        } else {
            // handle primitive type or primitive type mixed with object or list
            String s1 = left.transformToString();
            String s2 = right.transformToString();
            if (s1 == null || s2 == null) {
                throw new WrongType(left.origin(),
                        "Cannot concatenate object or list with a non-object-or-list, " + left + " and " + right + " are not compatible");
            } else {
                // Primitive + primitive joins as string concatenation, with a
                // merged origin covering both sides.
                ConfigOrigin joinedOrigin = SimpleConfigOrigin.mergeOrigins(left.origin(), right.origin());
                joined = new ConfigString(joinedOrigin, s1 + s2);
            }
        }
        if (joined == null) {
            builder.add(right);
        } else {
            // Replace the old "left" tail element with the merged value.
            builder.remove(builder.size() - 1);
            builder.add(joined);
        }
    }
}
public class CachingResourceLoaderImpl { /** * Check if any of the Resources used to load the { @ link CachedResource } have been modified . */ protected < T > boolean checkIfModified ( CachedResource < T > cachedResource ) { } }
final Resource resource = cachedResource . getResource ( ) ; // Check if the resource has been modified since it was last loaded . final long lastLoadTime = cachedResource . getLastLoadTime ( ) ; final long mainLastModified = this . getLastModified ( resource ) ; boolean resourceModified = lastLoadTime < mainLastModified ; if ( resourceModified ) { this . logger . trace ( "Resource {} was modified at {}, reloading" , new Object [ ] { resource , mainLastModified } ) ; return true ; } // If the main resource hasn ' t changed check additional resources for modifications for ( final Map . Entry < Resource , Long > additionalResourceEntry : cachedResource . getAdditionalResources ( ) . entrySet ( ) ) { final Resource additionalResource = additionalResourceEntry . getKey ( ) ; final Long resourceLastLoadTime = additionalResourceEntry . getValue ( ) ; final long lastModified = this . getLastModified ( additionalResource ) ; if ( resourceLastLoadTime < lastModified ) { this . logger . trace ( "Additional resource {} for {} was modified at {}, reloading" , new Object [ ] { additionalResource , resource , lastModified } ) ; return true ; } } this . logger . trace ( "{} has not been modified since last loaded {}, returning" , cachedResource , lastLoadTime ) ; return false ;
import java . util . * ; class SortNestedLists { /** * Function to sort a nested list based on list length and elements . * The function first sorts the sublists by their values , and then sorts * the entire list by the length of sublists . * @ param inputList The list of lists to be sorted . * @ return The sorted input list . * Examples : * > > > sortNestedLists ( [ [ 2 ] , [ 0 ] , [ 1 , 3 ] , [ 0 , 7 ] , [ 9 , 11 ] , [ 13 , 15 , 17 ] ] ) * [ [ 0 ] , [ 2 ] , [ 0 , 7 ] , [ 1 , 3 ] , [ 9 , 11 ] , [ 13 , 15 , 17 ] ] * > > > sortNestedLists ( [ [ 1 ] , [ 2 , 3 ] , [ 4 , 5 , 6 ] , [ 7 ] , [ 10 , 11 ] ] ) * [ [ 1 ] , [ 7 ] , [ 2 , 3 ] , [ 10 , 11 ] , [ 4 , 5 , 6 ] ] * > > > sortNestedLists ( [ [ ' python ' ] , [ ' java ' , ' C ' , ' C + + ' ] , [ ' DBMS ' ] , [ ' SQL ' , ' HTML ' ] ] ) * [ [ ' DBMS ' ] , [ ' python ' ] , [ ' SQL ' , ' HTML ' ] , [ ' java ' , ' C ' , ' C + + ' ] ] */ public List < List < String > > sortNestedLists ( List < List < String > > inputList ) { } }
for ( List < String > list : inputList ) { Collections . sort ( list ) ; } inputList . sort ( new Comparator < List < String > > ( ) { @ Override public int compare ( List < String > list1 , List < String > list2 ) { return Integer . compare ( list1 . size ( ) , list2 . size ( ) ) ; } } ) ; return inputList ;
public class SecurityConfig { /** * Configures the unsecured public resources . * @ param web web sec object * @ throws Exception ex */ @ Override public void configure ( WebSecurity web ) throws Exception { } }
web . ignoring ( ) . requestMatchers ( IgnoredRequestMatcher . INSTANCE ) ; DefaultHttpFirewall firewall = new DefaultHttpFirewall ( ) ; firewall . setAllowUrlEncodedSlash ( true ) ; web . httpFirewall ( firewall ) ; // web . debug ( true ) ;
public class Node { /** * Return if this node has the same columns and lines than another one . * @ param node2 * the node to compare * @ return if this node has the same columns and lines than another one . */ public boolean isInEqualLocation ( Node node2 ) { } }
if ( ! isNewNode ( ) && ! node2 . isNewNode ( ) ) { return getBeginLine ( ) == node2 . getBeginLine ( ) && getBeginColumn ( ) == node2 . getBeginColumn ( ) && getEndLine ( ) == node2 . getEndLine ( ) && getEndColumn ( ) == node2 . getEndColumn ( ) ; } return false ;
public class ZonalDateTime {

    /**
     * Used in ChronoFormatter/FractionProcessor.
     */
    // Returns the maximum value of the given element in this zonal context,
    // extending SECOND_OF_MINUTE to 60 when the maximized local timestamp maps
    // onto a leap second in this zone.
    @Override
    public <V> V getMaximum(ChronoElement<V> element) {
        V max;
        // Prefer the local timestamp when it carries the element; otherwise
        // fall back to the underlying moment.
        if (this.timestamp.contains(element)) {
            max = this.timestamp.getMaximum(element);
        } else {
            max = this.moment.getMaximum(element);
        }
        // Leap seconds exist only from 1972 onwards; check whether the moment
        // one SI second after the maximized timestamp is a leap second.
        if ((element == SECOND_OF_MINUTE) && (this.timestamp.getYear() >= 1972)) {
            PlainTimestamp ts = this.timestamp.with(element, max);
            // Skip timestamps that are invalid in this zone (e.g. inside a gap).
            if (!this.zone.isInvalid(ts, ts)) {
                Moment transformed = ts.in(this.zone);
                Moment test = transformed.plus(1, SI.SECONDS);
                if (test.isLeapSecond()) {
                    // The minute ends in a leap second, so the maximum is 60.
                    return element.getType().cast(Integer.valueOf(60));
                }
            }
        }
        return max;
    }
}
public class CommerceShipmentPersistenceImpl {

    /**
     * Removes the commerce shipment with the primary key from the database. Also notifies the appropriate model listeners.
     *
     * @param primaryKey the primary key of the commerce shipment
     * @return the commerce shipment that was removed
     * @throws NoSuchShipmentException if a commerce shipment with the primary key could not be found
     */
    @Override
    public CommerceShipment remove(Serializable primaryKey)
        throws NoSuchShipmentException {

        Session session = null;

        try {
            session = openSession();

            // Load the entity by primary key; null means it does not exist.
            CommerceShipment commerceShipment = (CommerceShipment) session.get(
                CommerceShipmentImpl.class, primaryKey);

            if (commerceShipment == null) {
                if (_log.isDebugEnabled()) {
                    _log.debug(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey);
                }

                throw new NoSuchShipmentException(
                    _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey);
            }

            // Delegate to remove(model) so model listeners are notified.
            return remove(commerceShipment);
        }
        catch (NoSuchShipmentException nsee) {
            // Re-throw unchanged so callers see the documented exception type.
            throw nsee;
        }
        catch (Exception e) {
            // Wrap any other persistence failure in a system exception.
            throw processException(e);
        }
        finally {
            closeSession(session);
        }
    }
}
public class PrincipalComponentAnalysis { /** * Converts a vector from sample space into eigen space . * @ param sampleData Sample space data . * @ return Eigen space projection . */ public double [ ] sampleToEigenSpace ( double [ ] sampleData ) { } }
if ( sampleData . length != A . getNumCols ( ) ) throw new IllegalArgumentException ( "Unexpected sample length" ) ; DMatrixRMaj mean = DMatrixRMaj . wrap ( A . getNumCols ( ) , 1 , this . mean ) ; DMatrixRMaj s = new DMatrixRMaj ( A . getNumCols ( ) , 1 , true , sampleData ) ; DMatrixRMaj r = new DMatrixRMaj ( numComponents , 1 ) ; CommonOps_DDRM . subtract ( s , mean , s ) ; CommonOps_DDRM . mult ( V_t , s , r ) ; return r . data ;
public class Representation { /** * Creates a representation based on the byte array specified . The length of the byte array * will be reflected in the returned representation metadata ( property { @ link NFO # FILE _ SIZE } ) . * Note that the byte array should not be changed after calling this method , as modification * could be ( partially ) reflected in the returned representation . * @ param bytes * the byte array containing the binary data of the representation * @ return the created representation */ public static Representation create ( final byte [ ] bytes ) { } }
final Representation representation = new Representation ( new ByteArrayInputStream ( bytes ) ) ; representation . metadata . set ( NFO . FILE_SIZE , ( long ) bytes . length ) ; return representation ;
public class TokenSub {

    /**
     * Given a static config map, substitute occurrences of ${HERON_*} variables
     * in the provided path string. Also expands ${HOME} (or "~") and
     * ${JAVA_HOME} path components.
     *
     * @param config a static map config object of key value pairs
     * @param pathString string representing a path including ${HERON_*} variables
     * @return String string that represents the modified path
     * @throws IllegalArgumentException if the path contains a known token whose
     *         value is not present in the config
     */
    public static String substitute(Config config, String pathString) {
        // trim the leading and trailing spaces
        String trimmedPath = pathString.trim();

        // URLs are handled by their own substitution routine.
        if (isURL(trimmedPath)) {
            return substituteURL(config, trimmedPath);
        }

        // get platform independent file separator
        String fileSeparator = Matcher.quoteReplacement(File.separator);

        // split the trimmed path into a list of components
        // (Arrays.asList is fixed-size, hence the mutable LinkedList copy)
        List<String> fixedList = Arrays.asList(trimmedPath.split(fileSeparator));
        List<String> list = new LinkedList<>(fixedList);

        // substitute various variables
        for (int i = 0; i < list.size(); i++) {
            String elem = list.get(i);
            if ("${HOME}".equals(elem) || "~".equals(elem)) {
                list.set(i, System.getProperty("user.home"));
            } else if ("${JAVA_HOME}".equals(elem)) {
                String javaPath = System.getenv("JAVA_HOME");
                if (javaPath != null) {
                    list.set(i, javaPath);
                }
            } else if (isToken(elem)) {
                Matcher m = TOKEN_PATTERN.matcher(elem);
                if (m.matches()) {
                    String token = m.group(1);
                    // For backwards compatibility the ${TOPOLOGY} token will match Key.TOPOLOGY_NAME
                    if ("TOPOLOGY".equals(token)) {
                        token = "TOPOLOGY_NAME";
                    }
                    Key key;
                    try {
                        key = Key.valueOf(token);
                    } catch (IllegalArgumentException e) {
                        // Unknown token: leave the component untouched and continue.
                        LOG.fine(String.format("Config value %s contains substitution token %s which is "
                            + "not defined in the Key enum, which is required for token substitution" ,
                            pathString, elem));
                        continue;
                    }
                    String value = config.getStringValue(key);
                    if (value == null) {
                        // FIX: this exception used to be thrown inside the same try block
                        // whose catch handled Key.valueOf's IllegalArgumentException, so it
                        // was accidentally swallowed and only logged with the wrong message.
                        // The try is now narrowed to Key.valueOf so this propagates.
                        throw new IllegalArgumentException(String.format("Config value %s contains "
                            + "substitution token %s but the corresponding config setting %s not found" ,
                            pathString, elem, key.value()));
                    }
                    list.set(i, value);
                }
            }
        }

        return combinePaths(list);
    }
}
public class DirectoryPoller { /** * Adds the given < code > directory < / code > into this instance . * The < code > directory < / code > will be polled in the next coming poll - cycle . * Registering an already registered directory will be ignored . * @ param directory implementation of { @ link PolledDirectory } . * @ throws NullPointerException if the given argument is null . */ public void addPolledDirectory ( PolledDirectory directory ) { } }
if ( directory == null ) { throw new NullPointerException ( NULL_ARGUMENT_ERROR ) ; } scheduledRunnable . addDirectory ( directory ) ;
public class LayerWorkspaceMgr { /** * Get the pointer to the helper memory . Usually used for CUDNN workspace memory sharing . * NOTE : Don ' t use this method unless you are fully aware of how it is used to manage CuDNN memory ! * Will ( by design ) throw a NPE if the underlying map ( set from MultiLayerNetwork or ComputationGraph ) is not set . * @ param key Key for the helper workspace pointer * @ param < T > Pointer type * @ return Pointer for that key , or null if none exists */ public < T extends Pointer > T getHelperWorkspace ( String key ) { } }
return helperWorkspacePointers == null ? null : ( T ) helperWorkspacePointers . get ( key ) ;
public class IfcConstraintImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    // EMF-generated accessor: resolves (resolve = true) the inverse
    // RelatesConstraints reference list from the underlying store.
    // Do not edit by hand; regenerate from the model instead.
    @SuppressWarnings("unchecked")
    public EList<IfcConstraintRelationship> getRelatesConstraints() {
        return (EList<IfcConstraintRelationship>) eGet(
            Ifc2x3tc1Package.Literals.IFC_CONSTRAINT__RELATES_CONSTRAINTS, true);
    }
}
public class NFSFileVec { /** * Make a new NFSFileVec key which holds the filename implicitly . This name * is used by the Chunks to load data on - demand . Blocking * @ return A NFSFileVec mapped to this file . */ public static NFSFileVec make ( File f ) { } }
Futures fs = new Futures ( ) ; NFSFileVec nfs = make ( f , fs ) ; fs . blockForPending ( ) ; return nfs ;
public class RangeToken {

    /**
     * for RANGE: Creates complement. for NRANGE: Creates the same meaning
     * RANGE.
     */
    // The ranges array stores sorted, compacted [start, end] pairs. The
    // complement consists of the gaps between consecutive pairs, plus a
    // leading segment from 0 and a trailing segment up to UTF16_MAX when the
    // original ranges do not already touch those bounds.
    static Token complementRanges(Token token) {
        if (token.type != RANGE && token.type != NRANGE)
            throw new IllegalArgumentException("Token#complementRanges(): must be RANGE: " + token.type);
        RangeToken tok = (RangeToken) token;
        // Normalize so the pairs are sorted and non-overlapping.
        tok.sortRanges();
        tok.compactRanges();
        // Worst case size: one gap per pair boundary plus leading/trailing segments.
        int len = tok.ranges.length + 2;
        if (tok.ranges[0] == 0)
            len -= 2;                   // no leading segment before code point 0
        int last = tok.ranges[tok.ranges.length - 1];
        if (last == UTF16_MAX)
            len -= 2;                   // no trailing segment after the max code point
        RangeToken ret = Token.createRange();
        ret.ranges = new int[len];
        int wp = 0;
        if (tok.ranges[0] > 0) {
            // Leading gap: [0, firstStart - 1]
            ret.ranges[wp++] = 0;
            ret.ranges[wp++] = tok.ranges[0] - 1;
        }
        // Interior gaps: between the end of pair k and the start of pair k+1.
        for (int i = 1; i < tok.ranges.length - 2; i += 2) {
            ret.ranges[wp++] = tok.ranges[i] + 1;
            ret.ranges[wp++] = tok.ranges[i + 1] - 1;
        }
        if (last != UTF16_MAX) {
            // Trailing gap: [lastEnd + 1, UTF16_MAX]
            ret.ranges[wp++] = last + 1;
            ret.ranges[wp] = UTF16_MAX;
        }
        ret.setCompacted();
        return ret;
    }
}
public class JMArrays { /** * Build array with delimiter string [ ] . * @ param stringWithDelimiter the string with delimiter * @ param delimiter the delimiter * @ return the string [ ] */ public static String [ ] buildArrayWithDelimiter ( String stringWithDelimiter , String delimiter ) { } }
return toArray ( JMCollections . buildListWithDelimiter ( stringWithDelimiter , delimiter ) ) ;
public class AbstractItemLink {

    /**
     * Declare the receiver stable.
     * This is used to drive the softening/disposal of the indirect reference
     * to the receivers item. The item can be discarded if the link is stable
     * and available (always assuming the storage strategy allows). If the item
     * becomes unstable or unavailable the link should not allow the item to be
     * discarded.
     *
     * @throws SevereMessageStoreException
     */
    public final void persistentRepresentationIsStable() throws SevereMessageStoreException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "persistentRepresentationIsStable");

        boolean notifyRequired = false;
        AbstractItem item = null;
        // State transition happens under the monitor; only the notification is
        // performed outside the synchronized block (see below).
        synchronized (this) {
            if (!_persistentRepresentationIsStable) {
                // if we have become stable and were releasable, then
                // notify cache.
                notifyRequired = _itemIsDiscardableIfPersistentRepresentationStable; // 274012
                if (notifyRequired) {
                    item = _getItemNoRestore();
                    // Defect 601995
                    _strongReferenceToItem = NULL_STRONG_REF;
                }
                _persistentRepresentationIsStable = true;
            }
        }
        // Notification is issued after releasing the monitor.
        if (notifyRequired) {
            _declareReleasable(item);
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "persistentRepresentationIsStable");
    }
}
public class FileUtils { /** * < p > Attempts to get the base name for a given file / directory . Removes the suffix from the name as well . This * command should work in the same way as the unix < code > basename < / code > command . < / p > * @ param file The file path / name * @ param suffix The suffix to remove * @ return The file name if given a file , the directory name if given a directory , or null if given a null or empty * string . */ public static String baseName ( String file , @ NonNull String suffix ) { } }
if ( StringUtils . isNullOrBlank ( file ) ) { return StringUtils . EMPTY ; } file = StringUtils . trim ( file ) ; int index = indexOfLastSeparator ( file ) ; if ( index == - 1 ) { return file . replaceAll ( Pattern . quote ( suffix ) + "$" , "" ) ; } else if ( index == file . length ( ) - 1 ) { return baseName ( file . substring ( 0 , file . length ( ) - 1 ) ) ; } return file . substring ( index + 1 ) . replaceAll ( Pattern . quote ( suffix ) + "$" , "" ) ;
public class CodedConstant {

    /**
     * Gets the descriptor.
     *
     * @param cls the cls
     * @return the descriptor
     * @throws IOException Signals that an I/O exception has occurred.
     */
    // Builds a protobuf Descriptor for an annotated POJO class by generating
    // .proto IDL text, parsing it, converting it to a FileDescriptorProto and
    // building a runtime FileDescriptor from it.
    public static Descriptor getDescriptor(Class<?> cls) throws IOException {
        // Generate .proto IDL text from the annotated class, then parse it
        // back into a protoparser model.
        String idl = ProtobufIDLGenerator.getIDL(cls);
        ProtoFile file = ProtoSchemaParser.parse(ProtobufIDLProxy.DEFAULT_FILE_NAME, idl);

        // Build a FileDescriptorProto POJO mirroring the parsed file.
        FileDescriptorProtoPOJO fileDescriptorProto = new FileDescriptorProtoPOJO();
        fileDescriptorProto.name = ProtobufIDLProxy.DEFAULT_FILE_NAME;
        fileDescriptorProto.pkg = file.getPackageName();
        fileDescriptorProto.dependencies = file.getDependencies();
        fileDescriptorProto.publicDependency = convertList(file.getPublicDependencies());
        fileDescriptorProto.weakDependency = null; // XXX
        fileDescriptorProto.messageTypes = new ArrayList<DescriptorProtoPOJO>();
        fileDescriptorProto.enumTypes = new ArrayList<EnumDescriptorProtoPOJO>();
        fileDescriptorProto.services = new ArrayList<ServiceDescriptorProtoPOJO>();

        // First pass: collect the names of all message and enum types so type
        // references can be classified in the second pass.
        Set<String> messageSet = new HashSet<String>();
        Set<String> enumSet = new HashSet<String>();
        List<com.squareup.protoparser.Type> typeElements = file.getTypes();
        if (typeElements != null) {
            for (com.squareup.protoparser.Type typeElement : typeElements) {
                if (typeElement instanceof MessageType) {
                    messageSet.add(typeElement.getName());
                } else if (typeElement instanceof EnumType) {
                    enumSet.add(typeElement.getName());
                }
            }
            // Second pass: convert each parsed type into its descriptor POJO.
            for (com.squareup.protoparser.Type typeElement : typeElements) {
                if (typeElement instanceof MessageType) {
                    fileDescriptorProto.messageTypes.add(getDescritorProtoPOJO(fileDescriptorProto,
                            (MessageType) typeElement, messageSet, enumSet));
                } else if (typeElement instanceof EnumType) {
                    fileDescriptorProto.enumTypes.add(getDescritorProtoPOJO(fileDescriptorProto,
                            (EnumType) typeElement, messageSet, enumSet));
                }
            }
        }

        // Encode the POJO with a generated codec and re-parse the bytes as a
        // real protobuf FileDescriptorProto.
        FileDescriptorProto fileproto;
        try {
            Codec<FileDescriptorProtoPOJO> descriptorCodec = ProtobufProxy.create(FileDescriptorProtoPOJO.class,
                    ProtobufProxy.isCacheEnabled(), ProtobufProxy.OUTPUT_PATH.get());
            byte[] bs = descriptorCodec.encode(fileDescriptorProto);
            fileproto = FileDescriptorProto.parseFrom(bs);
        } catch (InvalidProtocolBufferException e) {
            throw new IOException("Failed to parse protocol buffer descriptor for generated code.", e);
        }

        // Build the runtime FileDescriptor (with no dependencies) and return
        // its first message type.
        FileDescriptor fileDescriptor;
        try {
            fileDescriptor = FileDescriptor.buildFrom(fileproto,
                    new com.google.protobuf.Descriptors.FileDescriptor[] {});
        } catch (DescriptorValidationException e) {
            throw new IOException(e.getMessage(), e);
        }
        return fileDescriptor.getMessageTypes().get(0);
    }
}
public class PluginWrapper { /** * Called when there appears to be a core or plugin version which is too old for a stated dependency . * Normally records an error in { @ link # dependencyErrors } . * But if one or both versions { @ link # isSnapshot } , just issue a warning ( JENKINS - 52665 ) . */ private void versionDependencyError ( String message , String actual , String minimum ) { } }
if ( isSnapshot ( actual ) || isSnapshot ( minimum ) ) { LOGGER . log ( WARNING , "Suppressing dependency error in {0} v{1}: {2}" , new Object [ ] { getLongName ( ) , getVersion ( ) , message } ) ; } else { dependencyErrors . put ( message , false ) ; }
public class IpHelper { /** * Determines if a specified address is a valid Cisco Wildcard ( cisco ' s representation of a netmask ) * @ param wildcard * InetAddress * @ return boolean */ public static boolean isValidCiscoWildcard ( InetAddress wildcard ) { } }
byte [ ] segments = wildcard . getAddress ( ) ; for ( int i = 0 ; i < segments . length ; i ++ ) { assert ( ( ( byte ) ~ ( byte ) ~ segments [ i ] ) == segments [ i ] ) ; segments [ i ] = ( byte ) ~ segments [ i ] ; } return isValidNetmask ( segments ) ;
public class AWSOrganizationsClient { /** * Removes a member account from its parent organization . This version of the operation is performed by the account * that wants to leave . To remove a member account as a user in the master account , use * < a > RemoveAccountFromOrganization < / a > instead . * This operation can be called only from a member account in the organization . * < important > * < ul > * < li > * The master account in an organization with all features enabled can set service control policies ( SCPs ) that can * restrict what administrators of member accounts can do , including preventing them from successfully calling * < code > LeaveOrganization < / code > and leaving the organization . * < / li > * < li > * You can leave an organization as a member account only if the account is configured with the information required * to operate as a standalone account . When you create an account in an organization using the AWS Organizations * console , API , or CLI commands , the information required of standalone accounts is < i > not < / i > automatically * collected . For each account that you want to make standalone , you must accept the End User License Agreement * ( EULA ) , choose a support plan , provide and verify the required contact information , and provide a current payment * method . AWS uses the payment method to charge for any billable ( not free tier ) AWS activity that occurs while the * account is not attached to an organization . Follow the steps at < a href = * " http : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ manage _ accounts _ remove . html # leave - without - all - info " * > To leave an organization when all required account information has not yet been provided < / a > in the < i > AWS * Organizations User Guide < / i > . * < / li > * < li > * You can leave an organization only after you enable IAM user access to billing in your account . 
For more * information , see < a href = * " http : / / docs . aws . amazon . com / awsaccountbilling / latest / aboutv2 / grantaccess . html # ControllingAccessWebsite - Activate " * > Activating Access to the Billing and Cost Management Console < / a > in the < i > AWS Billing and Cost Management User * Guide < / i > . * < / li > * < / ul > * < / important > * @ param leaveOrganizationRequest * @ return Result of the LeaveOrganization operation returned by the service . * @ throws AccessDeniedException * You don ' t have permissions to perform the requested operation . The user or role that is making the * request must have at least one IAM permissions policy attached that grants the required permissions . For * more information , see < a href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / access . html " > Access * Management < / a > in the < i > IAM User Guide < / i > . * @ throws AccountNotFoundException * We can ' t find an AWS account with the < code > AccountId < / code > that you specified , or the account whose * credentials you used to make this request isn ' t a member of an organization . * @ throws AWSOrganizationsNotInUseException * Your account isn ' t a member of an organization . To make this request , you must use the credentials of an * account that belongs to an organization . * @ throws ConcurrentModificationException * The target of the operation is currently being modified by a different request . Try again later . * @ throws ConstraintViolationException * Performing this operation violates a minimum or maximum value limit . For example , attempting to remove * the last service control policy ( SCP ) from an OU or root , inviting or creating too many accounts to the * organization , or attaching too many policies to an account , OU , or root . This exception includes a reason * that contains additional information about the violated limit . 
< / p > * Some of the reasons in the following list might not be applicable to this specific API or operation : * < ul > * < li > * ACCOUNT _ NUMBER _ LIMIT _ EXCEEDED : You attempted to exceed the limit on the number of accounts in an * organization . If you need more accounts , contact < a * href = " https : / / console . aws . amazon . com / support / home # / " > AWS Support < / a > to request an increase in your * limit . * Or the number of invitations that you tried to send would cause you to exceed the limit of accounts in * your organization . Send fewer invitations or contact AWS Support to request an increase in the number of * accounts . * < note > * Deleted and closed accounts still count toward your limit . * < / note > < important > * If you get receive this exception when running a command immediately after creating the organization , * wait one hour and try again . If after an hour it continues to fail with this error , contact < a * href = " https : / / console . aws . amazon . com / support / home # / " > AWS Support < / a > . * < / important > < / li > * < li > * HANDSHAKE _ RATE _ LIMIT _ EXCEEDED : You attempted to exceed the number of handshakes that you can send in one * day . * < / li > * < li > * OU _ NUMBER _ LIMIT _ EXCEEDED : You attempted to exceed the number of OUs that you can have in an organization . * < / li > * < li > * OU _ DEPTH _ LIMIT _ EXCEEDED : You attempted to create an OU tree that is too many levels deep . * < / li > * < li > * ORGANIZATION _ NOT _ IN _ ALL _ FEATURES _ MODE : You attempted to perform an operation that requires the * organization to be configured to support all features . An organization that supports only consolidated * billing features can ' t perform this operation . * < / li > * < li > * POLICY _ NUMBER _ LIMIT _ EXCEEDED . You attempted to exceed the number of policies that you can have in an * organization . 
* < / li > * < li > * MAX _ POLICY _ TYPE _ ATTACHMENT _ LIMIT _ EXCEEDED : You attempted to exceed the number of policies of a certain * type that can be attached to an entity at one time . * < / li > * < li > * MIN _ POLICY _ TYPE _ ATTACHMENT _ LIMIT _ EXCEEDED : You attempted to detach a policy from an entity that would * cause the entity to have fewer than the minimum number of policies of a certain type required . * < / li > * < li > * ACCOUNT _ CANNOT _ LEAVE _ WITHOUT _ EULA : You attempted to remove an account from the organization that doesn ' t * yet have enough information to exist as a standalone account . This account requires you to first agree to * the AWS Customer Agreement . Follow the steps at < a href = * " http : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ manage _ accounts _ remove . html # leave - without - all - info " * > To leave an organization when all required account information has not yet been provided < / a > in the * < i > AWS Organizations User Guide < / i > . * < / li > * < li > * ACCOUNT _ CANNOT _ LEAVE _ WITHOUT _ PHONE _ VERIFICATION : You attempted to remove an account from the organization * that doesn ' t yet have enough information to exist as a standalone account . This account requires you to * first complete phone verification . Follow the steps at < a href = * " http : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ manage _ accounts _ remove . html # leave - without - all - info " * > To leave an organization when all required account information has not yet been provided < / a > in the * < i > AWS Organizations User Guide < / i > . * < / li > * < li > * MASTER _ ACCOUNT _ PAYMENT _ INSTRUMENT _ REQUIRED : To create an organization with this master account , you first * must associate a valid payment instrument , such as a credit card , with the account . Follow the steps at * < a href = * " http : / / docs . aws . amazon . 
com / organizations / latest / userguide / orgs _ manage _ accounts _ remove . html # leave - without - all - info " * > To leave an organization when all required account information has not yet been provided < / a > in the * < i > AWS Organizations User Guide < / i > . * < / li > * < li > * MEMBER _ ACCOUNT _ PAYMENT _ INSTRUMENT _ REQUIRED : To complete this operation with this member account , you * first must associate a valid payment instrument , such as a credit card , with the account . Follow the * steps at < a href = * " http : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ manage _ accounts _ remove . html # leave - without - all - info " * > To leave an organization when all required account information has not yet been provided < / a > in the * < i > AWS Organizations User Guide < / i > . * < / li > * < li > * ACCOUNT _ CREATION _ RATE _ LIMIT _ EXCEEDED : You attempted to exceed the number of accounts that you can create * in one day . * < / li > * < li > * MASTER _ ACCOUNT _ ADDRESS _ DOES _ NOT _ MATCH _ MARKETPLACE : To create an account in this organization , you first * must migrate the organization ' s master account to the marketplace that corresponds to the master * account ' s address . For example , accounts with India addresses must be associated with the AISPL * marketplace . All accounts in an organization must be associated with the same marketplace . * < / li > * < li > * MASTER _ ACCOUNT _ MISSING _ CONTACT _ INFO : To complete this operation , you must first provide contact a valid * address and phone number for the master account . Then try the operation again . * < / li > * < li > * MASTER _ ACCOUNT _ NOT _ GOVCLOUD _ ENABLED : To complete this operation , the master account must have an * associated account in the AWS GovCloud ( US - West ) Region . For more information , see < a * href = " http : / / docs . aws . amazon . com / govcloud - us / latest / UserGuide / govcloud - organizations . 
html " > AWS * Organizations < / a > in the < i > AWS GovCloud User Guide . < / i > * < / li > * @ throws InvalidInputException * The requested operation failed because you provided invalid values for one or more of the request * parameters . This exception includes a reason that contains additional information about the violated * limit : < / p > < note > * Some of the reasons in the following list might not be applicable to this specific API or operation : * < / note > * < ul > * < li > * IMMUTABLE _ POLICY : You specified a policy that is managed by AWS and can ' t be modified . * < / li > * < li > * INPUT _ REQUIRED : You must include a value for all required parameters . * < / li > * < li > * INVALID _ ENUM : You specified a value that isn ' t valid for that parameter . * < / li > * < li > * INVALID _ FULL _ NAME _ TARGET : You specified a full name that contains invalid characters . * < / li > * < li > * INVALID _ LIST _ MEMBER : You provided a list to a parameter that contains at least one invalid value . * < / li > * < li > * INVALID _ PARTY _ TYPE _ TARGET : You specified the wrong type of entity ( account , organization , or email ) as a * party . * < / li > * < li > * INVALID _ PAGINATION _ TOKEN : Get the value for the < code > NextToken < / code > parameter from the response to a * previous call of the operation . * < / li > * < li > * INVALID _ PATTERN : You provided a value that doesn ' t match the required pattern . * < / li > * < li > * INVALID _ PATTERN _ TARGET _ ID : You specified a policy target ID that doesn ' t match the required pattern . * < / li > * < li > * INVALID _ ROLE _ NAME : You provided a role name that isn ' t valid . A role name can ' t begin with the reserved * prefix < code > AWSServiceRoleFor < / code > . * < / li > * < li > * INVALID _ SYNTAX _ ORGANIZATION _ ARN : You specified an invalid Amazon Resource Name ( ARN ) for the * organization . 
* < / li > * < li > * INVALID _ SYNTAX _ POLICY _ ID : You specified an invalid policy ID . * < / li > * < li > * MAX _ FILTER _ LIMIT _ EXCEEDED : You can specify only one filter parameter for the operation . * < / li > * < li > * MAX _ LENGTH _ EXCEEDED : You provided a string parameter that is longer than allowed . * < / li > * < li > * MAX _ VALUE _ EXCEEDED : You provided a numeric parameter that has a larger value than allowed . * < / li > * < li > * MIN _ LENGTH _ EXCEEDED : You provided a string parameter that is shorter than allowed . * < / li > * < li > * MIN _ VALUE _ EXCEEDED : You provided a numeric parameter that has a smaller value than allowed . * < / li > * < li > * MOVING _ ACCOUNT _ BETWEEN _ DIFFERENT _ ROOTS : You can move an account only between entities in the same root . * < / li > * @ throws MasterCannotLeaveOrganizationException * You can ' t remove a master account from an organization . If you want the master account to become a member * account in another organization , you must first delete the current organization of the master account . * @ throws ServiceException * AWS Organizations can ' t complete your request because of an internal service error . Try again later . * @ throws TooManyRequestsException * You ' ve sent too many requests in too short a period of time . The limit helps protect against * denial - of - service attacks . Try again later . < / p > * For information on limits that affect Organizations , see < a * href = " https : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ reference _ limits . html " > Limits of * AWS Organizations < / a > in the < i > AWS Organizations User Guide < / i > . * @ sample AWSOrganizations . LeaveOrganization * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / organizations - 2016-11-28 / LeaveOrganization " * target = " _ top " > AWS API Documentation < / a > */ @ Override public LeaveOrganizationResult leaveOrganization ( LeaveOrganizationRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeLeaveOrganization ( request ) ;
public class CopyProductRequest { /** * The copy options . If the value is < code > CopyTags < / code > , the tags from the source product are copied to the * target product . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setCopyOptions ( java . util . Collection ) } or { @ link # withCopyOptions ( java . util . Collection ) } if you want to * override the existing values . * @ param copyOptions * The copy options . If the value is < code > CopyTags < / code > , the tags from the source product are copied to * the target product . * @ return Returns a reference to this object so that method calls can be chained together . * @ see CopyOption */ public CopyProductRequest withCopyOptions ( String ... copyOptions ) { } }
if ( this . copyOptions == null ) { setCopyOptions ( new java . util . ArrayList < String > ( copyOptions . length ) ) ; } for ( String ele : copyOptions ) { this . copyOptions . add ( ele ) ; } return this ;
public class N { /** * Mostly it ' s designed for one - step operation to complete the operation in one step . * < code > java . util . stream . Stream < / code > is preferred for multiple phases operation . * @ param a * @ param fromIndex * @ param toIndex * @ param func * @ return */ public static < T , E extends Exception > ShortList mapToShort ( final T [ ] a , final int fromIndex , final int toIndex , final Try . ToShortFunction < ? super T , E > func ) throws E { } }
checkFromToIndex ( fromIndex , toIndex , len ( a ) ) ; N . checkArgNotNull ( func ) ; if ( N . isNullOrEmpty ( a ) ) { return new ShortList ( ) ; } final ShortList result = new ShortList ( toIndex - fromIndex ) ; for ( int i = fromIndex ; i < toIndex ; i ++ ) { result . add ( func . applyAsShort ( a [ i ] ) ) ; } return result ;
public class HttpFields { /** * Sets the value of a date field . * @ param name the field name * @ param date the field date value */ public void addDateField ( String name , long date ) { } }
String d = DateGenerator . formatDate ( date ) ; add ( name , d ) ;
public class SequenceLabelerFactory {
    /**
     * Creates the {@link AdaptiveFeatureGenerator}. Usually this is a set of generators
     * contained in an {@link AggregatedFeatureGenerator}.
     * Note: the generators are created fresh on every call to this method.
     *
     * @return the feature generator built from the descriptor bytes; falls back to the
     *         default descriptor when none is stored in the model
     */
    public AdaptiveFeatureGenerator createFeatureGenerators ( ) { } }
    // Lazily fetch the descriptor bytes from the model artifact, if available.
    if ( this . featureGeneratorBytes == null && this . artifactProvider != null ) { this . featureGeneratorBytes = ( byte [ ] ) this . artifactProvider . getArtifact ( SequenceLabelerModel . GENERATOR_DESCRIPTOR_ENTRY_NAME ) ; }
    // No descriptor in the model: fall back to the built-in default (and warn loudly).
    if ( this . featureGeneratorBytes == null ) { System . err . println ( "WARNING: loading the default feature generator descriptor!!" ) ; this . featureGeneratorBytes = loadDefaultFeatureGeneratorBytes ( ) ; }
    final InputStream descriptorIn = new ByteArrayInputStream ( this . featureGeneratorBytes ) ;
    AdaptiveFeatureGenerator generator = null ;
    try {
        // The resource provider resolves generator resources from the model artifacts
        // when available, otherwise from the locally supplied resources map.
        generator = GeneratorFactory . create ( descriptorIn , new FeatureGeneratorResourceProvider ( ) { @ Override public Object getResource ( final String key ) { if ( SequenceLabelerFactory . this . artifactProvider != null ) { return SequenceLabelerFactory . this . artifactProvider . getArtifact ( key ) ; } else { return SequenceLabelerFactory . this . resources . get ( key ) ; } } } ) ;
    } catch ( final InvalidFormatException e ) {
        // It is assumed that the creation of the feature generation does not fail after it
        // succeeded once during model loading. If it does, that can only be a programming
        // mistake, so callers are not forced to handle a checked exception here.
        throw new SequenceLabelerModel . FeatureGeneratorCreationError ( e ) ;
    } catch ( final IOException e ) {
        // Reading from an in-memory stream cannot actually fail with I/O.
        throw new IllegalStateException ( "Reading from mem cannot result in an I/O error" , e ) ;
    }
    return generator ;
public class VodClient { /** * Publish the specific media resource managed by VOD service , so that it can be access and played . * The caller < i > must < / i > authenticate with a valid BCE Access Key / Private Key pair . * @ param mediaId The unique ID for each media resource * @ return empty response will be returned */ public PublishMediaResourceResponse publishMediaResource ( String mediaId ) { } }
PublishMediaResourceRequest request = new PublishMediaResourceRequest ( ) . withMediaId ( mediaId ) ; return publishMediaResource ( request ) ;
public class PeerManager {
    /**
     * Determines the owner of the specified lock, waiting for any in-flight resolution
     * to complete before notifying the supplied listener.
     *
     * @param lock the lock whose owner is being queried
     * @param listener notified with the owning node name; invoked synchronously when no
     *        resolution is currently pending
     */
    public void queryLock ( NodeObject . Lock lock , ResultListener < String > listener ) { } }
    // If the lock is currently being resolved, just queue the listener on the handler;
    // it will be notified when resolution completes.
    LockHandler handler = _locks . get ( lock ) ; if ( handler != null ) { handler . listeners . add ( listener ) ; return ; }
    // Otherwise answer immediately with the lock's present owner.
    listener . requestCompleted ( queryLock ( lock ) ) ;
public class RxUtils {
    /**
     * Adds a log statement to the onNext, onError, and onCompleted notifications of an
     * observable, tagging each message with the given name.
     *
     * @param name the log tag name included in every message
     * @param logger the logger to write to (at DEBUG level)
     * @param <T> the transformation type
     * @return a transformer that wires the three logging hooks onto the source observable
     */
    public static < T > Observable . Transformer < T , T > applyLogging ( final String name , final Logger logger ) { } }
    return new Observable . Transformer < T , T > ( ) { @ Override public Observable < T > call ( Observable < T > observable ) {
        // Chain the three side-effect hooks; each only logs and passes the event through.
        return observable . doOnNext ( new Action1 < T > ( ) { @ Override public void call ( T t ) { logger . log ( Log . DEBUG , "[%s] onNext(%s)" , name , t ) ; } } )
            . doOnError ( new Action1 < Throwable > ( ) { @ Override public void call ( Throwable throwable ) { logger . log ( throwable , Log . DEBUG , "[%s] onError()" , name ) ; } } )
            . doOnCompleted ( new Action0 ( ) { @ Override public void call ( ) { logger . log ( Log . DEBUG , "[%s] onCompleted()" , name ) ; } } ) ;
    } } ;
public class QueryEngine { /** * refreshes the cached Namespace information */ public synchronized void refreshNamespaces ( ) { } }
/* * cache namespaces */ if ( this . namespaceCache == null ) { this . namespaceCache = new TreeMap < String , Namespace > ( ) ; Node [ ] nodes = client . getNodes ( ) ; for ( Node node : nodes ) { try { String namespaceString = Info . request ( getInfoPolicy ( ) , node , "namespaces" ) ; if ( ! namespaceString . isEmpty ( ) ) { String [ ] namespaceList = namespaceString . split ( ";" ) ; for ( String namespace : namespaceList ) { Namespace ns = this . namespaceCache . get ( namespace ) ; if ( ns == null ) { ns = new Namespace ( namespace ) ; this . namespaceCache . put ( namespace , ns ) ; } refreshNamespaceData ( node , ns ) ; } } } catch ( AerospikeException e ) { log . error ( "Error geting Namespaces " , e ) ; } } }
public class WaveformPreviewComponent { /** * Clear the playback state stored for a player , such as when it has unloaded the track . * @ param player the player number whose playback state is no longer valid * @ since 0.5.0 */ public synchronized void clearPlaybackState ( int player ) { } }
long oldMaxPosition = 0 ; PlaybackState furthestState = getFurthestPlaybackState ( ) ; if ( furthestState != null ) { oldMaxPosition = furthestState . position ; } PlaybackState oldState = playbackStateMap . remove ( player ) ; long newMaxPosition = 0 ; furthestState = getFurthestPlaybackState ( ) ; if ( furthestState != null ) { newMaxPosition = furthestState . position ; } repaintDueToPlaybackStateChange ( oldMaxPosition , newMaxPosition , oldState , null ) ;
public class FlatRowAdapter { /** * { @ inheritDoc } Convert a { @ link FlatRow } to a { @ link Result } . */ @ Override public Result adaptResponse ( FlatRow flatRow ) { } }
// flatRow shouldn ' t ever have a null row key . The second check is defensive only . if ( flatRow == null || flatRow . getRowKey ( ) == null ) { return Result . EMPTY_RESULT ; } byte [ ] RowKey = ByteStringer . extract ( flatRow . getRowKey ( ) ) ; List < FlatRow . Cell > cells = flatRow . getCells ( ) ; List < Cell > hbaseCells = new ArrayList < > ( cells . size ( ) ) ; byte [ ] previousFamilyBytes = null ; String previousFamily = null ; for ( FlatRow . Cell cell : cells ) { // Cells with labels are for internal use , do not return them . // TODO : the filtering logic should be moved into a WhileMatchFilter specific extension . if ( cell . getLabels ( ) . isEmpty ( ) ) { String family = cell . getFamily ( ) ; byte [ ] familyBytes = ! Objects . equal ( family , previousFamily ) ? Bytes . toBytes ( family ) : previousFamilyBytes ; hbaseCells . add ( toRowCell ( RowKey , cell , familyBytes ) ) ; previousFamily = family ; previousFamilyBytes = familyBytes ; } } return Result . create ( hbaseCells ) ;
public class Value {
    /**
     * The FAST path get-POJO as a {@link Freezable} - final method for speed.
     * Will (re)build the POJO from the _mem array on a cache miss. Never returns NULL.
     *
     * @return the POJO, probably the cached instance
     */
    public final < T extends Freezable > T getFreezable ( ) { } }
    touch ( ) ;
    // Read the cached field exactly once so a concurrent writer cannot change it
    // between the null check and the return.
    Freezable pojo = _pojo ; // Read once !
    if ( pojo != null ) return ( T ) pojo ;
    // Cache miss: rebuild from the serialized bytes, then publish into the cache field.
    pojo = TypeMap . newFreezable ( _type ) ; pojo . reloadFromBytes ( memOrLoad ( ) ) ;
    return ( T ) ( _pojo = pojo ) ;
public class TypeValidator {
    /**
     * Expect that the first type can be assigned to a symbol of the second type.
     *
     * @param n the node to issue warnings on
     * @param rightType the type on the RHS of the assign
     * @param leftType the type of the symbol on the LHS of the assign
     * @param owner the owner of the property being assigned to
     * @param propName the name of the property being assigned to
     * @return true if the types matched, false otherwise
     */
    boolean expectCanAssignToPropertyOf ( Node n , JSType rightType , JSType leftType , Node owner , String propName ) { } }
    // The NoType check is a hack to make typedefs work OK.
    if ( ! leftType . isNoType ( ) && ! rightType . isSubtypeOf ( leftType ) ) {
        // Do not type-check interface methods, because we expect that they will have
        // dummy implementations that do not match the type annotations.
        JSType ownerType = getJSType ( owner ) ;
        if ( ownerType . isFunctionPrototypeType ( ) ) { FunctionType ownerFn = ownerType . toObjectType ( ) . getOwnerFunction ( ) ; if ( ownerFn . isInterface ( ) && rightType . isFunctionType ( ) && leftType . isFunctionType ( ) ) { return true ; } }
        // Genuine mismatch: report it and signal failure to the caller.
        mismatch ( n , "assignment to property " + propName + " of " + typeRegistry . getReadableTypeName ( owner ) , rightType , leftType ) ;
        return false ;
    } else if ( ! leftType . isNoType ( ) && ! rightType . isSubtypeWithoutStructuralTyping ( leftType ) ) {
        // Nominal mismatch that structural typing allows: record it for later analysis
        // rather than reporting an error.
        TypeMismatch . recordImplicitInterfaceUses ( this . implicitInterfaceUses , n , rightType , leftType ) ;
        TypeMismatch . recordImplicitUseOfNativeObject ( this . mismatches , n , rightType , leftType ) ;
    }
    return true ;
public class SharedObject {
    /**
     * Sends pending update notifications over the data channel of the RTMP connection.
     * Owner events go back only to the connection that initiated the change; sync events
     * are broadcast to every registered listener except the source. Each delivery runs on
     * a worker task with the target connection bound as the thread-local Red5 connection.
     */
    protected synchronized void sendUpdates ( ) { } }
    log . debug ( "sendUpdates" ) ;
    // Snapshot the version and name once so every queued send uses consistent values.
    final int currentVersion = getVersion ( ) ;
    log . debug ( "Current version: {}" , currentVersion ) ;
    final String name = getName ( ) ;
    // --- owner notification ---
    Set < ISharedObjectEvent > ownerEvents = ownerMessage . getEvents ( ) ;
    if ( ! ownerEvents . isEmpty ( ) ) {
        // Drain all current owner events into a single ordered set destined for the owner.
        final TreeSet < ISharedObjectEvent > events = new TreeSet < > ( ownerEvents ) ;
        ownerEvents . removeAll ( events ) ;
        // Send the update to the "owner" of this update request.
        if ( source != null ) {
            final RTMPConnection con = ( RTMPConnection ) source ;
            // Deliver asynchronously on a shared-object worker task.
            SharedObjectService . submitTask ( ( ) -> { Red5 . setConnectionLocal ( con ) ; con . sendSharedObjectMessage ( name , currentVersion , persistent , events ) ; Red5 . setConnectionLocal ( null ) ; } ) ;
        }
    } else if ( log . isTraceEnabled ( ) ) { log . trace ( "No owner events to send" ) ; }
    // --- listener broadcast ---
    if ( ! syncEvents . isEmpty ( ) ) {
        // Drain all current sync events.
        final TreeSet < ISharedObjectEvent > events = new TreeSet < > ( syncEvents ) ;
        syncEvents . removeAll ( events ) ;
        Set < IEventListener > listeners = getListeners ( ) ;
        if ( log . isDebugEnabled ( ) ) { log . debug ( "Listeners: {}" , listeners ) ; }
        // Update all registered clients of this shared object, skipping the originator.
        listeners . stream ( ) . filter ( listener -> listener != source ) . forEach ( listener -> { final RTMPConnection con = ( RTMPConnection ) listener ; if ( con . isConnected ( ) ) {
            // Deliver asynchronously on a worker task, same as the owner path.
            SharedObjectService . submitTask ( ( ) -> { Red5 . setConnectionLocal ( con ) ; con . sendSharedObjectMessage ( name , currentVersion , persistent , events ) ; Red5 . setConnectionLocal ( null ) ; } ) ;
        } else {
            log . debug ( "Skipping {} connection: {}" , RTMP . states [ con . getStateCode ( ) ] , con . getId ( ) ) ;
            // If the connection has fully disconnected, drop it from the listener set.
            if ( con . isDisconnected ( ) ) { unregister ( con ) ; } } } ) ;
    } else if ( log . isTraceEnabled ( ) ) { log . trace ( "No sync events to send" ) ; }
public class SuggestedAdUnitServiceLocator {
    /**
     * For the given interface, get the stub implementation.
     * If this service has no port for the given interface, then ServiceException is thrown.
     *
     * @param serviceEndpointInterface the endpoint interface the caller wants a stub for
     * @return the SOAP binding stub for the suggested ad unit service
     * @throws javax.xml.rpc.ServiceException if the interface is unsupported or stub creation fails
     */
    public java . rmi . Remote getPort ( Class serviceEndpointInterface ) throws javax . xml . rpc . ServiceException { } }
    try {
        // Only the SuggestedAdUnitServiceInterface is supported by this locator.
        if ( com . google . api . ads . admanager . axis . v201805 . SuggestedAdUnitServiceInterface . class . isAssignableFrom ( serviceEndpointInterface ) ) {
            com . google . api . ads . admanager . axis . v201805 . SuggestedAdUnitServiceSoapBindingStub _stub = new com . google . api . ads . admanager . axis . v201805 . SuggestedAdUnitServiceSoapBindingStub ( new java . net . URL ( SuggestedAdUnitServiceInterfacePort_address ) , this ) ;
            _stub . setPortName ( getSuggestedAdUnitServiceInterfacePortWSDDServiceName ( ) ) ;
            return _stub ;
        }
    } catch ( java . lang . Throwable t ) {
        // Wrap any stub-construction failure (bad URL, etc.) in the JAX-RPC exception type.
        throw new javax . xml . rpc . ServiceException ( t ) ;
    }
    // Requested interface is not served by this locator.
    throw new javax . xml . rpc . ServiceException ( "There is no stub implementation for the interface: " + ( serviceEndpointInterface == null ? "null" : serviceEndpointInterface . getName ( ) ) ) ;
public class CmsPushButton { /** * Sets the size . < p > * @ param size the size to set */ public void setSize ( I_CmsButton . Size size ) { } }
if ( m_size != null ) { removeStyleName ( m_size . getCssClassName ( ) ) ; } if ( size != null ) { addStyleName ( size . getCssClassName ( ) ) ; } m_size = size ;
public class ServiceInstanceUtils { /** * Validate the ServiceInstance Metadata . * @ param metadata * the service instance metadata map . * @ throws ServiceException */ public static void validateMetadata ( Map < String , String > metadata ) throws ServiceException { } }
for ( String key : metadata . keySet ( ) ) { if ( ! validateRequiredField ( key , metaKeyRegEx ) ) { throw new ServiceException ( ErrorCode . SERVICE_INSTANCE_METAKEY_FORMAT_ERROR , ErrorCode . SERVICE_INSTANCE_METAKEY_FORMAT_ERROR . getMessageTemplate ( ) , key ) ; } }
public class OneDReader {
    /**
     * Records the size of successive runs of white and black pixels in a row, starting at a given
     * point. The values are recorded in the given array, and the number of runs recorded is equal
     * to the size of the array. If the row starts on a white pixel at the given start point, then
     * the first count recorded is the run of white pixels starting from that point; likewise it is
     * the count of a run of black pixels if the row begins on a black pixel at that point.
     *
     * @param row row to count from
     * @param start offset into row to start at
     * @param counters array into which to record counts
     * @throws NotFoundException if counters cannot be filled entirely from row before running out
     *         of pixels
     */
    protected static void recordPattern ( BitArray row , int start , int [ ] counters ) throws NotFoundException { } }
    int numCounters = counters . length ;
    Arrays . fill ( counters , 0 , numCounters , 0 ) ;
    int end = row . getSize ( ) ;
    if ( start >= end ) { throw NotFoundException . getNotFoundInstance ( ) ; }
    // The first run's color is whatever color the start pixel is.
    boolean isWhite = ! row . get ( start ) ;
    int counterPosition = 0 ;
    int i = start ;
    while ( i < end ) {
        if ( row . get ( i ) != isWhite ) {
            // Same color as the current run: extend it.
            counters [ counterPosition ] ++ ;
        } else {
            // Color flipped: advance to the next counter, or stop when the array is full.
            if ( ++ counterPosition == numCounters ) { break ; } else { counters [ counterPosition ] = 1 ; isWhite = ! isWhite ; }
        }
        i ++ ;
    }
    // If we read fully the last section of pixels and filled up our counters -- or filled
    // the last counter but ran off the side of the image, OK. Otherwise, a problem.
    if ( ! ( counterPosition == numCounters || ( counterPosition == numCounters - 1 && i == end ) ) ) { throw NotFoundException . getNotFoundInstance ( ) ; }
public class VariableTranslator { /** * Deserializes variable string values to runtime objects . * @ param pkg workflow package * @ param type variable type * @ param value string value * @ return deserialized object */ public static Object realToObject ( Package pkg , String type , String value ) { } }
if ( StringHelper . isEmpty ( value ) ) return null ; com . centurylink . mdw . variable . VariableTranslator trans = getTranslator ( pkg , type ) ; if ( trans instanceof DocumentReferenceTranslator ) return ( ( DocumentReferenceTranslator ) trans ) . realToObject ( value ) ; else return trans . toObject ( value ) ;
public class SiteMapServlet {
    /**
     * Gets the most recent "last modified" across all views applicable to the given book and
     * accessible to search engines. If any view returns {@code null} from
     * {@link View#getLastModified(javax.servlet.ServletContext, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, com.semanticcms.core.model.Page)},
     * the sitemap overall will not have any last modified time.
     *
     * @return the most recent last-modified instant, or {@code null} if unknown
     */
    static ReadableInstant getLastModified ( final ServletContext servletContext , final HttpServletRequest req , final HttpServletResponse resp , final SortedSet < View > views , final Book book ) throws ServletException , IOException { } }
    assert book . equals ( SemanticCMS . getInstance ( servletContext ) . getPublishedBooks ( ) . get ( book . getBookRef ( ) . getPath ( ) ) ) : "Book not published: " + book ;
    assert book . isAccessible ( ) ;
    // The most recent instant is kept here, but reset to null (permanently, by aborting
    // the traversal) the first time a missing per-page/view last-modified is found.
    final ReadableInstant [ ] result = new ReadableInstant [ 1 ] ;
    CapturePage . traversePagesAnyOrder ( servletContext , req , resp , book . getContentRoot ( ) , CaptureLevel . META ,
        new CapturePage . PageHandler < Boolean > ( ) { @ Override public Boolean handlePage ( Page page ) throws ServletException , IOException {
            // TODO: Chance for more concurrency here by view?
            for ( View view : views ) {
                // Only consider views that robots may index and that apply to this page.
                if ( view . getAllowRobots ( servletContext , req , resp , page ) && view . isApplicable ( servletContext , req , resp , page ) ) {
                    ReadableInstant lastModified = view . getLastModified ( servletContext , req , resp , page ) ;
                    if ( lastModified == null ) {
                        // Unknown for one view: stop searching and report null for the whole book.
                        result [ 0 ] = null ; return false ;
                    } else {
                        // Track the maximum across every page/view pair.
                        if ( result [ 0 ] == null || lastModified . compareTo ( result [ 0 ] ) > 0 ) { result [ 0 ] = lastModified ; }
                    }
                }
            }
            return null ;
        } } ,
        // Traverse every child page ...
        new CapturePage . TraversalEdges ( ) { @ Override public Set < ChildRef > getEdges ( Page page ) { return page . getChildRefs ( ) ; } } ,
        // ... but stay within this book.
        new CapturePage . EdgeFilter ( ) { @ Override public boolean applyEdge ( PageRef childPage ) { return book . getBookRef ( ) . equals ( childPage . getBookRef ( ) ) ; } } ) ;
    return result [ 0 ] ;
public class Visibility {
    /**
     * The default implementation of this method returns a null Animator. Subclasses should
     * override this method to make targets appear with the desired transition. The method
     * should only be called from
     * {@link #onAppear(ViewGroup, TransitionValues, int, TransitionValues, int)}.
     *
     * @param sceneRoot the root of the transition hierarchy
     * @param view the View to make appear; this will be in the target scene's View hierarchy
     *        and will be VISIBLE
     * @param startValues the target values in the start scene
     * @param endValues the target values in the end scene
     * @return an Animator to be started at the appropriate time in the overall transition for
     *         this scene change; a null value means no animation should be run
     */
    @ Nullable public Animator onAppear ( @ NonNull ViewGroup sceneRoot , @ NonNull View view , @ Nullable TransitionValues startValues , @ Nullable TransitionValues endValues ) { } }
    // Intentionally a no-op: the base class animates nothing on appearance.
    return null ;
public class IdentityMap { /** * Returns an iterator for the keys in the map . Remove is supported . Note that the same iterator instance is returned each * time this method is called . Use the { @ link Entries } constructor for nested or multithreaded iteration . */ public Keys < K > keys ( ) { } }
if ( keys == null ) keys = new Keys ( this ) ; else keys . reset ( ) ; return keys ;
public class PagingParams { /** * Gets the number of items to return in a page . * @ param maxTake the maximum number of items to return . * @ return the number of items to return . */ public long getTake ( long maxTake ) { } }
if ( _take == null ) return maxTake ; if ( _take < 0 ) return 0 ; if ( _take > maxTake ) return maxTake ; return _take ;
public class KabschAlignment {
    /**
     * Performs the Kabsch alignment of the two point sets specified prior to this call.
     * Steps: center both sets on their centers of mass, accumulate the correlation
     * matrix R, diagonalize R'R, build the rotation U from the eigenvectors, rotate the
     * second set, and evaluate the RMSD against the first set.
     */
    public void align ( ) { } }
    Matrix tmp ;
    // Get center of gravity of each set and translate both to the origin.
    this . cm1 = new Point3d ( ) ; this . cm2 = new Point3d ( ) ;
    this . cm1 = getCenterOfMass ( p1 , atwt1 ) ; this . cm2 = getCenterOfMass ( p2 , atwt2 ) ;
    for ( int i = 0 ; i < this . npoint ; i ++ ) {
        p1 [ i ] . x = p1 [ i ] . x - this . cm1 . x ; p1 [ i ] . y = p1 [ i ] . y - this . cm1 . y ; p1 [ i ] . z = p1 [ i ] . z - this . cm1 . z ;
        p2 [ i ] . x = p2 [ i ] . x - this . cm2 . x ; p2 [ i ] . y = p2 [ i ] . y - this . cm2 . y ; p2 [ i ] . z = p2 [ i ] . z - this . cm2 . z ;
    }
    // Accumulate the (uniformly weighted) correlation matrix R.
    double [ ] [ ] tR = new double [ 3 ] [ 3 ] ;
    for ( int i = 0 ; i < this . npoint ; i ++ ) { wts [ i ] = 1.0 ; }
    for ( int i = 0 ; i < this . npoint ; i ++ ) {
        tR [ 0 ] [ 0 ] += p1 [ i ] . x * p2 [ i ] . x * wts [ i ] ; tR [ 0 ] [ 1 ] += p1 [ i ] . x * p2 [ i ] . y * wts [ i ] ; tR [ 0 ] [ 2 ] += p1 [ i ] . x * p2 [ i ] . z * wts [ i ] ;
        tR [ 1 ] [ 0 ] += p1 [ i ] . y * p2 [ i ] . x * wts [ i ] ; tR [ 1 ] [ 1 ] += p1 [ i ] . y * p2 [ i ] . y * wts [ i ] ; tR [ 1 ] [ 2 ] += p1 [ i ] . y * p2 [ i ] . z * wts [ i ] ;
        tR [ 2 ] [ 0 ] += p1 [ i ] . z * p2 [ i ] . x * wts [ i ] ; tR [ 2 ] [ 1 ] += p1 [ i ] . z * p2 [ i ] . y * wts [ i ] ; tR [ 2 ] [ 2 ] += p1 [ i ] . z * p2 [ i ] . z * wts [ i ] ;
    }
    double [ ] [ ] R = new double [ 3 ] [ 3 ] ;
    tmp = new Matrix ( tR ) ; R = tmp . transpose ( ) . getArray ( ) ;
    // Build RtR ( = R'R ).
    double [ ] [ ] RtR = new double [ 3 ] [ 3 ] ;
    Matrix jamaR = new Matrix ( R ) ; tmp = tmp . times ( jamaR ) ; RtR = tmp . getArray ( ) ;
    // Eigen-decompose R'R.
    Matrix jamaRtR = new Matrix ( RtR ) ; EigenvalueDecomposition ed = jamaRtR . eig ( ) ;
    double [ ] mu = ed . getRealEigenvalues ( ) ; double [ ] [ ] a = ed . getV ( ) . getArray ( ) ;
    // Jama returns the eigenvalues in increasing order, so swap eigenvalues and vectors
    // to get them in decreasing order.
    double tmp2 = mu [ 2 ] ; mu [ 2 ] = mu [ 0 ] ; mu [ 0 ] = tmp2 ;
    for ( int i = 0 ; i < 3 ; i ++ ) { tmp2 = a [ i ] [ 2 ] ; a [ i ] [ 2 ] = a [ i ] [ 0 ] ; a [ i ] [ 0 ] = tmp2 ; }
    // Make sure that a3 = a1 x a2 (right-handed system).
    a [ 0 ] [ 2 ] = ( a [ 1 ] [ 0 ] * a [ 2 ] [ 1 ] ) - ( a [ 1 ] [ 1 ] * a [ 2 ] [ 0 ] ) ;
    a [ 1 ] [ 2 ] = ( a [ 0 ] [ 1 ] * a [ 2 ] [ 0 ] ) - ( a [ 0 ] [ 0 ] * a [ 2 ] [ 1 ] ) ;
    a [ 2 ] [ 2 ] = ( a [ 0 ] [ 0 ] * a [ 1 ] [ 1 ] ) - ( a [ 0 ] [ 1 ] * a [ 1 ] [ 0 ] ) ;
    // Work out the b vectors: b_j = R a_j / sqrt(mu_j).
    double [ ] [ ] b = new double [ 3 ] [ 3 ] ;
    for ( int i = 0 ; i < 3 ; i ++ ) { for ( int j = 0 ; j < 3 ; j ++ ) { for ( int k = 0 ; k < 3 ; k ++ ) { b [ i ] [ j ] += R [ i ] [ k ] * a [ k ] [ j ] ; } b [ i ] [ j ] = b [ i ] [ j ] / Math . sqrt ( mu [ j ] ) ; } }
    // Normalize b1 and b2, then set b3 = b1 x b2.
    double norm1 = 0. ; double norm2 = 0. ;
    for ( int i = 0 ; i < 3 ; i ++ ) { norm1 += b [ i ] [ 0 ] * b [ i ] [ 0 ] ; norm2 += b [ i ] [ 1 ] * b [ i ] [ 1 ] ; }
    norm1 = Math . sqrt ( norm1 ) ; norm2 = Math . sqrt ( nor2m1 ) ;
    for ( int i = 0 ; i < 3 ; i ++ ) { b [ i ] [ 0 ] = b [ i ] [ 0 ] / norm1 ; b [ i ] [ 1 ] = b [ i ] [ 1 ] / norm2 ; }
    b [ 0 ] [ 2 ] = ( b [ 1 ] [ 0 ] * b [ 2 ] [ 1 ] ) - ( b [ 1 ] [ 1 ] * b [ 2 ] [ 0 ] ) ;
    b [ 1 ] [ 2 ] = ( b [ 0 ] [ 1 ] * b [ 2 ] [ 0 ] ) - ( b [ 0 ] [ 0 ] * b [ 2 ] [ 1 ] ) ;
    b [ 2 ] [ 2 ] = ( b [ 0 ] [ 0 ] * b [ 1 ] [ 1 ] ) - ( b [ 0 ] [ 1 ] * b [ 1 ] [ 0 ] ) ;
    // Get the rotation matrix tU = b a' and then take the transpose into U.
    double [ ] [ ] tU = new double [ 3 ] [ 3 ] ;
    for ( int i = 0 ; i < 3 ; i ++ ) { for ( int j = 0 ; j < 3 ; j ++ ) { for ( int k = 0 ; k < 3 ; k ++ ) { tU [ i ] [ j ] += b [ i ] [ k ] * a [ j ] [ k ] ; } } }
    U = new double [ 3 ] [ 3 ] ;
    for ( int i = 0 ; i < 3 ; i ++ ) { for ( int j = 0 ; j < 3 ; j ++ ) { U [ i ] [ j ] = tU [ j ] [ i ] ; } }
    // Evaluate the RMS error: first rotate the second set of points ...
    this . rp = new Point3d [ this . npoint ] ;
    for ( int i = 0 ; i < this . npoint ; i ++ ) { this . rp [ i ] = new Point3d ( U [ 0 ] [ 0 ] * p2 [ i ] . x + U [ 0 ] [ 1 ] * p2 [ i ] . y + U [ 0 ] [ 2 ] * p2 [ i ] . z , U [ 1 ] [ 0 ] * p2 [ i ] . x + U [ 1 ] [ 1 ] * p2 [ i ] . y + U [ 1 ] [ 2 ] * p2 [ i ] . z , U [ 2 ] [ 0 ] * p2 [ i ] . x + U [ 2 ] [ 1 ] * p2 [ i ] . y + U [ 2 ] [ 2 ] * p2 [ i ] . z ) ; }
    // ... then evaluate the RMS deviation against the first set.
    double rms = 0. ;
    for ( int i = 0 ; i < this . npoint ; i ++ ) { rms += ( p1 [ i ] . x - this . rp [ i ] . x ) * ( p1 [ i ] . x - this . rp [ i ] . x ) + ( p1 [ i ] . y - this . rp [ i ] . y ) * ( p1 [ i ] . y - this . rp [ i ] . y ) + ( p1 [ i ] . z - this . rp [ i ] . z ) * ( p1 [ i ] . z - this . rp [ i ] . z ) ; }
    this . rmsd = Math . sqrt ( rms / this . npoint ) ;
public class DefaultVfs {
    /**
     * Locates the JAR file URL containing the given resource URL, if any.
     *
     * @param url the resource URL
     * @return a URL pointing at the enclosing JAR, or null when none could be found
     */
    private URL findJarForResource ( URL url ) { } }
    // If the file part of the URL is itself a URL, that inner URL may point to the JAR;
    // keep unwrapping until the file part is no longer a parseable URL.
    try {
        long startTime = System . currentTimeMillis ( ) ;
        while ( true ) {
            url = new URL ( url . getFile ( ) ) ;
            // Time-bounded guard so a pathological URL cannot spin forever.
            // NOTE(review): busy-looping until MAX_LIMIT elapses looks unintentional —
            // confirm whether an iteration-count bound was meant instead.
            if ( System . currentTimeMillis ( ) - startTime > MAX_LIMIT ) { break ; }
        }
    } catch ( MalformedURLException expected ) {
        // This will happen at some point and serves as the normal break in the loop.
    }
    // Look for the .jar extension and chop off everything after it.
    StringBuilder jarUrl = new StringBuilder ( url . toExternalForm ( ) ) ;
    int index = jarUrl . lastIndexOf ( ".jar" ) ;
    if ( index >= 0 ) { jarUrl . setLength ( index + 4 ) ; } else { return null ; }
    // Try to open the candidate and test whether it really is a JAR.
    try {
        URL testUrl = new URL ( jarUrl . toString ( ) ) ;
        if ( this . isJar ( testUrl ) ) { return testUrl ; }
        // WebLogic fix: check whether the URL's file exists in the filesystem.
        jarUrl . replace ( 0 , jarUrl . length ( ) , testUrl . getFile ( ) ) ;
        File file = new File ( jarUrl . toString ( ) ) ;
        // The file name might be URL-encoded.
        if ( ! file . exists ( ) ) { file = new File ( URLEncoder . encode ( jarUrl . toString ( ) , "UTF-8" ) ) ; }
        if ( file . exists ( ) ) { testUrl = file . toURI ( ) . toURL ( ) ; if ( isJar ( testUrl ) ) { return testUrl ; } }
    } catch ( MalformedURLException e ) {
        log . warn ( "Invalid JAR URL: " + jarUrl ) ;
    } catch ( UnsupportedEncodingException e ) {
        // UTF-8 is mandated by the JLS, so this cannot actually happen.
        throw new ZealotException ( "Unsupported encoding? UTF-8? That's unpossible." ) ;
    }
    return null ;
public class PositiveDepthConstraintCheck { /** * Checks to see if a single point meets the constraint . * @ param viewA View of the 3D point from the first camera . Calibrated coordinates . * @ param viewB View of the 3D point from the second camera . Calibrated coordinates . * @ param fromAtoB Transform from the B to A camera frame . * @ return If the triangulated point appears in front of both cameras . */ public boolean checkConstraint ( Point2D_F64 viewA , Point2D_F64 viewB , Se3_F64 fromAtoB ) { } }
triangulate . triangulate ( viewA , viewB , fromAtoB , P ) ; if ( P . z > 0 ) { SePointOps_F64 . transform ( fromAtoB , P , P ) ; return P . z > 0 ; } return false ;
public class Manager {
    /**
     * Reconfigures the manager: re-applies the domain and messaging type to the messaging
     * client, resumes listening to the DM queue when connected, and then restores all
     * instance states. It is NOT invoked DIRECTLY by iPojo anymore.
     */
    @ Override public void reconfigure ( ) { } }
    // Update the messaging client first, so instance restoration below sees the
    // new messaging configuration.
    this . logger . info ( "Reconfiguration requested in the DM." ) ;
    if ( this . messagingClient != null ) {
        this . messagingClient . setDomain ( this . domain ) ;
        this . messagingClient . switchMessagingType ( this . messagingType ) ;
        try {
            // Only re-arm the DM listener when the client actually reconnected.
            if ( this . messagingClient . isConnected ( ) ) this . messagingClient . listenToTheDm ( ListenerCommand . START ) ;
        } catch ( IOException e ) {
            this . logger . log ( Level . WARNING , "Cannot start to listen to the debug queue" , e ) ;
        }
    }
    // We must update instance states AFTER switching the messaging configuration.
    restoreAllInstances ( ) ;
    this . logger . info ( "The DM was successfully (re)configured." ) ;
public class DataGrid { /** * Sets the border attribute for the HTML table tag . * @ param border * @ jsptagref . attributedescription The border attribute for the HTML table tag . * @ jsptagref . attributesyntaxvalue < i > string _ dir < / i > * @ netui : attribute required = " false " rtexprvalue = " true " description = " The border attribute for the HTML table tag . " */ public void setBorder ( String border ) { } }
_tableState . registerAttribute ( AbstractHtmlState . ATTR_GENERAL , HtmlConstants . BORDER , border ) ;
public class TmdbCompanies { /** * This method is used to retrieve the basic information about a production company on TMDb . * @ param companyId * @ return * @ throws MovieDbException */ public Company getCompanyInfo ( int companyId ) throws MovieDbException { } }
TmdbParameters parameters = new TmdbParameters ( ) ; parameters . add ( Param . ID , companyId ) ; URL url = new ApiUrl ( apiKey , MethodBase . COMPANY ) . buildUrl ( parameters ) ; String webpage = httpTools . getRequest ( url ) ; try { return MAPPER . readValue ( webpage , Company . class ) ; } catch ( IOException ex ) { throw new MovieDbException ( ApiExceptionType . MAPPING_FAILED , "Failed to get company information" , url , ex ) ; }
public class TrieMap {
    /**
     * {@inheritDoc}
     * Lazily creates and caches the values view collection.
     */
    @ Override public Collection < V > values ( ) { } }
    // Read the cached view once; create and publish it only on first use.
    // NOTE(review): appears to rely on a benign race — two threads may each build a
    // Values view, and the loser's instance is discarded; confirm Values is stateless.
    final Collection < V > v = values ; return v != null ? v : ( values = new Values ( ) ) ;
public class AtomSymbol {
    /**
     * Create a new atom symbol (from this symbol) but with the specified alignment.
     *
     * @param alignment element alignment
     * @return new atom symbol sharing this symbol's element, adjuncts, annotation
     *         adjuncts and hull, with only the alignment changed
     */
    AtomSymbol alignTo ( SymbolAlignment alignment ) { } }
    // Immutable-style copy: every field is reused except the alignment.
    return new AtomSymbol ( element , adjuncts , annotationAdjuncts , alignment , hull ) ;
public class RpcLoggerFactory {
    /**
     * Gets a logger from the RPC log space, tagged with the application name.
     *
     * @param name the logger name
     * @param appname the application name; an empty string is substituted when null
     * @return the logger implementation, or null when {@code name} is null or empty
     *         — NOTE(review): callers must handle this null return
     */
    public static org . slf4j . Logger getLogger ( String name , String appname ) { } }
    // Look up the rpc logging configuration from the "com/alipay/sofa/rpc/log" space
    // and obtain the corresponding logger; "log" is the default appended suffix.
    if ( name == null || name . isEmpty ( ) ) { return null ; }
    Map < String , String > properties = new HashMap < String , String > ( ) ;
    properties . put ( APPNAME , appname == null ? "" : appname ) ;
    SpaceId spaceId = new SpaceId ( RPC_LOG_SPACE ) ;
    // Tag the space with the app name so different apps get separate logger spaces.
    if ( appname != null ) { spaceId . withTag ( APPNAME , appname ) ; }
    return LoggerSpaceManager . getLoggerBySpace ( name , spaceId , properties ) ;
public class MathUtil {
    /**
     * Find the next larger number with all ones.
     * Classic bit smearing for signed 32-bit integers: the highest set bit is
     * propagated into every lower position. Valid for positive integers only
     * (-1 otherwise).
     *
     * @param x original integer
     * @return next number &gt;= x with all bits below its highest set bit also set
     */
    public static int nextAllOnesInt(int x) {
        // Doubling shifts (1, 2, 4, 8, 16) smear the top set bit across all 32 positions.
        for (int shift = 1; shift <= 16; shift <<= 1) {
            x |= x >>> shift;
        }
        return x;
    }
}
public class Windowing { /** * Adapts an iterator to an iterator showing predecessors of the contained * elements . This iterator always yields an alias to the same queue , beware * of aliasing problems . e . g : * < code > * iterable : [ 1,2,3,4 ] , trailSize : 3 - > * [ [ Nothing , Nothing , Just 1 ] , [ Nothing Just 1 , Just 2 ] , [ Just 1 , Just 2 , Just 3 ] , [ Just 2 , Just 3 , Just 4 ] ] * < / code > * @ param < T > the adapted iterator element type * @ param trailSize the trail size * @ param iterable the iterable to be adapted * @ return the adapted iterator */ public static < T > Iterator < Queue < Optional < T > > > trails ( int trailSize , Iterable < T > iterable ) { } }
dbc . precondition ( iterable != null , "cannot create a trails iterator from a null iterable" ) ; return new TrailsIterator < > ( iterable . iterator ( ) , trailSize , Function . identity ( ) ) ;
public class BouncyCastleCertProcessingFactory { /** * Returns a chain of X509Certificate ' s that are instances of X509CertificateObject * This is related to http : / / bugzilla . globus . org / globus / show _ bug . cgi ? id = 4933 * @ param certs input certificate chain * @ return a new chain where all X509Certificate ' s are instances of X509CertificateObject * @ throws GeneralSecurityException when failing to get load certificate from encoding */ protected X509Certificate [ ] getX509CertificateObjectChain ( X509Certificate [ ] certs ) throws GeneralSecurityException { } }
X509Certificate [ ] bcCerts = new X509Certificate [ certs . length ] ; for ( int i = 0 ; i < certs . length ; i ++ ) { if ( ! ( certs [ i ] instanceof X509CertificateObject ) ) { bcCerts [ i ] = CertificateLoadUtil . loadCertificate ( new ByteArrayInputStream ( certs [ i ] . getEncoded ( ) ) ) ; } else { bcCerts [ i ] = certs [ i ] ; } } return bcCerts ;
public class Log { /** * Various writeXXX ( ) methods are used for logging statements . */ void writeStatement ( Session session , String s ) { } }
try { dbLogWriter . writeLogStatement ( session , s ) ; } catch ( IOException e ) { throw Error . error ( ErrorCode . FILE_IO_ERROR , logFileName ) ; } if ( maxLogSize > 0 && dbLogWriter . size ( ) > maxLogSize ) { database . logger . needsCheckpoint = true ; }
public class CmsTemplateFinder { /** * Returns a bean representing the given template resource . < p > * @ param cms the cms context to use for VFS operations * @ param resource the template resource * @ return bean representing the given template resource * @ throws CmsException if something goes wrong */ private CmsClientTemplateBean getTemplateBean ( CmsObject cms , CmsResource resource ) throws CmsException { } }
CmsProperty titleProp = cms . readPropertyObject ( resource , CmsPropertyDefinition . PROPERTY_TITLE , false ) ; CmsProperty descProp = cms . readPropertyObject ( resource , CmsPropertyDefinition . PROPERTY_DESCRIPTION , false ) ; CmsProperty imageProp = cms . readPropertyObject ( resource , CmsPropertyDefinition . PROPERTY_TEMPLATE_IMAGE , false ) ; CmsProperty selectValueProp = cms . readPropertyObject ( resource , CmsPropertyDefinition . PROPERTY_TEMPLATE_PROVIDER , false ) ; String sitePath = cms . getSitePath ( resource ) ; String templateValue = sitePath ; if ( ! selectValueProp . isNullProperty ( ) && ! CmsStringUtil . isEmptyOrWhitespaceOnly ( selectValueProp . getValue ( ) ) ) { String selectValue = selectValueProp . getValue ( ) ; CmsMacroResolver resolver = new CmsMacroResolver ( ) ; resolver . addMacro ( MACRO_TEMPLATEPATH , sitePath ) ; templateValue = resolver . resolveMacros ( selectValue ) ; } return new CmsClientTemplateBean ( titleProp . getValue ( ) , descProp . getValue ( ) , templateValue , imageProp . getValue ( ) ) ;
public class TimeBasedKeys { /** * Create a new generator that uses the specified number of bits for the counter portion of the keys . * @ param bitsUsedInCounter the number of bits in the counter portion of the keys ; must be a positive number for which theere * is enough space to left shift without overflowing . * @ return the generator instance ; never null */ public static TimeBasedKeys create ( int bitsUsedInCounter ) { } }
CheckArg . isPositive ( bitsUsedInCounter , "bitsUsedInCounter" ) ; int maxAvailableBitsToShift = Long . numberOfLeadingZeros ( System . currentTimeMillis ( ) ) ; CheckArg . isLessThan ( bitsUsedInCounter , maxAvailableBitsToShift , "bitsUsedInCounter" ) ; return new TimeBasedKeys ( ( short ) bitsUsedInCounter ) ;
public class Priority { /** * Converts this priority to its two - character CUA code . * @ return the CUA code ( e . g . " B1 " for " 4 " ) or null if the priority cannot * be converted to a CUA code */ public String toCuaPriority ( ) { } }
if ( value == null || value < 1 || value > 9 ) { return null ; } int letter = ( ( value - 1 ) / 3 ) + 'A' ; int number = ( ( value - 1 ) % 3 ) + 1 ; return ( char ) letter + "" + number ;
public class CommsByteBufferPool { /** * Gets a CommsByteBuffer from the pool . * @ return CommsString */ public synchronized CommsByteBuffer allocate ( ) { } }
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "allocate" ) ; // Remove a CommsByteBuffer from the pool CommsByteBuffer buff = ( CommsByteBuffer ) pool . remove ( ) ; // If the buffer is null then there was none available in the pool // So create a new one if ( buff == null ) { if ( tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "No buffer available from pool - creating a new one" ) ; buff = createNew ( ) ; } if ( tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "allocate" , buff ) ; return buff ;
public class TableDemo { /** * Initializes the frame by creating its contents . */ private void init ( ) { } }
setTitle ( "Validation Framework Test" ) ; setDefaultCloseOperation ( WindowConstants . EXIT_ON_CLOSE ) ; // Add contents JPanel contentPane = new JPanel ( new MigLayout ( "fill, wrap 1" , "[]" , "[grow]unrelated[]" ) ) ; setContentPane ( contentPane ) ; contentPane . add ( new JScrollPane ( table ) , "grow" ) ; installValidators ( table ) ; contentPane . add ( changeButton , "align right" ) ; // Set size pack ( ) ; Dimension size = getSize ( ) ; size . width += 100 ; setMinimumSize ( size ) ; // Set location Dimension screenSize = Toolkit . getDefaultToolkit ( ) . getScreenSize ( ) ; setLocation ( ( screenSize . width - size . width ) / 2 , ( screenSize . height - size . height ) / 3 ) ;
public class ProcessManagerImpl { /** * - - - - - Process Properties */ public synchronized Properties getProcessProperties ( final String processName ) { } }
final NamedProcess namedProcess = processes . get ( processName ) ; return namedProcess == null ? null : namedProcess . getProperties ( ) ;
public class StreamUtils {

    /**
     * Determines if a byte array is compressed by checking for the GZip magic
     * header. The java.util.zip GZip implementation does not expose the GZip
     * header so it is difficult to determine if a string is compressed.
     * Copied from Helix GZipCompressionUtil.
     *
     * @param bytes an array of bytes
     * @return true if the array is compressed or false otherwise
     */
    public static boolean isCompressed(byte[] bytes) {
        if (bytes == null || bytes.length < 2) {
            return false;
        }
        // GZIP_MAGIC (0x8b1f) is stored little-endian: low byte first.
        int magic = GZIPInputStream.GZIP_MAGIC;
        return bytes[0] == (byte) magic && bytes[1] == (byte) (magic >>> 8);
    }
}
public class BeanUtil { /** * 使用Map填充Bean对象 * @ param < T > Bean类型 * @ param map Map * @ param bean Bean * @ param isIgnoreError 是否忽略注入错误 * @ return Bean */ public static < T > T fillBeanWithMap ( Map < ? , ? > map , T bean , boolean isIgnoreError ) { } }
return fillBeanWithMap ( map , bean , false , isIgnoreError ) ;
public class Predictors { /** * Gets a { @ code Predictor } that puts all of its probability mass on * { @ code outputValue } . * @ param outputValue * @ return */ public static < I , O > Predictor < I , O > constant ( O outputValue ) { } }
Map < O , Double > outputProbabilities = Maps . newHashMap ( ) ; outputProbabilities . put ( outputValue , 1.0 ) ; return new ConstantPredictor < I , O > ( outputProbabilities ) ;
public class UpdateIdentityPoolRequest { /** * Optional key : value pairs mapping provider names to provider app IDs . * @ param supportedLoginProviders * Optional key : value pairs mapping provider names to provider app IDs . * @ return Returns a reference to this object so that method calls can be chained together . */ public UpdateIdentityPoolRequest withSupportedLoginProviders ( java . util . Map < String , String > supportedLoginProviders ) { } }
setSupportedLoginProviders ( supportedLoginProviders ) ; return this ;
public class TaskQueue { /** * Fetch a TaskQueue by name . If the TaskQueue doesn ' t already exist , * creates the TaskQueue . * @ param logger An optional logger . * @ param strQueueName The unique name for the queue . * @ return The TaskQueue associated with the specified name . */ public static TaskQueue getInstance ( ILogger logger , String strQueueName ) { } }
if ( strQueueName == null ) { return null ; } TaskQueue taskQueue = QUEUE_MAP . get ( strQueueName ) ; if ( taskQueue == null ) { taskQueue = new TaskQueue ( logger , strQueueName ) ; QUEUE_MAP . put ( strQueueName , taskQueue ) ; } return taskQueue ;