signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class RelationshipTuple { /** * TODO : Consider getting rid of this method */ public String getObjectPID ( ) { } }
if ( object != null && ! isLiteral && object . startsWith ( "info:fedora/" ) ) { String PID = object . substring ( 12 ) ; return PID ; } return null ;
public class Evaluators { /** * Closes all EvaluatorManager instances managed . */ @ Override public void close ( ) { } }
LOG . log ( Level . FINER , "Closing the evaluators - begin" ) ; final List < EvaluatorManager > evaluatorsCopy ; synchronized ( this ) { evaluatorsCopy = new ArrayList < > ( this . evaluators . values ( ) ) ; } for ( final EvaluatorManager evaluatorManager : evaluatorsCopy ) { if ( ! evaluatorManager . isClosedOrClosing ( ) ) { LOG . log ( Level . WARNING , "Unclean shutdown of evaluator {0}" , evaluatorManager . getId ( ) ) ; evaluatorManager . close ( ) ; } } LOG . log ( Level . FINER , "Closing the evaluators - end" ) ;
public class IzouModule {

    /**
     * Used to log messages at fatal level
     *
     * @param msg the message
     * @param e the Throwable
     */
    @Override
    public void fatal(String msg, Throwable e) {
        // Delegate to the underlying logger; passing FQCN lets the logging
        // framework report the real caller location instead of this wrapper.
        log.logIfEnabled(FQCN, Level.FATAL, null, msg, e);
    }
}
public class BooleanConditionBuilder { /** * Returns the { @ link BooleanCondition } represented by this builder . * @ return a new boolean condition */ @ Override public BooleanCondition build ( ) { } }
return new BooleanCondition ( boost , must . stream ( ) . map ( ConditionBuilder :: build ) . collect ( toList ( ) ) , should . stream ( ) . map ( ConditionBuilder :: build ) . collect ( toList ( ) ) , not . stream ( ) . map ( ConditionBuilder :: build ) . collect ( toList ( ) ) , maxClauses ) ;
public class PlatformDefaultImpl {

    /**
     * Sets a SQL NULL for the given statement parameter.
     *
     * @param ps the prepared statement to modify
     * @param index 1-based index of the parameter to set
     * @param sqlType SQL type code of the parameter (java.sql.Types)
     * @throws SQLException if the JDBC driver rejects the call
     * @see Platform#setNullForStatement(PreparedStatement, int, int)
     */
    public void setNullForStatement(PreparedStatement ps, int index, int sqlType) throws SQLException {
        // Default platform behavior: plain JDBC setNull with the declared SQL type.
        ps.setNull(index, sqlType);
    }
}
public class SyncGroupsInner {

    /**
     * Cancels a sync group synchronization.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param databaseName The name of the database on which the sync group is hosted.
     * @param syncGroupName The name of the sync group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return an {@link Observable} that completes when the cancellation request has been issued (the body is always {@code Void})
     */
    public Observable<Void> cancelSyncAsync(String resourceGroupName, String serverName, String databaseName, String syncGroupName) {
        // Delegate to the ServiceResponse variant and strip the response
        // wrapper so subscribers only observe the (empty) body.
        return cancelSyncWithServiceResponseAsync(resourceGroupName, serverName, databaseName, syncGroupName).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }
}
public class SuffixModifierUtils { /** * Get modifier suffix if fullName contains any otherwise returns null . * In contains no validation of input parameters as it assumes the validation * has been already done by { @ link # removeModifierSuffix ( String ) } * @ param fullName * @ param baseName as returned by { @ link # removeModifierSuffix ( String ) } * @ return modifier suffix or null if no suffix is present */ public static String getModifierSuffix ( String fullName , String baseName ) { } }
if ( fullName . equals ( baseName ) ) { return null ; } int indexOfOpeningBracket = fullName . indexOf ( MODIFIER_OPENING_TOKEN ) ; return fullName . substring ( indexOfOpeningBracket , fullName . length ( ) ) ;
public class MsgpackIOUtil { /** * Creates a msgpack pipe from a byte array . */ public static Pipe newPipe ( byte [ ] data , int offset , int length , boolean numeric ) throws IOException { } }
ArrayBufferInput in = new ArrayBufferInput ( data , offset , length ) ; return newPipe ( in , numeric ) ;
public class InterceptorMetaDataFactory {

    /**
     * Processes interceptor methods on the bean class.
     *
     * <p>Builds the interceptor proxy map for the bean class itself and, for
     * bean types that collect lifecycle methods, records each lifecycle
     * interceptor method that is declared directly on the bean class.
     *
     * @throws EJBConfigurationException
     */
    private void processBeanInterceptors() // d630717
                    throws EJBConfigurationException {
        ivBeanInterceptorProxyMap = createInterceptorProxyMap(ivEjbClass, -1); // F743-1751

        // Not all bean types collect bean methods. For those
        // that do, find the methods that are declared on the bean.
        if (ivBeanLifecycleMethods != null) {
            for (InterceptorMethodKind kind : InterceptorMethodKind.values()) {
                int mid = kind.getMethodID();
                // Kinds with no method id (-1) have no lifecycle slot to fill.
                if (mid != -1) {
                    List<InterceptorProxy> proxyList = ivBeanInterceptorProxyMap.get(kind);
                    if (proxyList != null) {
                        for (InterceptorProxy proxy : proxyList) {
                            Method m = proxy.ivInterceptorMethod;
                            // Only record a method declared directly on the bean
                            // class; the first such method per kind wins.
                            if (m.getDeclaringClass() == ivEjbClass) {
                                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                    Tr.debug(tc, "found bean " + LifecycleInterceptorWrapper.TRACE_NAMES[mid] + " method: " + m);
                                ivBeanLifecycleMethods[mid] = m;
                                break;
                            }
                        }
                    }
                }
            }
        }
    }
}
public class ReflectionUtils { /** * < p > Work out method ' s bundle key . * < h2 > Bundle key resolution < / h2 > * < p > Bundle key is generated as follows : * < ul > * < li > If there are no { @ link com . github . rodionmoiseev . c10n . C10NKey } annotations , key is the < code > Class FQDN ' . ' Method Name < / code > . * If method has arguments , method name is post - fixed with argument types delimited * with ' _ ' , e . g . < code > myMethod _ String _ int < / code > < / li > * < li > If declaring interface or any of the super - interfaces contain { @ link com . github . rodionmoiseev . c10n . C10NKey } * annotation < code > C < / code > then * < ul > * < li > For methods without { @ link com . github . rodionmoiseev . c10n . C10NKey } annotation , key becomes < code > C ' . ' Method Name < / code > < / li > * < li > For methods with { @ link com . github . rodionmoiseev . c10n . C10NKey } annotation < code > M < / code > , key is < code > C ' . ' M < / code > < / li > * < li > For methods with { @ link com . github . rodionmoiseev . c10n . C10NKey } annotation < code > M < / code > , value for which starts with * a ' . ' , the key is just < code > M < / code > ( i . e . key is assumed to be absolute ) < / li > * < / ul > * < / li > * < li > If no declaring interfaces have { @ link com . github . rodionmoiseev . c10n . C10NKey } annotation , but a method contains annotation * < code > M < / code > , then key is just < code > M < / code > . < / li > * < li > Lastly , if global key prefix is specified , it is always prepended to the final key , delimited by ' . ' < / li > * < / ul > * < h2 > Looking for c10n key in parent interfaces < / h2 > * < p > The lookup of c10n key in parent interfaces is done breadth - first , starting from the declaring class . * That is , if the declaring class does not have c10n key , all interfaces it extends are checked in declaration * order first . 
If no key is found , this check is repeated for each of the super interfaces in the same order . * @ param keyPrefix global key prefix * @ param method method to extract the key from * @ return method c10n bundle key ( not null ) */ public static String getC10NKey ( String keyPrefix , Method method ) { } }
String key = getKeyAnnotationBasedKey ( method ) ; if ( null == key ) { // fallback to default key based on class FQDN and method name key = ReflectionUtils . getDefaultKey ( method ) ; } if ( keyPrefix . length ( ) > 0 ) { key = keyPrefix + "." + key ; } return key ;
public class Coercions {

    /**
     * Coerces a value to the given primitive number class
     *
     * <p>Dispatches on the runtime type of {@code pValue}: null/empty string,
     * Character, Boolean, exact target type, Number, and String each take
     * their own path; anything else logs an error and coerces to zero.
     *
     * @param pValue the value to coerce (may be null)
     * @param pClass the target primitive number class
     * @param pLogger logger used to report coercion errors
     * @throws ELException propagated from the underlying coercion overloads
     */
    public static Number coerceToPrimitiveNumber(Object pValue, Class pClass, Logger pLogger) throws ELException {
        if (pValue == null || "".equals(pValue)) {
            // Null and empty string both coerce to zero of the target type.
            return coerceToPrimitiveNumber(0, pClass);
        } else if (pValue instanceof Character) {
            char val = ((Character) pValue).charValue();
            // Characters are coerced through their numeric (short) value.
            return coerceToPrimitiveNumber((short) val, pClass);
        } else if (pValue instanceof Boolean) {
            // Booleans are not numbers: log and fall back to zero.
            if (pLogger.isLoggingError()) {
                pLogger.logError(Constants.BOOLEAN_TO_NUMBER, pValue, pClass.getName());
            }
            return coerceToPrimitiveNumber(0, pClass);
        } else if (pValue.getClass() == pClass) {
            // Already exactly the requested type; no conversion needed.
            return (Number) pValue;
        } else if (pValue instanceof Number) {
            return coerceToPrimitiveNumber((Number) pValue, pClass);
        } else if (pValue instanceof String) {
            try {
                return coerceToPrimitiveNumber((String) pValue, pClass);
            } catch (Exception exc) {
                // Unparseable string: log and coerce to zero rather than fail.
                if (pLogger.isLoggingError()) {
                    pLogger.logError(Constants.STRING_TO_NUMBER_EXCEPTION, (String) pValue, pClass.getName());
                }
                return coerceToPrimitiveNumber(0, pClass);
            }
        } else {
            // Unsupported source type: log and coerce to zero.
            if (pLogger.isLoggingError()) {
                pLogger.logError(Constants.COERCE_TO_NUMBER, pValue.getClass().getName(), pClass.getName());
            }
            return coerceToPrimitiveNumber(0, pClass);
        }
    }
}
public class RequestQueue { /** * Starts the dispatchers in this queue . */ public void start ( ) { } }
stop ( ) ; // Make sure any currently running dispatchers are stopped . // Create the cache dispatcher and start it . mCacheDispatcher = new CacheDispatcher ( mCacheQueue , mNetworkQueue , mCache , mDelivery ) ; mCacheDispatcher . start ( ) ; // Create network dispatchers ( and corresponding threads ) up to the pool size . for ( int i = 0 ; i < mDispatchers . length ; i ++ ) { NetworkDispatcher networkDispatcher = new NetworkDispatcher ( mNetworkQueue , mNetwork , mCache , mDelivery ) ; mDispatchers [ i ] = networkDispatcher ; networkDispatcher . start ( ) ; }
public class MediaservicesInner {

    /**
     * Synchronizes Storage Account Keys.
     * Synchronizes storage account keys for a storage account associated with the Media Service account.
     *
     * @param resourceGroupName The name of the resource group within the Azure subscription.
     * @param accountName The Media Services account name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return an {@link Observable} that completes when the synchronization request has been issued (the body is always {@code Void})
     */
    public Observable<Void> syncStorageKeysAsync(String resourceGroupName, String accountName) {
        // Delegate to the ServiceResponse variant and strip the response
        // wrapper so subscribers only observe the (empty) body.
        return syncStorageKeysWithServiceResponseAsync(resourceGroupName, accountName).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }
}
public class JXLayer { /** * { @ inheritDoc } */ @ Override public void doLayout ( ) { } }
if ( contentPane != null ) { setPreferredSize ( contentPane . getPreferredSize ( ) ) ; contentPane . setLocation ( 0 , 0 ) ; contentPane . setSize ( getWidth ( ) , getHeight ( ) ) ; } if ( glassPane != null ) { glassPane . setLocation ( 0 , 0 ) ; glassPane . setSize ( getWidth ( ) , getHeight ( ) ) ; }
public class OSchemaHelper {

    /**
     * Create if required abstract {@link OClass}
     *
     * @param className name of a class to create
     * @param superClasses list of superclasses
     * @return this helper
     */
    public OSchemaHelper oAbstractClass(String className, String... superClasses) {
        // Delegate to oClass with the abstract flag set to true.
        return oClass(className, true, superClasses);
    }
}
public class AmazonSQSMessagingClientWrapper {

    /**
     * Calls <code>getQueueUrl</code> and wraps <code>AmazonClientException</code>
     *
     * @param getQueueUrlRequest
     *            Container for the necessary parameters to execute the
     *            getQueueUrl service method on AmazonSQS.
     * @return The response from the GetQueueUrl service method, as returned by
     *         AmazonSQS, which will include queue's URL
     * @throws JMSException wrapping any <code>AmazonClientException</code> raised by the call
     */
    public GetQueueUrlResult getQueueUrl(GetQueueUrlRequest getQueueUrlRequest) throws JMSException {
        try {
            prepareRequest(getQueueUrlRequest);
            return amazonSQSClient.getQueueUrl(getQueueUrlRequest);
        } catch (AmazonClientException e) {
            // Translate AWS client failures into the JMS exception hierarchy.
            throw handleException(e, "getQueueUrl");
        }
    }
}
public class ResourceManager {

    /**
     * Returns an InputStream from this manager's classloader for the given path.
     *
     * <p>The lookup runs inside a privileged action so it succeeds even when
     * the calling code lacks the permissions needed to read the classpath.
     *
     * @param fullyQualifiedPath resource path handed to {@code getResourceAsStream}
     * @return the resource stream, or {@code null} if the resource is not found
     */
    protected InputStream getInputStreamFromClasspath(final String fullyQualifiedPath) {
        return AccessController.doPrivileged(new PrivilegedAction<InputStream>() {
            public InputStream run() {
                return _loader.getResourceAsStream(fullyQualifiedPath);
            }
        });
    }
}
public class CoronaTaskTracker {

    /**
     * Main service loop. Will stay in this loop forever.
     *
     * <p>Computes this tracker's total compute capacity once up front, then
     * repeatedly sleeps for the heartbeat interval, measures free capacity,
     * and reports both to the ClusterManager. Heartbeat failures are handled
     * per exception type: interruption exits the loop, a disallowed node or
     * too many connect retries shuts the tracker down, and safe mode blocks
     * until the ClusterManager is available again.
     *
     * @throws IOException propagated from configuration / client setup
     */
    private void heartbeatToClusterManager() throws IOException {
        CoronaConf coronaConf = new CoronaConf(fConf);
        // Advertised CPU count: configured override, else the value the
        // resource calculator reports; fall back to 1 when unavailable.
        int numCpu = coronaConf.getInt("mapred.coronatasktracker.num.cpus",
            resourceCalculatorPlugin.getNumProcessors());
        if (numCpu == ResourceCalculatorPlugin.UNAVAILABLE) {
            numCpu = 1;
        }
        LOG.info("Will report " + numCpu + " CPUs");
        // Physical memory in MB (bytes / 1024 / 1024).
        int totalMemoryMB = (int) (resourceCalculatorPlugin.getPhysicalMemorySize() / 1024D / 1024);
        ComputeSpecs total = new ComputeSpecs((short) numCpu);
        // Network capacity is hard-coded at 100 MBps (see matching TODO below).
        total.setNetworkMBps((short) 100);
        total.setMemoryMB(totalMemoryMB);
        total.setDiskGB((int) (getDiskSpace(false) / 1024D / 1024 / 1024));
        // host:port the action server is reachable at; prefer the address,
        // fall back to the hostname.
        String appInfo = null;
        if (getLocalHostAddress() != null) {
            appInfo = getLocalHostAddress() + ":" + actionServerAddr.getPort();
        } else {
            appInfo = getLocalHostname() + ":" + actionServerAddr.getPort();
        }
        Map<ResourceType, String> resourceInfos =
            new EnumMap<ResourceType, String>(ResourceType.class);
        resourceInfos.put(ResourceType.MAP, appInfo);
        resourceInfos.put(ResourceType.REDUCE, appInfo);
        resourceInfos.put(ResourceType.JOBTRACKER, appInfo);
        while (running && !shuttingDown) {
            try {
                // NOTE(review): 'now' is never read afterwards.
                long now = System.currentTimeMillis();
                Thread.sleep(heartbeatCMInterval);
                float cpuUsage = resourceCalculatorPlugin.getCpuUsage();
                if (cpuUsage == ResourceCalculatorPlugin.UNAVAILABLE) {
                    cpuUsage = 0;
                }
                // Free CPUs = total CPUs scaled by current idle fraction.
                ComputeSpecs free = new ComputeSpecs((short) (numCpu * cpuUsage / 100D));
                // TODO find free network.
                free.setNetworkMBps((short) 100);
                int availableMemoryMB =
                    (int) (resourceCalculatorPlugin.getAvailablePhysicalMemorySize() / 1024D / 1024);
                free.setMemoryMB(availableMemoryMB);
                // Report the tighter of data-disk and log-disk free space, in GB.
                long freeDiskSpace = getDiskSpace(true);
                long freeLogDiskSpace = getLogDiskFreeSpace();
                free.setDiskGB((int) (Math.min(freeDiskSpace, freeLogDiskSpace) / 1024D / 1024 / 1024));
                // TT puts its MR specific host:port tuple here.
                ClusterNodeInfo node = new ClusterNodeInfo(this.getName(),
                    clusterManagerCallbackServerAddr, total);
                node.setFree(free);
                node.setResourceInfos(resourceInfos);
                LOG.debug("ClusterManager heartbeat: " + node.toString());
                if (client == null) {
                    initializeClusterManagerClient();
                }
                NodeHeartbeatResponse nodeHeartbeatResponse = client.nodeHeartbeat(node);
                if (nodeHeartbeatResponse.restartFlag) {
                    // CM asked this tracker to restart; exit the process.
                    LOG.fatal("Get CM notice to exit");
                    System.exit(0);
                }
                // Successful heartbeat: reset retry counter and timestamps.
                clusterManagerConnectRetries = 0;
                lastCMHeartbeat = System.currentTimeMillis();
                markUnresponsiveTasks();
                killOverflowingTasks();
                // we've cleaned up, resume normal operation
                if (!acceptNewTasks && isIdle()) {
                    acceptNewTasks = true;
                }
                // The check below may not be required every iteration but we are
                // erring on the side of caution here. We have seen many cases where
                // the call to jetty's getLocalPort() returns different values at
                // different times. Being a real paranoid here.
                checkJettyPort();
            } catch (InterruptedException ie) {
                LOG.info("Interrupted. Closing down.");
                return;
            } catch (DisallowedNode ex) {
                LOG.error("CM has excluded node, shutting down TT");
                shutdown();
            } catch (SafeModeException e) {
                LOG.info("Cluster Manager is in Safe Mode");
                try {
                    ClusterManagerAvailabilityChecker.waitWhileClusterManagerInSafeMode(coronaConf);
                } catch (IOException ie) {
                    LOG.error("Could not wait while Cluster Manager is in Safe Mode ", ie);
                }
            } catch (TException ex) {
                if (!shuttingDown) {
                    LOG.error("Error connecting to CM. " + clusterManagerConnectRetries
                        + "th retry. Retry in 10 seconds.", ex);
                    closeClusterManagerClient();
                    if (++clusterManagerConnectRetries >= MAX_CM_CONNECT_RETRIES) {
                        LOG.error("Cannot connect to CM " + clusterManagerConnectRetries
                            + " times. Shutting down TT");
                        shutdown();
                    }
                    try {
                        Thread.sleep(10000L);
                    } catch (InterruptedException ie) {
                        // Ignored: the retry sleep is best-effort.
                    }
                    // CM may be unreachable because it is in safe mode; wait it out.
                    ClusterManagerAvailabilityChecker.waitWhileClusterManagerInSafeMode(coronaConf);
                }
            }
        }
    }
}
public class DiagnosticOrder { /** * syntactic sugar */ public DiagnosticOrderItemComponent addItem ( ) { } }
DiagnosticOrderItemComponent t = new DiagnosticOrderItemComponent ( ) ; if ( this . item == null ) this . item = new ArrayList < DiagnosticOrderItemComponent > ( ) ; this . item . add ( t ) ; return t ;
public class CPOptionValuePersistenceImpl { /** * Returns the first cp option value in the ordered set where CPOptionId = & # 63 ; . * @ param CPOptionId the cp option ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching cp option value , or < code > null < / code > if a matching cp option value could not be found */ @ Override public CPOptionValue fetchByCPOptionId_First ( long CPOptionId , OrderByComparator < CPOptionValue > orderByComparator ) { } }
List < CPOptionValue > list = findByCPOptionId ( CPOptionId , 0 , 1 , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
public class ScalingTriggerMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param scalingTrigger the object to marshall; must not be {@code null}
     * @param protocolMarshaller target marshaller that receives each bound field
     * @throws SdkClientException if the argument is {@code null} or marshalling fails
     */
    public void marshall(ScalingTrigger scalingTrigger, ProtocolMarshaller protocolMarshaller) {
        if (scalingTrigger == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(scalingTrigger.getCloudWatchAlarmDefinition(), CLOUDWATCHALARMDEFINITION_BINDING);
        } catch (Exception e) {
            // Surface any marshalling failure as an SDK client error, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Execution {

    /**
     * Trigger a new checkpoint on the task of this execution.
     *
     * @param checkpointId of the checkpoint to trigger
     * @param timestamp of the checkpoint to trigger
     * @param checkpointOptions of the checkpoint to trigger
     * @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline
     *                                to fire any registered event-time timers
     */
    public void triggerSynchronousSavepoint(long checkpointId, long timestamp, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) {
        // A synchronous savepoint goes through the shared checkpoint-trigger
        // path; the supplied options carry the savepoint-specific semantics.
        triggerCheckpointHelper(checkpointId, timestamp, checkpointOptions, advanceToEndOfEventTime);
    }
}
public class MinimalResolver {

    /**
     * Implement resolve
     *
     * <p>Assigns a position to the identifier and returns the identifier
     * itself as the resulting selector (the return statement shows that
     * {@code Identifier} is assignable to {@code Selector}).
     */
    public Selector resolve(Identifier id, PositionAssigner positionAssigner) {
        positionAssigner.assign(id);
        return id;
    }
}
public class ComputationGraph { /** * This method ADDS additional TrainingListener to existing listeners * @ param listeners Listeners to add */ @ Override public void addListeners ( TrainingListener ... listeners ) { } }
if ( this . trainingListeners == null ) { setListeners ( listeners ) ; return ; } else { List < TrainingListener > newListeners = new ArrayList < > ( this . trainingListeners ) ; // To avoid immutable list issues Collections . addAll ( newListeners , listeners ) ; setListeners ( newListeners ) ; } if ( solver != null ) { solver . setListeners ( this . trainingListeners ) ; }
public class AbstractCommonShapeFileReader { /** * Set the reading position , excluding the header . * @ param recordIndex is the index of the next record to read . * @ param byteIndex is the index of the next byte to read ( excluding the header ) . * @ throws IOException in case of error . */ protected void setReadingPosition ( int recordIndex , int byteIndex ) throws IOException { } }
if ( this . seekEnabled ) { this . nextExpectedRecordIndex = recordIndex ; this . buffer . position ( byteIndex ) ; } else { throw new SeekOperationDisabledException ( ) ; }
public class AbstractGrpcServerFactory { /** * Configures limits such as max message sizes that should be used by the server . * @ param builder The server builder to configure . */ protected void configureLimits ( final T builder ) { } }
final Integer maxInboundMessageSize = this . properties . getMaxInboundMessageSize ( ) ; if ( maxInboundMessageSize != null ) { builder . maxInboundMessageSize ( maxInboundMessageSize ) ; }
public class RepositoryFileApi {

    /**
     * Get file from repository. Allows you to receive information about file in repository like name, size, content.
     * Note that file content is Base64 encoded.
     *
     * <pre><code>GitLab Endpoint: GET /projects/:id/repository/files</code></pre>
     *
     * @param filePath (required) - Full path to new file. Ex. lib/class.rb
     * @param projectId (required) - the project ID
     * @param ref (required) - The name of branch, tag or commit
     * @return a RepositoryFile instance with the file info
     * @throws GitLabApiException if any exception occurs
     * @deprecated Will be removed in version 5.0
     */
    @Deprecated
    protected RepositoryFile getFileV3(String filePath, Integer projectId, String ref) throws GitLabApiException {
        // Build the V3 query form: both file_path and ref are mandatory.
        Form form = new Form();
        addFormParam(form, "file_path", filePath, true);
        addFormParam(form, "ref", ref, true);
        Response response = get(Response.Status.OK, form.asMap(), "projects", projectId, "repository", "files");
        return (response.readEntity(RepositoryFile.class));
    }
}
public class NamespaceContextParser { /** * Parses all variable definitions and adds those to the bean definition * builder as property value . * @ param builder the target bean definition builder . * @ param element the source element . */ private void parseNamespaceDefinitions ( BeanDefinitionBuilder builder , Element element ) { } }
Map < String , String > namespaces = new LinkedHashMap < String , String > ( ) ; for ( Element namespace : DomUtils . getChildElementsByTagName ( element , "namespace" ) ) { namespaces . put ( namespace . getAttribute ( "prefix" ) , namespace . getAttribute ( "uri" ) ) ; } if ( ! namespaces . isEmpty ( ) ) { builder . addPropertyValue ( "namespaceMappings" , namespaces ) ; }
public class CmsAttributeValueView {

    /**
     * Shows a validation error message.<p>
     *
     * @param message the error message
     */
    public void setErrorMessage(String message) {
        // NOTE(review): the message is injected as raw HTML — callers must
        // ensure it is escaped/safe. TODO confirm upstream escaping.
        m_messageText.setInnerHTML(message);
        // Switch the widget into its error styling and remember the state.
        addStyleName(formCss().hasError());
        m_hasError = true;
    }
}
public class DateTimeFormatter {

    /**
     * Returns a locale specific date-time formatter for the ISO chronology.
     *
     * <p>This returns a formatter that will format or parse a date-time.
     * The exact format pattern used varies by locale, which is determined from
     * the formatter: the formatter returned directly by this method uses the
     * default FORMAT locale, controllable afterwards via
     * {@link DateTimeFormatter#withLocale(Locale) withLocale(Locale)}.
     * The localized pattern is looked up lazily: this {@code DateTimeFormatter}
     * holds the style required and the locale, resolving the pattern on demand.
     *
     * <p>The returned formatter has a chronology of ISO set to ensure dates in
     * other calendar systems are correctly converted. It has no override zone
     * and uses the {@link ResolverStyle#SMART SMART} resolver style.
     *
     * @param dateTimeStyle the formatter style to obtain, not null
     * @return the date-time formatter, not null
     */
    public static DateTimeFormatter ofLocalizedDateTime(FormatStyle dateTimeStyle) {
        Objects.requireNonNull(dateTimeStyle, "dateTimeStyle");
        // The same style is used for both the date and the time parts.
        return new DateTimeFormatterBuilder().appendLocalized(dateTimeStyle, dateTimeStyle)
                .toFormatter(ResolverStyle.SMART, IsoChronology.INSTANCE);
    }
}
public class HawkbitCommonUtil {

    /**
     * Returns the input text within html bold tags {@code <b>..</b>}.
     *
     * @param text is the text to be converted in to Bold
     * @return null if the input text param is null, otherwise the text
     *         surrounded by {@code <b>...</b>} tags
     */
    public static String getBoldHTMLText(final String text) {
        if (text == null) {
            return null;
        }
        return "<b>" + text + "</b>";
    }
}
public class XMLParser {

    /**
     * Find the data between these XML tags.
     *
     * @param strData The XML code to find the tags in.
     * @param strTag The tag to find.
     * @return the text between {@code <strTag>} and {@code </strTag>}, or
     *         {@code null} if the opening or closing tag is missing
     */
    public String getTagData(String strData, String strTag) {
        int iStartData = strData.indexOf('<' + strTag + '>');
        if (iStartData == -1)
            return null;
        // Skip past "<tag>" itself: '<' + tag + '>' is tag.length() + 2 chars.
        iStartData = iStartData + strTag.length() + 2;
        // Search for the closing tag only AFTER the opening tag, so a stray
        // "</tag>" earlier in the data cannot produce a negative range.
        int iEndData = strData.indexOf("</" + strTag + '>', iStartData);
        // BUG FIX: the original re-tested iStartData here, so a missing
        // closing tag fell through to substring() and threw
        // StringIndexOutOfBoundsException instead of returning null.
        if (iEndData == -1)
            return null;
        return strData.substring(iStartData, iEndData);
    }
}
public class HndlI18nRequest {

    /**
     * <p>Reveals language preference and adds preferences
     * into cookie if need.</p>
     *
     * <p>Resolution order: explicit request parameters, then cookies, then the
     * client locale matched against the configured preference list (full
     * country+language match, then language-only match, then the default).
     * Whenever preferences came from parameters or were resolved from the
     * locale, the result is written back to cookies.</p>
     *
     * @param pRequestData Request Data
     * @param pLangs lang. list
     * @param pDecSeps dec. sep. list
     * @param pDecGrSeps dec. gr. sep. list
     * @param pLangPrefs lang. pref. list
     * @param pIsFirstReq if first request
     * @return lang. preferences
     * @throws Exception - an exception
     */
    public final LangPreferences revealLangPreferences(final IRequestData pRequestData,
            final List<Languages> pLangs, final List<DecimalSeparator> pDecSeps,
            final List<DecimalGroupSeparator> pDecGrSeps,
            final List<LangPreferences> pLangPrefs,
            final boolean pIsFirstReq) throws Exception {
        // Debug switch: class-specific flag plus the 5001..5003 level window.
        boolean isDbgSh = this.logger.getDbgSh(this.getClass())
            && this.logger.getDbgFl() < 5001 && this.logger.getDbgCl() > 5003;
        LangPreferences lpf = null;
        boolean needSetCookie = false;
        // check request changing preferences:
        String lang = pRequestData.getParameter("lang");
        String decSep = pRequestData.getParameter("decSep");
        String decGrSep = pRequestData.getParameter("decGrSep");
        String digInGr = pRequestData.getParameter("digInGr");
        if (pIsFirstReq || isDbgSh) {
            this.logger.debug(null, HndlI18nRequest.class,
                "Request's lang/decSep/decGrSep/digInGr: " + lang + "/" + decSep
                    + "/" + decGrSep + "/" + digInGr);
        }
        // All four parameters must be present and non-empty to count as a
        // user-initiated change; otherwise ignore them entirely.
        if (lang == null || lang.length() == 0 || decSep == null
            || decSep.length() == 0 || decGrSep == null || decGrSep.length() == 0
            || digInGr == null || digInGr.length() == 0) {
            lang = null;
            decSep = null;
            decGrSep = null;
            digInGr = null;
        } else {
            needSetCookie = true;
        }
        // No request parameters: fall back to the values stored in cookies.
        if (decGrSep == null && decSep == null && lang == null) {
            lang = pRequestData.getCookieValue("lang");
            decSep = pRequestData.getCookieValue("decSep");
            decGrSep = pRequestData.getCookieValue("decGrSep");
            digInGr = pRequestData.getCookieValue("digInGr");
            if (pIsFirstReq || isDbgSh) {
                this.logger.debug(null, HndlI18nRequest.class,
                    "Client's cookie lang/decSep/decGrSep/digInGr: " + lang + "/"
                        + decSep + "/" + decGrSep + "/" + digInGr);
            }
        }
        if (decGrSep != null && decSep != null && lang != null && digInGr != null) {
            if (decGrSep.equals(decSep)) {
                // Identical separators are invalid; keep lpf null so the
                // locale-based fallback below takes over.
                if (pIsFirstReq || isDbgSh) {
                    this.logger.debug(null, HndlI18nRequest.class,
                        "Separators are same!! decSep/decGrSep: " + decSep);
                }
            } else {
                // try to match from cookies or changed by user:
                lpf = new LangPreferences();
                lpf.setDigitsInGroup(Integer.parseInt(digInGr));
                for (Languages ln : pLangs) {
                    if (ln.getItsId().equals(lang)) {
                        lpf.setLang(ln);
                        break;
                    }
                }
                // Each lookup must resolve; any miss invalidates the whole set.
                if (lpf.getLang() == null) {
                    lpf = null;
                } else {
                    for (DecimalSeparator ds : pDecSeps) {
                        if (ds.getItsId().equals(decSep)) {
                            lpf.setDecimalSep(ds);
                            break;
                        }
                    }
                    if (lpf.getDecimalSep() == null) {
                        lpf = null;
                    } else {
                        for (DecimalGroupSeparator dgs : pDecGrSeps) {
                            if (dgs.getItsId().equals(decGrSep)) {
                                lpf.setDecimalGroupSep(dgs);
                                break;
                            }
                        }
                        if (lpf.getDecimalGroupSep() == null) {
                            lpf = null;
                        }
                    }
                }
            }
        }
        if (lpf == null) {
            // try match client's locale, if not - default or the first:
            String ccountry = null;
            String clang = null;
            if (pRequestData.getLocale() != null) {
                ccountry = pRequestData.getLocale().getCountry();
                clang = pRequestData.getLocale().getLanguage();
                if (pIsFirstReq || isDbgSh) {
                    this.logger.debug(null, HndlI18nRequest.class,
                        "Client prefers lang/country: " + clang + "/" + ccountry);
                }
            }
            LangPreferences lpfMf = null; // full country + language match
            LangPreferences lpfMl = null; // language-only match
            LangPreferences lpfDef = null; // declared default (or first seen)
            for (LangPreferences lpft : pLangPrefs) {
                if (lpft.getCountry().getItsId().equals(ccountry)
                    && lpft.getLang().getItsId().equals(clang)) {
                    lpfMf = lpft;
                    break;
                }
                if (lpft.getLang().getItsId().equals(clang)) {
                    lpfMl = lpft;
                }
                if (lpft.getIsDefault()) {
                    lpfDef = lpft;
                } else if (lpfDef == null) {
                    lpfDef = lpft;
                }
            }
            if (lpfMf != null) {
                lpf = lpfMf;
                if (pIsFirstReq || isDbgSh) {
                    this.logger.debug(null, HndlI18nRequest.class,
                        "Full match lang/decSep/decGrSep/digInGr: "
                            + lpf.getLang().getItsId() + "/"
                            + lpf.getDecimalSep().getItsId() + "/"
                            + lpf.getDecimalGroupSep().getItsId() + "/"
                            + lpf.getDigitsInGroup());
                }
            } else if (lpfMl != null) {
                lpf = lpfMl;
                if (pIsFirstReq || isDbgSh) {
                    this.logger.debug(null, HndlI18nRequest.class,
                        "Lang match lang/decSep/decGrSep/digInGr: "
                            + lpf.getLang().getItsId() + "/"
                            + lpf.getDecimalSep().getItsId() + "/"
                            + lpf.getDecimalGroupSep().getItsId() + "/"
                            + lpf.getDigitsInGroup());
                }
            } else {
                lpf = lpfDef;
                if (pIsFirstReq || isDbgSh) {
                    this.logger.debug(null, HndlI18nRequest.class,
                        "No match, default lang/decSep/decGrSep/digInGr: "
                            + lpf.getLang().getItsId() + "/"
                            + lpf.getDecimalSep().getItsId() + "/"
                            + lpf.getDecimalGroupSep().getItsId() + "/"
                            + lpf.getDigitsInGroup());
                }
            }
            // Locale-resolved preferences are persisted like explicit ones.
            needSetCookie = true;
        }
        if (needSetCookie) {
            pRequestData.setCookieValue("digInGr", lpf.getDigitsInGroup().toString());
            pRequestData.setCookieValue("lang", lpf.getLang().getItsId());
            pRequestData.setCookieValue("decSep", lpf.getDecimalSep().getItsId());
            pRequestData.setCookieValue("decGrSep", lpf.getDecimalGroupSep().getItsId());
            if (pIsFirstReq || isDbgSh) {
                this.logger.debug(null, HndlI18nRequest.class,
                    "Cookie is set lang/decSep/decGrSep: "
                        + lpf.getLang().getItsId() + "/"
                        + lpf.getDecimalSep().getItsId() + "/"
                        + lpf.getDecimalGroupSep().getItsId());
            }
        }
        return lpf;
    }
}
public class GatewayMicroService {

    /**
     * The cache store component.
     *
     * <p>Registers the Elasticsearch-backed cache store component and its
     * client settings; the {@code ${apiman.es.*}} placeholders are resolved
     * from configuration elsewhere.
     */
    protected void registerCacheStoreComponent() {
        // All client settings are keyed under the component's property name.
        String componentPropName = GatewayConfigProperties.COMPONENT_PREFIX + ICacheStoreComponent.class.getSimpleName();
        setConfigProperty(componentPropName, ESCacheStoreComponent.class.getName());
        setConfigProperty(componentPropName + ".client.type", "jest");
        setConfigProperty(componentPropName + ".client.protocol", "${apiman.es.protocol}");
        setConfigProperty(componentPropName + ".client.host", "${apiman.es.host}");
        setConfigProperty(componentPropName + ".client.port", "${apiman.es.port}");
        setConfigProperty(componentPropName + ".client.index", "apiman_cache");
        setConfigProperty(componentPropName + ".client.username", "${apiman.es.username}");
        setConfigProperty(componentPropName + ".client.password", "${apiman.es.password}");
    }
}
public class CLI {
    /**
     * Main method to do Named Entity tagging.
     *
     * @param inputStream the input stream containing the content to tag (NAF/KAF)
     * @param outputStream the output stream providing the named entities
     * @throws IOException exception if problems in input or output streams
     * @throws JDOMException if xml formatting problems
     */
    public final void annotate(final InputStream inputStream, final OutputStream outputStream) throws IOException, JDOMException {
        BufferedReader breader = new BufferedReader(new InputStreamReader(inputStream, UTF_8));
        BufferedWriter bwriter = new BufferedWriter(new OutputStreamWriter(outputStream, UTF_8));
        // read KAF document from inputstream
        KAFDocument kaf = KAFDocument.createFromStream(breader);
        // load parameters into a properties
        String model = parsedArguments.getString(MODEL);
        String outputFormat = parsedArguments.getString("outputFormat");
        String lexer = parsedArguments.getString("lexer");
        String dictTag = parsedArguments.getString("dictTag");
        String dictPath = parsedArguments.getString("dictPath");
        String clearFeatures = parsedArguments.getString("clearFeatures");
        // language parameter: explicit CLI value wins, otherwise the one declared
        // in the NAF document header is used
        String lang = null;
        if (parsedArguments.getString("language") != null) {
            lang = parsedArguments.getString("language");
            // mismatch is only warned about, not treated as an error
            if (!kaf.getLang().equalsIgnoreCase(lang)) {
                System.err.println("Language parameter in NAF and CLI do not match!!");
            }
        } else {
            lang = kaf.getLang();
        }
        Properties properties = setAnnotateProperties(model, lang, lexer, dictTag, dictPath, clearFeatures);
        // register this processor (name derived from the model file, plus version/commit)
        // in the NAF header, with begin/end timestamps around the annotation run
        KAFDocument.LinguisticProcessor newLp = kaf.addLinguisticProcessor("entities",
                IXA_PIPE_NERC + Files.getNameWithoutExtension(model), version + "-" + commit);
        newLp.setBeginTimestamp();
        Annotate annotator = new Annotate(properties);
        annotator.annotateNEsToKAF(kaf);
        newLp.setEndTimestamp();
        // serialize to the requested output format; anything other than the
        // recognized values falls back to NAF/KAF XML
        String kafToString = null;
        if (outputFormat.equalsIgnoreCase("conll03")) {
            kafToString = annotator.annotateNEsToCoNLL2003(kaf);
        } else if (outputFormat.equalsIgnoreCase("conll02")) {
            kafToString = annotator.annotateNEsToCoNLL2002(kaf);
        } else if (outputFormat.equalsIgnoreCase("opennlp")) {
            kafToString = annotator.annotateNEsToOpenNLP(kaf);
        } else {
            kafToString = kaf.toString();
        }
        bwriter.write(kafToString);
        bwriter.close();
        breader.close();
    }
}
public class OmemoStore { /** * Generate ' count ' new PreKeys beginning with id ' startId ' . * These preKeys are published and can be used by contacts to establish sessions with us . * @ param startId start id * @ param count how many keys do we want to generate * @ return Map of new preKeys */ public TreeMap < Integer , T_PreKey > generateOmemoPreKeys ( int startId , int count ) { } }
return keyUtil ( ) . generateOmemoPreKeys ( startId , count ) ;
public class SyslogChecker {
    /**
     * Throttled entry point for the syslog check: synchronized so that only one
     * check is performed on behalf of all involved threads.
     *
     * @param sourceAttribute source attribute identifying the data source
     * @param schema database schema to check against
     * @param databaseObject the Mithra database object being synchronized
     */
    public synchronized void checkAndWaitForSyslogSynchronized(Object sourceAttribute, String schema, MithraDatabaseObject databaseObject) {
        long now = System.currentTimeMillis();
        // Only run the (potentially blocking) check once the throttle window has
        // elapsed. NOTE(review): nextTimeToCheck is not advanced here — presumably
        // checkAndWaitForSyslog updates it; confirm, otherwise every call after the
        // first elapsed window would re-check.
        if (now > nextTimeToCheck) {
            this.checkAndWaitForSyslog(sourceAttribute, schema, databaseObject);
        }
    }
}
public class BaseMapServlet { /** * Send an error to the client with an exception . * @ param httpServletResponse the http response to send the error to * @ param e the error that occurred */ protected final void error ( final HttpServletResponse httpServletResponse , final Throwable e ) { } }
httpServletResponse . setContentType ( "text/plain" ) ; httpServletResponse . setStatus ( HttpStatus . INTERNAL_SERVER_ERROR . value ( ) ) ; try ( PrintWriter out = httpServletResponse . getWriter ( ) ) { out . println ( "Error while processing request:" ) ; LOGGER . error ( "Error while processing request" , e ) ; } catch ( IOException ex ) { throw ExceptionUtils . getRuntimeException ( ex ) ; }
public class DestinationManager { /** * Gets the link destination from the set of destinations * @ param linkName * @ return */ public final LinkHandler getLink ( String linkName ) { } }
LinkTypeFilter filter = new LinkTypeFilter ( ) ; return ( LinkHandler ) linkIndex . findByName ( linkName , filter ) ;
public class MurckoFragmenter { /** * This returns the frameworks and ring systems from a Murcko fragmentation . * To get frameworks , ring systems and side chains seperately , use the * respective functions * @ return a String [ ] of the fragments . * @ see # getRingSystems ( ) * @ see # getRingSystemsAsContainers ( ) * @ see # getFrameworks ( ) * @ see # getFrameworksAsContainers ( ) */ @ Override public String [ ] getFragments ( ) { } }
List < String > allfrags = new ArrayList < String > ( ) ; allfrags . addAll ( getSmilesFromAtomContainers ( frameMap . values ( ) ) ) ; allfrags . addAll ( getSmilesFromAtomContainers ( ringMap . values ( ) ) ) ; return allfrags . toArray ( new String [ ] { } ) ;
public class Instrumented { /** * Updates a timer only if it is defined . * @ param timer an Optional & lt ; { @ link com . codahale . metrics . Timer } & gt ; * @ param duration * @ param unit */ public static void updateTimer ( Optional < Timer > timer , final long duration , final TimeUnit unit ) { } }
timer . transform ( new Function < Timer , Timer > ( ) { @ Override public Timer apply ( @ Nonnull Timer input ) { input . update ( duration , unit ) ; return input ; } } ) ;
public class RouteTablesInner { /** * Create or updates a route table in a specified resource group . * @ param resourceGroupName The name of the resource group . * @ param routeTableName The name of the route table . * @ param parameters Parameters supplied to the create or update route table operation . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < RouteTableInner > createOrUpdateAsync ( String resourceGroupName , String routeTableName , RouteTableInner parameters , final ServiceCallback < RouteTableInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , routeTableName , parameters ) , serviceCallback ) ;
public class Strings {
    /**
     * Substitutes every occurrence of a sub-string inside a string.
     * <p>
     * Note: the supplied buffer is NOT reset — output is appended to whatever
     * it already contains.
     * <p>
     * Fix: an empty {@code from} previously caused an infinite loop, because
     * {@code indexOf("", end)} matches at every position without advancing.
     * It is now treated as "nothing to replace".
     *
     * @param buff string buffer to use for substitution (buffer is not reset)
     * @param from string to substitute from
     * @param to string to substitute to
     * @param string string to look for {@code from} in
     * @return the buffer's content after substitution
     */
    public static String subst(final StringBuffer buff, final String from, final String to, final String string) {
        // Guard against the degenerate empty pattern (previously looped forever).
        if (from.isEmpty()) {
            return buff.append(string).toString();
        }
        int begin = 0;
        int end = 0;
        while ((end = string.indexOf(from, end)) != -1) {
            // copy the unmatched prefix, then the replacement
            buff.append(string, begin, end);
            buff.append(to);
            // continue scanning just past the matched occurrence
            begin = end + from.length();
            end = begin;
        }
        // copy whatever remains after the last match
        buff.append(string.substring(begin));
        return buff.toString();
    }
}
public class IntegrationMarshaller {
    /**
     * Marshalls the given {@link Integration} into the protocol representation,
     * writing each field through its static marshalling-info binding.
     *
     * @param integration the object to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller that receives the fields
     * @throws SdkClientException if {@code integration} is null or marshalling fails
     */
    public void marshall(Integration integration, ProtocolMarshaller protocolMarshaller) {
        if (integration == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per modeled field; binding constants carry the
            // wire names/locations for each field.
            protocolMarshaller.marshall(integration.getType(), TYPE_BINDING);
            protocolMarshaller.marshall(integration.getHttpMethod(), HTTPMETHOD_BINDING);
            protocolMarshaller.marshall(integration.getUri(), URI_BINDING);
            protocolMarshaller.marshall(integration.getConnectionType(), CONNECTIONTYPE_BINDING);
            protocolMarshaller.marshall(integration.getConnectionId(), CONNECTIONID_BINDING);
            protocolMarshaller.marshall(integration.getCredentials(), CREDENTIALS_BINDING);
            protocolMarshaller.marshall(integration.getRequestParameters(), REQUESTPARAMETERS_BINDING);
            protocolMarshaller.marshall(integration.getRequestTemplates(), REQUESTTEMPLATES_BINDING);
            protocolMarshaller.marshall(integration.getPassthroughBehavior(), PASSTHROUGHBEHAVIOR_BINDING);
            protocolMarshaller.marshall(integration.getContentHandling(), CONTENTHANDLING_BINDING);
            protocolMarshaller.marshall(integration.getTimeoutInMillis(), TIMEOUTINMILLIS_BINDING);
            protocolMarshaller.marshall(integration.getCacheNamespace(), CACHENAMESPACE_BINDING);
            protocolMarshaller.marshall(integration.getCacheKeyParameters(), CACHEKEYPARAMETERS_BINDING);
            protocolMarshaller.marshall(integration.getIntegrationResponses(), INTEGRATIONRESPONSES_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class GetMetricStatisticsResult { /** * The data points for the specified metric . * @ return The data points for the specified metric . */ public java . util . List < Datapoint > getDatapoints ( ) { } }
if ( datapoints == null ) { datapoints = new com . amazonaws . internal . SdkInternalList < Datapoint > ( ) ; } return datapoints ;
public class MongoDBUtils {
    /**
     * Builds the MongoDB document for a compound (embedded) primary key,
     * recursing into nested {@code @Embedded} attributes.
     *
     * @param m the entity metadata
     * @param id the compound key instance whose fields are extracted
     * @param compoundKey the embeddable type describing the key
     * @param metaModel metamodel used to resolve nested embeddables
     * @return the compound key columns as a {@link BasicDBObject}
     */
    public static BasicDBObject getCompoundKeyColumns(EntityMetadata m, Object id, EmbeddableType compoundKey, MetamodelImpl metaModel) {
        BasicDBObject compoundKeyObj = new BasicDBObject();
        Set<Attribute> attribs = compoundKey.getDeclaredAttributes();
        // NOTE(review): 'fields' appears unused below; the original comment says
        // "To ensure order" — presumably the reflective call has a side effect or
        // this is leftover code; confirm before removing.
        Field[] fields = m.getIdAttribute().getBindableJavaType().getDeclaredFields(); // To ensure order.
        for (Attribute attr : attribs) {
            Field f = (Field) attr.getJavaMember();
            // skip transient/static members — they are not persisted
            if (!ReflectUtils.isTransientOrStatic(f)) {
                if (f.getAnnotation(Embedded.class) != null) {
                    // nested embeddable: recurse and store the sub-document
                    EmbeddableType emb = metaModel.embeddable(f.getType());
                    Object val = PropertyAccessorHelper.getObject(id, f);
                    BasicDBObject dbVal = getCompoundKeyColumns(m, val, emb, metaModel);
                    compoundKeyObj.put(((AbstractAttribute) attr).getJPAColumnName(), dbVal);
                } else {
                    // plain column: convert the field value to its Mongo representation
                    compoundKeyObj.put(((AbstractAttribute) attr).getJPAColumnName(),
                            populateValue(PropertyAccessorHelper.getObject(id, (Field) attr.getJavaMember()),
                                    ((AbstractAttribute) attr).getBindableJavaType()));
                }
            }
        }
        return compoundKeyObj;
    }
}
public class Humanize { /** * Returns a { @ link MaskFormat } instance for the current thread . * @ param mask * The pattern mask * @ return a { @ link MaskFormat } instance */ public static MaskFormat maskFormat ( final String mask ) { } }
MaskFormat maskFmt = context . get ( ) . getMaskFormat ( ) ; maskFmt . setMask ( mask ) ; return maskFmt ;
public class Notification { /** * When a notification is sent out when a metric violates the trigger threshold , set this notification active for that trigger , metric combination * @ param triggerThe Trigger that caused this notification * @ param metricThe metric that caused this notification * @ param active Whether to set the notification to active */ public void setActiveForTriggerAndMetric ( Trigger trigger , Metric metric , boolean active ) { } }
String key = _hashTriggerAndMetric ( trigger , metric ) ; this . activeStatusByTriggerAndMetric . put ( key , active ) ;
public class ZWaveMultiLevelSensorCommandClass {
    /**
     * Handles an incoming application command for the multi-level sensor command
     * class: extracts the command byte at {@code offset} and dispatches on it.
     * Only SENSOR_MULTI_LEVEL_REPORT is fully processed; the GET/SUPPORTED
     * commands are acknowledged as unimplemented.
     *
     * {@inheritDoc}
     */
    @Override
    public void handleApplicationCommandRequest(SerialMessage serialMessage, int offset, int endpoint) {
        logger.trace("Handle Message Sensor Multi Level Request");
        logger.debug(String.format("Received Sensor Multi Level Request for Node ID = %d", this.getNode().getNodeId()));
        // command id is the first payload byte at the given offset
        int command = serialMessage.getMessagePayloadByte(offset);
        switch (command) {
            case SENSOR_MULTI_LEVEL_GET:
            case SENSOR_MULTI_LEVEL_SUPPORTED_GET:
            case SENSOR_MULTI_LEVEL_SUPPORTED_REPORT:
                // recognized but intentionally not handled
                logger.warn(String.format("Command 0x%02X not implemented.", command));
                return;
            case SENSOR_MULTI_LEVEL_REPORT:
                logger.trace("Process Multi Level Sensor Report");
                logger.debug(String.format("Sensor Multi Level report from nodeId = %d", this.getNode().getNodeId()));
                // TODO: sensor type not used for anything currently
                // TODO: should extend to support filtering of sensor results based on type
                int sensorTypeCode = serialMessage.getMessagePayloadByte(offset + 1);
                logger.debug(String.format("Sensor Type = (0x%02x)", sensorTypeCode));
                // remaining payload bytes (after command + sensor type) encode the value
                byte[] valueData = Arrays.copyOfRange(serialMessage.getMessagePayload(), offset + 2, serialMessage.getMessagePayload().length);
                BigDecimal value = extractValue(valueData);
                // publish the decoded value to all registered event listeners
                ZWaveEvent zEvent = new ZWaveEvent(ZWaveEventType.SENSOR_EVENT, this.getNode().getNodeId(), endpoint, value);
                this.getController().notifyEventListeners(zEvent);
                break;
            default:
                logger.warn(String.format("Unsupported Command 0x%02X for command class %s (0x%02X).", command, this.getCommandClass().getLabel(), this.getCommandClass().getKey()));
        }
    }
}
public class RadixTreeImpl {
    /**
     * Recursively visits the tree following the supplied prefix, calling the
     * visitor on the node whose key exactly matches the remaining prefix.
     *
     * @param prefix the key or prefix to search in the tree
     * @param visitor the visitor that will be called if a node with the key is found
     * @param parent the parent of {@code node}
     * @param node the node from which to continue the search
     */
    private <R> void visit(String prefix, Visitor<T, R> visitor, RadixTreeNode<T> parent, RadixTreeNode<T> node) {
        int numberOfMatchingCharacters = node.getNumberOfMatchingCharacters(prefix);
        // exact match: the node key consumes the whole remaining prefix
        if (numberOfMatchingCharacters == prefix.length() && numberOfMatchingCharacters == node.getKey().length()) {
            visitor.visit(prefix, parent, node);
        } else if (node.getKey().equals("") == true // either we are at the root
                || (numberOfMatchingCharacters < prefix.length() && numberOfMatchingCharacters >= node.getKey().length())) {
            // ... or the node key is fully consumed but prefix remains: descend
            String newText = prefix.substring(numberOfMatchingCharacters, prefix.length());
            for (RadixTreeNode<T> child : node.getChildern()) {
                // at most one child can start with the next character — recurse there
                if (child.getKey().startsWith(newText.charAt(0) + "")) {
                    visit(newText, visitor, node, child);
                    break;
                }
            }
        }
    }
}
public class LibertySseEventSinkContextProvider { /** * / * ( non - Javadoc ) * @ see org . apache . cxf . jaxrs . ext . ContextProvider # createContext ( org . apache . cxf . message . Message ) */ @ Override public SseEventSink createContext ( Message message ) { } }
MessageBodyWriter < OutboundSseEvent > writer = new OutboundSseEventBodyWriter ( ServerProviderFactory . getInstance ( message ) , message . getExchange ( ) ) ; LibertySseEventSinkImpl impl = new LibertySseEventSinkImpl ( writer , message ) ; message . put ( SseEventSink . class . getName ( ) , impl ) ; // treat SSE methods like async methods AsyncResponseImpl ar = new AsyncResponseImpl ( message ) ; message . put ( AsyncResponseImpl . class . getName ( ) , ar ) ; return impl ;
public class ClassReflectionIndex { /** * Get a collection of methods declared on this object . * @ return the ( possibly empty ) collection of all declared methods */ public Collection < Method > getMethods ( ) { } }
final Collection < Method > methods = new ArrayList < Method > ( ) ; for ( Map . Entry < String , Map < ParamList , Map < Class < ? > , Method > > > entry : this . methods . entrySet ( ) ) { final Map < ParamList , Map < Class < ? > , Method > > nameMap = entry . getValue ( ) ; for ( Map < Class < ? > , Method > map : nameMap . values ( ) ) { methods . addAll ( map . values ( ) ) ; } } return methods ;
public class ProtoParser {
    /**
     * Reads an rpc declaration and returns it. Expects the reader to be
     * positioned just after the {@code rpc} keyword; parses
     * {@code Name (stream? RequestType) returns (stream? ResponseType)}
     * followed by either an options block or a terminating semicolon.
     *
     * @param location source location of the rpc
     * @param documentation leading documentation attached to the rpc
     * @return the parsed rpc element
     */
    private RpcElement readRpc(Location location, String documentation) {
        String name = reader.readName();
        reader.require('(');
        // request type, optionally prefixed by the 'stream' keyword
        boolean requestStreaming = false;
        String requestType;
        String word = reader.readWord();
        if (word.equals("stream")) {
            requestStreaming = true;
            requestType = reader.readDataType();
        } else {
            // the word already read is the start of the data type
            requestType = reader.readDataType(word);
        }
        reader.require(')');
        if (!reader.readWord().equals("returns")) throw reader.unexpected("expected 'returns'");
        reader.require('(');
        // response type, same optional 'stream' prefix handling
        boolean responseStreaming = false;
        String responseType;
        word = reader.readWord();
        if (word.equals("stream")) {
            responseStreaming = true;
            responseType = reader.readDataType();
        } else {
            responseType = reader.readDataType(word);
        }
        reader.require(')');
        // body is either '{ option... }' or a bare ';'
        ImmutableList.Builder<OptionElement> options = ImmutableList.builder();
        if (reader.peekChar('{')) {
            while (true) {
                String rpcDocumentation = reader.readDocumentation();
                if (reader.peekChar('}')) {
                    break;
                }
                // only option declarations are legal inside an rpc body
                Object declared = readDeclaration(rpcDocumentation, Context.RPC);
                if (declared instanceof OptionElement) {
                    options.add((OptionElement) declared);
                }
            }
        } else {
            reader.require(';');
        }
        return new RpcElement(location, name, documentation, requestType, responseType, requestStreaming, responseStreaming, options.build());
    }
}
public class Logging { /** * Log an exception at the ' severe ' level . * @ param e Exception */ public void exception ( Throwable e ) { } }
final String msg = e . getMessage ( ) ; log ( Level . SEVERE , msg != null ? msg : "An exception occurred." , e ) ;
public class XPathBuilder { /** * < p > < b > Used for finding element process ( to generate xpath address ) < / b > < / p > * @ param labelPosition position of this element reported to label * @ param < T > the element which calls this method * @ return this element * @ see < a href = " http : / / www . w3schools . com / xpath / xpath _ axes . asp " > http : / / www . w3schools . com / xpath / xpath _ axes . asp " < / a > */ @ SuppressWarnings ( "unchecked" ) public < T extends XPathBuilder > T setLabelPosition ( final String labelPosition ) { } }
this . labelPosition = labelPosition ; return ( T ) this ;
public class DefaultOpenAPIModelFilter {
    /**
     * Default pass-through visit: returns the callback unchanged. Subclasses
     * override this to transform or filter callbacks.
     *
     * {@inheritDoc}
     */
    @Override
    public Callback visitCallback(Context context, String key, Callback callback) {
        return callback;
    }
}
public class CmsSliderBar { /** * Set overlay ' s opacity . < p > * @ param alpha An opacity percentage , between 100 ( fully opaque ) and 0 ( invisible ) . * @ param layer which bar to change opacity for , 1-4 */ public void setLayerOpacity ( int alpha , int layer ) { } }
if ( ( alpha >= 0 ) && ( alpha <= 100 ) && isAttached ( ) ) { Element colorbar ; switch ( layer ) { case COLORBAR_A : colorbar = m_colorA . getElement ( ) ; break ; case COLORBAR_B : colorbar = m_colorB . getElement ( ) ; break ; case COLORBAR_C : colorbar = m_colorC . getElement ( ) ; break ; case COLORBAR_D : colorbar = m_colorD . getElement ( ) ; break ; default : return ; } colorbar . getStyle ( ) . setOpacity ( ( 1.0 * alpha ) / 100 ) ; CmsTransparencyImpl . setTransparency ( colorbar , alpha ) ; }
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link TrackType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link TrackType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/citygml/transportation/1.0" , name = "Track" , substitutionHeadNamespace = "http://www.opengis.net/citygml/transportation/1.0" , substitutionHeadName = "TransportationComplex" ) public JAXBElement < TrackType > createTrack ( TrackType value ) { } }
return new JAXBElement < TrackType > ( _Track_QNAME , TrackType . class , null , value ) ;
public class FunctionInformationMap {
    /**
     * Returns the module at the given index.
     * <code>repeated group Module = 101 { ... }</code>
     *
     * @param index zero-based index into the repeated Module field
     * @return the module at {@code index}
     */
    public com.google.javascript.jscomp.FunctionInformationMap.Module getModule(int index) {
        return module_.get(index);
    }
}
public class PasswordEncryptor { /** * Hash and hex password with the given salt . * @ param password * the password * @ param salt * the salt * @ return the generated { @ link String } object * @ throws NoSuchAlgorithmException * is thrown if instantiation of the MessageDigest object fails . * @ throws UnsupportedEncodingException * is thrown by get the byte array of the private key String object fails . * @ throws NoSuchPaddingException * is thrown if instantiation of the cypher object fails . * @ throws InvalidKeyException * the invalid key exception is thrown if initialization of the cypher object fails . * @ throws BadPaddingException * is thrown if { @ link Cipher # doFinal ( byte [ ] ) } fails . * @ throws IllegalBlockSizeException * is thrown if { @ link Cipher # doFinal ( byte [ ] ) } fails . * @ throws InvalidAlgorithmParameterException * is thrown if initialization of the cypher object fails . * @ throws InvalidKeySpecException * is thrown if generation of the SecretKey object fails . */ public String hashAndHexPassword ( final String password , final String salt ) throws NoSuchAlgorithmException , InvalidKeyException , UnsupportedEncodingException , NoSuchPaddingException , IllegalBlockSizeException , BadPaddingException , InvalidKeySpecException , InvalidAlgorithmParameterException { } }
return hashAndHexPassword ( password , salt , DEFAULT_ALGORITHM , DEFAULT_CHARSET ) ;
public class DefaultStreamTokenizer { /** * Returns all tokens as list of Strings * @ return List of tokens */ @ Override public List < String > getTokens ( ) { } }
// List < String > tokens = new ArrayList < > ( ) ; if ( ! tokens . isEmpty ( ) ) return tokens ; log . info ( "Starting prebuffering..." ) ; while ( streamHasMoreTokens ( ) ) { tokens . add ( nextTokenFromStream ( ) ) ; } log . info ( "Tokens prefetch finished. Tokens size: [" + tokens . size ( ) + "]" ) ; return tokens ;
public class BoardsApi { /** * Get a Pager of all issue boards for the specified project . * < pre > < code > GitLab Endpoint : GET / projects / : id / boards < / code > < / pre > * @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance * @ param itemsPerPage the number of items per page * @ return a Pager of project ' s issue boards * @ throws GitLabApiException if any exception occurs */ public Pager < Board > getBoards ( Object projectIdOrPath , int itemsPerPage ) throws GitLabApiException { } }
return ( new Pager < Board > ( this , Board . class , itemsPerPage , null , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "boards" ) ) ;
public class CreateFlowLogsResult { /** * Information about the flow logs that could not be created successfully . * @ return Information about the flow logs that could not be created successfully . */ public java . util . List < UnsuccessfulItem > getUnsuccessful ( ) { } }
if ( unsuccessful == null ) { unsuccessful = new com . amazonaws . internal . SdkInternalList < UnsuccessfulItem > ( ) ; } return unsuccessful ;
public class AbstractSARLBasedClasspathContainer { /** * Replies the list of the symbolic names of the bundle dependencies . * @ return the bundle symbolic names of the dependencies . */ public final Set < String > getBundleDependencies ( ) { } }
final Set < String > bundles = new TreeSet < > ( ) ; updateBundleList ( bundles ) ; return bundles ;
public class ValueFactory { /** * 处理的数据是集合类型 * < p > Function : createValueList < / p > * < p > Description : < / p > * @ author acexy @ thankjava . com * @ date 2015-1-27 下午4:59:12 * @ version 1.0 * @ param targetFieldType * @ param targetObject * @ param originValue * @ return * @ throws ClassNotFoundException */ static Object createValueList ( Field targetField , Class < ? > targetFieldType , Object targetObject , Object originValue ) { } }
if ( originValue == null ) { return null ; } List < ? > originList = ( List < ? > ) originValue ; if ( originList . size ( ) == 0 ) { return null ; } Class < ? > proxyType = originList . get ( 0 ) . getClass ( ) ; // Type type = targetField . getGenericType ( ) ; // Type [ ] types = ( Type [ ] ) ReflectUtil . getFieldVal ( type , " actualTypeArguments " ) ; // System . out . println ( types ) ; // Class < ? > proxyType = null ; // try { // System . out . println ( ReflectUtil . getFieldVal ( types [ 0 ] , " name " ) . toString ( ) ) ; // proxyType = Class . forName ( ReflectUtil . getFieldVal ( types [ 0 ] , " name " ) . toString ( ) ) ; // } catch ( ClassNotFoundException e ) { // e . printStackTrace ( ) ; // return null ; List < Object > targetList = new ArrayList < Object > ( ) ; for ( Object object : originList ) { targetList . add ( ValueCast . createValueCore ( targetField , proxyType , targetList , object ) ) ; } return targetList ;
public class AbstractQueueBrowserQuery { /** * / * ( non - Javadoc ) * @ see net . timewalker . ffmq4 . network . packet . AbstractPacket # unserializeFrom ( net . timewalker . ffmq4 . utils . RawDataInputStream ) */ @ Override protected void unserializeFrom ( RawDataBuffer in ) { } }
super . unserializeFrom ( in ) ; browserId = new IntegerID ( in . readInt ( ) ) ;
public class TargetAssignmentOperations { /** * Check wether the maintenance window is valid or not * @ param maintenanceWindowLayout * the maintenance window layout * @ param notification * the UI Notification * @ return boolean if maintenance window is valid or not */ public static boolean isMaintenanceWindowValid ( final MaintenanceWindowLayout maintenanceWindowLayout , final UINotification notification ) { } }
if ( maintenanceWindowLayout . isEnabled ( ) ) { try { MaintenanceScheduleHelper . validateMaintenanceSchedule ( maintenanceWindowLayout . getMaintenanceSchedule ( ) , maintenanceWindowLayout . getMaintenanceDuration ( ) , maintenanceWindowLayout . getMaintenanceTimeZone ( ) ) ; } catch ( final InvalidMaintenanceScheduleException e ) { LOG . error ( "Maintenance window is not valid" , e ) ; notification . displayValidationError ( e . getMessage ( ) ) ; return false ; } } return true ;
public class SmjpegDecoder {
    /**
     * Decodes the optional video header:
     * 4 bytes magic - "_VID"
     * Uint32 video header length
     * Uint32 number of frames
     * Uint16 width
     * Uint16 height
     * 4 bytes video encoding ("JFIF" = jpeg)
     * <p>
     * The magic and header-length fields are assumed to have positioned the
     * stream at the length field already.
     */
    private void decodeVideoHeader() throws IOException {
        // header length (bytes following this field)
        int length = file.readInt();
        file.skipBytes(4); // video frame count is always zero with ffmpeg encoded files...
        videoWidth = file.readUnsignedShort();
        videoHeight = file.readUnsignedShort();
        byte[] videoEncodingBytes = new byte[4];
        file.readFully(videoEncodingBytes);
        videoEncoding = SmjpegVideoEncoding.fromMagic(videoEncodingBytes);
        // skip any trailing header bytes beyond the 12 consumed above
        // (4 frame count + 2 width + 2 height + 4 encoding)
        file.skipBytes(Math.max(0, length - 4 - 4 - 2 - 2));
    }
}
public class GridIndex {
    /**
     * Updates the grid index to reflect changes to the triangulation. Note that added
     * triangles outside the indexed region will force to recompute the whole index
     * with the enlarged region.
     *
     * @param updatedTriangles changed triangles of the triangulation. This may be added triangles,
     *                         removed triangles or both. All that matter is that they cover the
     *                         changed area.
     */
    public void updateIndex(Iterator<Triangle> updatedTriangles) {
        // Gather the bounding box of the updated area
        BoundingBox updatedRegion = new BoundingBox();
        while (updatedTriangles.hasNext()) {
            updatedRegion = updatedRegion.unionWith(updatedTriangles.next().getBoundingBox());
        }
        if (updatedRegion.isNull()) // No update...
            return;
        // Bad news - the updated region lies outside the indexed region.
        // The whole index must be recalculated
        if (!indexRegion.contains(updatedRegion)) {
            // rebuild over the enlarged region, keeping the current cell resolution
            init(indexDelaunay, (int) (indexRegion.getWidth() / x_size),
                    (int) (indexRegion.getHeight() / y_size),
                    indexRegion.unionWith(updatedRegion));
        } else {
            // Find the cell region to be updated
            Vector2i minInvalidCell = getCellOf(updatedRegion.getMinPoint());
            Vector2i maxInvalidCell = getCellOf(updatedRegion.getMaxPoint());
            // And update it with fresh triangles, starting the walk from a
            // still-valid triangle adjacent to the invalidated cells
            Triangle adjacentValidTriangle = findValidTriangle(minInvalidCell);
            updateCellValues(minInvalidCell.getX(), minInvalidCell.getY(),
                    maxInvalidCell.getX(), maxInvalidCell.getY(), adjacentValidTriangle);
        }
    }
}
public class AmazonEC2Client { /** * Describes one or more of your network ACLs . * For more information , see < a href = " https : / / docs . aws . amazon . com / AmazonVPC / latest / UserGuide / VPC _ ACLs . html " > Network * ACLs < / a > in the < i > Amazon Virtual Private Cloud User Guide < / i > . * @ param describeNetworkAclsRequest * @ return Result of the DescribeNetworkAcls operation returned by the service . * @ sample AmazonEC2 . DescribeNetworkAcls * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / DescribeNetworkAcls " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DescribeNetworkAclsResult describeNetworkAcls ( DescribeNetworkAclsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeNetworkAcls ( request ) ;
public class ConfigUtils { /** * Method is used to parse hosts and ports * @ param config * @ param key * @ return */ public static List < HostAndPort > hostAndPorts ( AbstractConfig config , String key ) { } }
return hostAndPorts ( config , key , null ) ;
public class PublicKeyReader {
    /**
     * Reads a public key from its X.509 (SubjectPublicKeyInfo) encoding.
     *
     * @param publicKeyBytes the X.509-encoded public key bytes
     * @param algorithm the algorithm for the {@link KeyFactory} (e.g. "RSA")
     * @return the decoded public key
     * @throws NoSuchAlgorithmException if no KeyFactory supports the algorithm
     * @throws InvalidKeySpecException if the bytes are not a valid key encoding
     * @throws NoSuchProviderException if the specified provider is not registered in the
     *         security provider list
     */
    public static PublicKey readPublicKey(final byte[] publicKeyBytes, final String algorithm)
            throws NoSuchAlgorithmException, InvalidKeySpecException, NoSuchProviderException {
        final KeyFactory keyFactory = KeyFactory.getInstance(algorithm);
        return keyFactory.generatePublic(new X509EncodedKeySpec(publicKeyBytes));
    }
}
public class MetadataServiceInjector { /** * Returns the { @ link MetadataService } instance from the specified { @ link ServiceRequestContext } . */ public static MetadataService getMetadataService ( ServiceRequestContext ctx ) { } }
final MetadataService mds = ctx . attr ( METADATA_SERVICE_ATTRIBUTE_KEY ) . get ( ) ; if ( mds != null ) { return mds ; } throw new IllegalStateException ( "No metadata service instance exists." ) ;
public class BaseCommunicator {
    /**
     * Sends the given HTTP request and feeds the raw response to the supplied
     * content handler, always consuming the response entity afterwards so the
     * underlying connection can be reused.
     *
     * @param request the request to execute (gzip encoding is requested)
     * @param handler converts the raw HTTP response into the result type
     * @return the handler's result
     * @throws RedmineException on protocol or transport failures
     * @see com.taskadapter.redmineapi.internal.comm.Communicator#sendRequest(org.apache.http.HttpRequest)
     */
    @Override
    public <R> R sendRequest(HttpRequest request, ContentHandler<HttpResponse, R> handler) throws RedmineException {
        logger.debug(request.getRequestLine().toString());
        request.addHeader("Accept-Encoding", "gzip");
        final HttpClient httpclient = client;
        try {
            final HttpResponse httpResponse = httpclient.execute((HttpUriRequest) request);
            try {
                return handler.processContent(httpResponse);
            } finally {
                // release the connection even if the handler throws
                EntityUtils.consume(httpResponse.getEntity());
            }
        } catch (ClientProtocolException e1) {
            // malformed HTTP exchange -> format error
            throw new RedmineFormatException(e1);
        } catch (IOException e1) {
            // network-level failure -> transport error with the target URI
            throw new RedmineTransportException("Cannot fetch data from " + getMessageURI(request) + " : " + e1.toString(), e1);
        }
    }
}
public class NamesAreUniqueValidationHelper { /** * { @ inheritDoc } * The cancel indicator will be queried everytime a description has been processed . * It should provide a fast answer about its canceled state . */ @ Override public void checkUniqueNames ( Iterable < IEObjectDescription > descriptions , CancelIndicator cancelIndicator , ValidationMessageAcceptor acceptor ) { } }
Iterator < IEObjectDescription > iter = descriptions . iterator ( ) ; if ( ! iter . hasNext ( ) ) return ; Map < EClass , Map < QualifiedName , IEObjectDescription > > clusterToNames = Maps . newHashMap ( ) ; while ( iter . hasNext ( ) ) { IEObjectDescription description = iter . next ( ) ; checkDescriptionForDuplicatedName ( description , clusterToNames , acceptor ) ; operationCanceledManager . checkCanceled ( cancelIndicator ) ; }
public class ApiAbstraction { /** * Initializes an annotation class * @ param name The name of the annotation class * @ return The instance of the annotation . Returns a dummy if the class was * not found */ @ SuppressWarnings ( "unchecked" ) protected Class < ? extends Annotation > annotationTypeForName ( String name ) { } }
try { return ( Class < ? extends Annotation > ) resourceLoader . classForName ( name ) ; } catch ( ResourceLoadingException cnfe ) { return DUMMY_ANNOTATION ; }
public class TextProcessUtility { /** * 加载一个文件夹下的所有语料 * @ param path * @ return */ public static Map < String , String [ ] > loadCorpus ( String path ) { } }
Map < String , String [ ] > dataSet = new TreeMap < String , String [ ] > ( ) ; File root = new File ( path ) ; File [ ] folders = root . listFiles ( ) ; if ( folders == null ) return null ; for ( File folder : folders ) { if ( folder . isFile ( ) ) continue ; File [ ] files = folder . listFiles ( ) ; if ( files == null ) continue ; String [ ] documents = new String [ files . length ] ; for ( int i = 0 ; i < files . length ; i ++ ) { documents [ i ] = IOUtil . readTxt ( files [ i ] . getAbsolutePath ( ) ) ; } dataSet . put ( folder . getName ( ) , documents ) ; } return dataSet ;
public class EventMetadataUtils { /** * Get failure messages * @ return The concatenated failure messages from all the task states */ public static String getTaskFailureExceptions ( List < TaskState > taskStates ) { } }
StringBuffer sb = new StringBuffer ( ) ; // Add task failure messages in a group followed by task failure exceptions appendTaskStateValues ( taskStates , sb , TASK_FAILURE_MESSAGE_KEY ) ; appendTaskStateValues ( taskStates , sb , ConfigurationKeys . TASK_FAILURE_EXCEPTION_KEY ) ; return sb . toString ( ) ;
public class File { /** * Read a character . * @ exception IOException if an error occurred while accessing the file * associated with this object , or EOFException if the object * reached the end of the file */ @ JSFunction public String readChar ( ) throws IOException { } }
int i = getReader ( ) . read ( ) ; if ( i == - 1 ) return null ; char [ ] charArray = { ( char ) i } ; return new String ( charArray ) ;
public class AStar {

    /**
     * Run the A* algorithm, assuming that the graph is oriented if an
     * orientation tool was passed to the constructor.
     *
     * <p>The orientation of the graph may also be overridden by the
     * implementations of the {@link AStarNode A* nodes}.
     *
     * @param startPoint is the starting point.
     * @param endPoint is the point to reach.
     * @return the found path, or <code>null</code> if none found.
     */
    @Pure
    public GP solve(PT startPoint, PT endPoint) {
        // Seed the search with a start node whose cost-so-far is 0 and whose
        // heuristic is the estimated distance to the goal, then delegate to the
        // node-based overload of solve().
        return solve(node(startPoint, 0f, estimate(startPoint, endPoint), null), endPoint);
    }
}
public class SparseBitmap {

    /**
     * Computes the bit-wise logical AND of two sparse bitmaps.
     * (NOTE(review): the original javadoc said "exclusive and", but the code
     * applies the {@code &} operator and the method is named {@code and2by2},
     * so this is an intersection, not XOR.)
     *
     * @param container
     *            where the data will be stored
     * @param bitmap1
     *            the first bitmap
     * @param bitmap2
     *            the second bitmap
     */
    public static void and2by2(BitmapContainer container, SparseBitmap bitmap1, SparseBitmap bitmap2) {
        // Each bitmap's buffer is walked two entries at a time: the entry at the
        // cursor encodes a position delta, the following entry the word of bits.
        int it1 = 0;
        int it2 = 0;
        // p1/p2 are the current absolute positions in each bitmap.
        int p1 = bitmap1.buffer.get(it1), p2 = bitmap2.buffer.get(it2);
        int buff;
        while (true) {
            if (p1 < p2) {
                // bitmap1 is behind: advance its cursor, or stop at its end.
                if (it1 + 2 >= bitmap1.buffer.size()) break;
                it1 += 2;
                p1 += bitmap1.buffer.get(it1) + 1;
            } else if (p1 > p2) {
                // bitmap2 is behind: advance its cursor, or stop at its end.
                if (it2 + 2 >= bitmap2.buffer.size()) break;
                it2 += 2;
                p2 += bitmap2.buffer.get(it2) + 1;
            } else {
                // Positions match: AND the two words and keep the result when non-zero.
                if ((buff = bitmap1.buffer.get(it1 + 1) & bitmap2.buffer.get(it2 + 1)) != 0) {
                    container.add(buff, p1);
                }
                // Advance both cursors in lock-step, or stop when either is exhausted.
                if ((it1 + 2 >= bitmap1.buffer.size()) || (it2 + 2 >= bitmap2.buffer.size())) break;
                it1 += 2;
                it2 += 2;
                p1 += bitmap1.buffer.get(it1) + 1;
                p2 += bitmap2.buffer.get(it2) + 1;
            }
        }
    }
}
public class UnifiedResponse {

    /**
     * Sets a response header to the response according to the passed name and
     * value. An existing header entry with the same name is overridden.<br>
     * <b>ATTENTION:</b> You should only use the APIs that {@link UnifiedResponse}
     * directly offers. Use this method only in emergency and make sure you
     * validate the header field and allowed value!
     *
     * @param sName
     *        Name of the header. May neither be <code>null</code> nor empty.
     * @param sValue
     *        Value of the header. May neither be <code>null</code> nor empty.
     * @throws IllegalArgumentException if name or value is empty (via ValueEnforcer)
     */
    public final void setCustomResponseHeader(@Nonnull @Nonempty final String sName,
                                              @Nonnull @Nonempty final String sValue) {
        // Validate both arguments before touching the header map; the name is
        // checked first, so an empty name is reported even if the value is also empty.
        ValueEnforcer.notEmpty(sName, "Name");
        ValueEnforcer.notEmpty(sValue, "Value");
        // setHeader replaces any existing entry with the same name.
        m_aResponseHeaderMap.setHeader(sName, sValue);
    }
}
public class DisambiguateProperties {

    /**
     * Returns the type in the chain from the given type that contains the given
     * field, or null if it is not found anywhere.
     * Can return a subtype of the input type.
     *
     * <p>Results (including negative ones, cached as {@code bottomObjectType})
     * are memoized in the gtwpCache keyed by (field, type).
     */
    private ObjectType getTypeWithProperty(String field, JSType type) {
        if (type == null) {
            return null;
        }
        // Cache hit: bottomObjectType is the sentinel for a cached "not found".
        ObjectType foundType = gtwpCacheGet(field, type);
        if (foundType != null) {
            return foundType.equals(bottomObjectType) ? null : foundType;
        }
        // Enum elements delegate to the enumerated type they wrap.
        if (type.isEnumElementType()) {
            foundType = getTypeWithProperty(field, type.getEnumeratedTypeOfEnumElement());
            gtwpCachePut(field, type, foundType == null ? bottomObjectType : foundType);
            return foundType;
        }
        if (!type.isObjectType()) {
            if (type.isBoxableScalar()) {
                // Scalars like string/number are looked up on their boxed object type.
                foundType = getTypeWithProperty(field, type.autobox());
                gtwpCachePut(field, type, foundType == null ? bottomObjectType : foundType);
                return foundType;
            } else {
                // Non-object, non-boxable types can never own the property.
                gtwpCachePut(field, type, bottomObjectType);
                return null;
            }
        }
        // Ignore the prototype itself at all times.
        if ("prototype".equals(field)) {
            gtwpCachePut(field, type, bottomObjectType);
            return null;
        }
        // We look up the prototype chain to find the highest place (if any) that
        // this appears. This will make references to overridden properties look
        // like references to the initial property, so they are renamed alike.
        ObjectType objType = type.toMaybeObjectType();
        if (objType != null && objType.getConstructor() != null && objType.getConstructor().isInterface()) {
            // Interfaces: use the topmost interface that defines the field.
            ObjectType topInterface = objType.getTopDefiningInterface(field);
            if (topInterface != null && topInterface.getConstructor() != null) {
                foundType = topInterface.getImplicitPrototype();
            }
        } else {
            // Classes: walk the implicit prototype chain, remembering the highest
            // type that owns the field (the loop keeps overwriting foundType).
            while (objType != null && !Objects.equals(objType.getImplicitPrototype(), objType)) {
                if (objType.hasOwnProperty(field)) {
                    foundType = objType;
                }
                objType = objType.getImplicitPrototype();
            }
        }
        // If the property does not exist on the referenced type but the original
        // type is an object type, see if any subtype has the property.
        if (foundType == null) {
            JSType subtypeWithProp = type.getGreatestSubtypeWithProperty(field);
            ObjectType maybeType = subtypeWithProp == null ? null : subtypeWithProp.toMaybeObjectType();
            // getGreatestSubtypeWithProperty does not guarantee that the property
            // is defined on the returned type, it just indicates that it might be,
            // so we have to double check.
            if (maybeType != null && maybeType.hasOwnProperty(field)) {
                foundType = maybeType;
            }
        }
        // Unwrap templatized types, they are not unique at runtime.
        if (foundType != null && foundType.isGenericObjectType()) {
            foundType = foundType.getRawType();
        }
        // Since disambiguation just looks at names, we must return a uniquely named type rather
        // than an "equivalent" type. In particular, we must manually unwrap named types
        // so that the returned type has the correct name.
        if (foundType != null && foundType.isNamedType()) {
            foundType = foundType.toMaybeNamedType().getReferencedType().toMaybeObjectType();
        }
        gtwpCachePut(field, type, foundType == null ? bottomObjectType : foundType);
        return foundType;
    }
}
public class HiveConvertExtractor { /** * There is only one record ( { @ link QueryBasedHiveConversionEntity } ) to be read . This { @ link QueryBasedHiveConversionEntity } is * removed from { @ link # conversionEntities } list after it is read . So when gobblin runtime calls this method the second time , it returns a null */ @ Override public QueryBasedHiveConversionEntity readRecord ( QueryBasedHiveConversionEntity reuse ) throws DataRecordException , IOException { } }
if ( this . conversionEntities . isEmpty ( ) ) { return null ; } return this . conversionEntities . remove ( 0 ) ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public ObjectAreaSizeSizeType createObjectAreaSizeSizeTypeFromString ( EDataType eDataType , String initialValue ) { } }
ObjectAreaSizeSizeType result = ObjectAreaSizeSizeType . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class MjdbcPoolBinder { /** * Returns new Pooled { @ link DataSource } implementation * In case this function won ' t work - use { @ link # createDataSource ( java . util . Properties ) } * @ param url Database connection url * @ return new Pooled { @ link DataSource } implementation * @ throws SQLException */ public static DataSource createDataSource ( String url ) throws SQLException { } }
assertNotNull ( url ) ; BasicDataSource ds = new BasicDataSource ( ) ; ds . setUrl ( url ) ; return ds ;
public class BugInstance { /** * Look up a property by name . * @ param name * name of the property to look for * @ return the BugProperty with the given name , or null if the property has * not been set */ public BugProperty lookupProperty ( String name ) { } }
BugProperty prop = propertyListHead ; while ( prop != null ) { if ( prop . getName ( ) . equals ( name ) ) { break ; } prop = prop . getNext ( ) ; } return prop ;
public class CClassLoader {

    /**
     * Initialize the loaders hierarchy from the given configuration.
     * Idempotent: returns immediately when the root loader is already initialized.
     *
     * @param config
     *        the loaders config object
     */
    public static final void init(final CClassLoaderConfig config) {
        CClassLoader.installURLStreamHandlerFactory();
        // Already initialized: nothing to do.
        if (CClassLoader.getRootLoader().isInit()) {
            return;
        }
        // Phase 1: initialize the loaders default parameters from the config map.
        for (final Iterator it = config.getLoadersInfoMap().entrySet().iterator(); it.hasNext();) {
            final Map.Entry entry = (Map.Entry) it.next();
            final String loaderPath = (String) entry.getKey();
            final CClassLoaderConfig.CLoaderInfo info = (CClassLoaderConfig.CLoaderInfo) entry.getValue();
            final CClassLoader loader = CClassLoader.getLoader(loaderPath);
            loader.config = config;
            loader.booAlone = info.isAlone();
            loader.booDoNotForwardToParent = info.isDoNotForwardToParent();
            loader.booMandatory = info.isMandatory();
            // Mandatory loaders are also registered in a dedicated lookup map.
            if (loader.isMandatory()) {
                CClassLoader.mandatoryLoadersMap.put(loader.getPath(), loader);
            }
            loader.booResourceOnly = info.isResourceOnly();
        }
        // Phase 2: register classes/resources found in the filesystem per loader.
        for (final Iterator it = config.getFilesMap().entrySet().iterator(); it.hasNext();) {
            final Map.Entry entry = (Map.Entry) it.next();
            final String loaderPath = (String) entry.getKey();
            final List list = (List) entry.getValue();
            final CClassLoader loader = CClassLoader.getLoader(loaderPath);
            for (final Iterator f = list.iterator(); f.hasNext();) {
                final Object obj = f.next();
                if (obj instanceof URL) {
                    final URL file = (URL) obj;
                    String name = file.toString();
                    // Strip everything up to and including the loader path so that
                    // 'name' becomes the loader-relative resource name.
                    final int index = name.indexOf(loaderPath);
                    if (index != -1) {
                        name = name.substring(index + loaderPath.length());
                    }
                    if (name.endsWith(".jar")) {
                        // Jars are registered as resources and their entries scanned.
                        loader.addResource(name, file);
                        loader.readDirectories(file);
                    } else {
                        loader.addResource(name, file);
                        if (!loader.booResourceOnly && name.endsWith(".class")) {
                            // Convert a path like 'a/b/C.class' to the class name 'a.b.C'.
                            name = name.substring(0, name.lastIndexOf('.'));
                            name = name.replace('\\', '/');
                            name = name.replace('/', '.');
                            loader.addClass(name, file);
                        } else if (name.startsWith("native/")) {
                            // Native libraries: 'native/<system>/...'; extract the system id.
                            String system = name.substring(7);
                            system = system.substring(0, system.indexOf('/'));
                            // Only the first library registered per system wins.
                            if (!loader.dllMap.containsKey(system)) {
                                loader.dllMap.put(system, file);
                            }
                            if (!loader.resourcesMap.containsKey(name)) {
                                loader.resourcesMap.put(name, file);
                            } else {
                                // Same resource name already present: upgrade a single URL
                                // to a list, or append to the existing list.
                                final Object to = loader.resourcesMap.get(name);
                                if (to instanceof URL) {
                                    final URL uo = (URL) to;
                                    final List l = new ArrayList();
                                    l.add(uo);
                                    l.add(file);
                                    loader.resourcesMap.put(name, l);
                                } else if (to instanceof List) {
                                    final List uo = (List) to;
                                    uo.add(file);
                                    loader.resourcesMap.put(name, uo);
                                }
                            }
                        }
                    }
                }
            }
        }
        // Phase 3: mark the hierarchy as initialized.
        CClassLoader.setInit(CClassLoader.getRootLoader());
    }
}
public class StringUtil { /** * Compares two strings * Strings will be uppercased in english and compared * equivalent to s1 . equalsIgnoreCase ( s2) * throws NPE if s1 is null * @ param s1 first string to compare * @ param s2 second string to compare * @ return true if the two upppercased ENGLISH values are equal * return false if s2 is null */ public static boolean SQLEqualsIgnoreCase ( String s1 , String s2 ) { } }
if ( s2 == null ) { return false ; } else { return SQLToUpperCase ( s1 ) . equals ( SQLToUpperCase ( s2 ) ) ; }
public class PropertyWriteUtil { /** * Writes property attributes into XML . * @ param xmlStreamWriter XML writer * @ param property property * @ throws XMLStreamException { @ link XMLStreamException } */ public static void writeAttributes ( XMLStreamWriter xmlStreamWriter , HierarchicalProperty property ) throws XMLStreamException { } }
Map < String , String > attributes = property . getAttributes ( ) ; Iterator < String > keyIter = attributes . keySet ( ) . iterator ( ) ; while ( keyIter . hasNext ( ) ) { String attrName = keyIter . next ( ) ; String attrValue = attributes . get ( attrName ) ; xmlStreamWriter . writeAttribute ( attrName , attrValue ) ; }
public class ParadataManager { /** * Return all paradata for a resource of all types from other submitters for the resource * @ param resourceUrl * @ return * @ throws Exception */ public List < ISubmission > getExternalSubmissions ( String resourceUrl ) throws Exception { } }
return new SubmitterSubmissionsFilter ( ) . omit ( getSubmissions ( resourceUrl ) , submitter ) ;
public class MappedParametrizedObjectEntry { /** * Parse named parameter as Integer . * @ param name * parameter name * @ param defaultValue * default Integer value * @ return Integer value */ public Integer getParameterInteger ( String name , Integer defaultValue ) { } }
String value = getParameterValue ( name , null ) ; if ( value != null ) { try { return StringNumberParser . parseInt ( value ) ; } catch ( NumberFormatException e ) { if ( LOG . isTraceEnabled ( ) ) { LOG . trace ( "An exception occurred: " + e . getMessage ( ) ) ; } } } return defaultValue ;
public class SasFileParser { /** * The function to convert an array of bytes with any order of bytes into { @ link ByteBuffer } . * { @ link ByteBuffer } has the order of bytes defined in the file located at the * { @ link SasFileConstants # ALIGN _ 2 _ OFFSET } offset . * Later the parser converts result { @ link ByteBuffer } into a number . * @ param data the input array of bytes with the little - endian or big - endian order . * @ return { @ link ByteBuffer } with the order of bytes defined in the file located at * the { @ link SasFileConstants # ALIGN _ 2 _ OFFSET } offset . */ private ByteBuffer byteArrayToByteBuffer ( byte [ ] data ) { } }
ByteBuffer byteBuffer = ByteBuffer . wrap ( data ) ; if ( sasFileProperties . getEndianness ( ) == 0 ) { return byteBuffer ; } else { return byteBuffer . order ( ByteOrder . LITTLE_ENDIAN ) ; }
public class BottomSheet { /** * Adapts the width of the bottom sheet . */ private void adaptWidth ( ) { } }
adapter . setWidth ( width ) ; if ( rootView != null ) { rootView . setWidth ( width ) ; rootView . requestLayout ( ) ; }
public class CounterShardData { /** * Helper method to compute the shard number index from an instance of { @ link Key } of type { @ link CounterShardData } . * This method assumes that the " name " field of a counter shard key will end in a dash , followed by the shard * index . * @ param counterShardDataKey * @ return */ public static Integer computeShardIndex ( final Key < CounterShardData > counterShardDataKey ) { } }
Preconditions . checkNotNull ( counterShardDataKey ) ; final String key = counterShardDataKey . getName ( ) ; int lastDashIndex = key . lastIndexOf ( "-" ) ; final String shardIndexAsString = key . substring ( ( lastDashIndex + 1 ) , key . length ( ) ) ; return Integer . valueOf ( shardIndexAsString ) ;
public class ImageUtil {

    /**
     * Creates and returns a new image consisting of the supplied image traced
     * with the given color, thickness and alpha transparency.
     */
    public static BufferedImage createTracedImage(ImageCreator isrc, BufferedImage src, Color tcolor,
            int thickness, float startAlpha, float endAlpha) {
        // Allocate a translucent destination image matching the source dimensions,
        // then delegate to the overload that does the actual tracing.
        final BufferedImage dest =
            isrc.createImage(src.getWidth(), src.getHeight(), Transparency.TRANSLUCENT);
        return createTracedImage(src, dest, tcolor, thickness, startAlpha, endAlpha);
    }
}
public class TableInfo { /** * Return true if this table information has a field with this columnName as set by * { @ link DatabaseField # columnName ( ) } or the field name if not set . */ public boolean hasColumnName ( String columnName ) { } }
for ( FieldType fieldType : fieldTypes ) { if ( fieldType . getColumnName ( ) . equals ( columnName ) ) { return true ; } } return false ;
public class MysqldProcess { /** * Work - around to get Executable in hooks where it ' s not provided and as * all init is done in base class constructor , local vars are still not * initialized : / */ private MysqldExecutable getExecutable ( ) { } }
try { Field f = AbstractProcess . class . getDeclaredField ( "executable" ) ; f . setAccessible ( true ) ; return ( MysqldExecutable ) f . get ( this ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; }
public class ParserDDL {

    /**
     * Responsible for handling the creation of table columns during the process
     * of executing CREATE TABLE or ADD COLUMN etc. statements.
     *
     * @param table this table
     * @param hsqlName column name
     * @param constraintList list of constraints
     * @return a Column object with indicated attributes, or null when the next
     *         token is a comma (no column definition present)
     */
    ColumnSchema readColumnDefinitionOrNull(Table table, HsqlName hsqlName, HsqlArrayList constraintList) {
        boolean isIdentity = false;
        boolean isPKIdentity = false;
        boolean identityAlways = false;
        // NOTE(review): generateExpr is assigned in the GENERATED...(expr) branch
        // below but never read afterwards in this method.
        Expression generateExpr = null;
        boolean isNullable = true;
        Expression defaultExpr = null;
        Type typeObject;
        NumberSequence sequence = null;
        if (token.tokenType == Tokens.IDENTITY) {
            // Bare IDENTITY: implies INTEGER type, an identity sequence, and PK.
            read();
            isIdentity = true;
            isPKIdentity = true;
            typeObject = Type.SQL_INTEGER;
            sequence = new NumberSequence(null, 0, 1, typeObject);
        } else if (token.tokenType == Tokens.COMMA) {
            // No column definition before the comma.
            ;
            return null;
        } else {
            typeObject = readTypeDefinition(true);
        }
        if (isIdentity) {
            // Identity columns take no DEFAULT/GENERATED clause.
        } else if (token.tokenType == Tokens.DEFAULT) {
            read();
            defaultExpr = readDefaultClause(typeObject);
        } else if (token.tokenType == Tokens.GENERATED && !isIdentity) {
            // GENERATED { BY DEFAULT | ALWAYS } AS { IDENTITY [(opts)] | (expr) }
            read();
            if (token.tokenType == Tokens.BY) {
                read();
                readThis(Tokens.DEFAULT);
            } else {
                readThis(Tokens.ALWAYS);
                identityAlways = true;
            }
            readThis(Tokens.AS);
            if (token.tokenType == Tokens.IDENTITY) {
                read();
                sequence = new NumberSequence(null, typeObject);
                sequence.setAlways(identityAlways);
                if (token.tokenType == Tokens.OPENBRACKET) {
                    read();
                    readSequenceOptions(sequence, false, false);
                    readThis(Tokens.CLOSEBRACKET);
                }
                isIdentity = true;
            } else if (token.tokenType == Tokens.OPENBRACKET) {
                read();
                generateExpr = XreadValueExpression();
                readThis(Tokens.CLOSEBRACKET);
            }
        }
        ColumnSchema column = new ColumnSchema(hsqlName, typeObject, isNullable, false, defaultExpr);
        readColumnConstraints(table, column, constraintList);
        // A trailing IDENTITY after the constraints also makes the column a PK identity.
        if (token.tokenType == Tokens.IDENTITY && !isIdentity) {
            read();
            isIdentity = true;
            isPKIdentity = true;
            sequence = new NumberSequence(null, 0, 1, typeObject);
        }
        if (isIdentity) {
            column.setIdentity(sequence);
        }
        // Synthesize a PRIMARY KEY constraint for identity columns not already PK.
        if (isPKIdentity && !column.isPrimaryKey()) {
            OrderedHashSet set = new OrderedHashSet();
            set.add(column.getName().name);
            HsqlName constName = database.nameManager.newAutoName("PK", table.getSchemaName(),
                table.getName(), SchemaObject.CONSTRAINT);
            Constraint c = new Constraint(constName, true, set, Constraint.PRIMARY_KEY);
            // Slot 0 of the constraint list is reserved for the primary key.
            constraintList.set(0, c);
            column.setPrimaryKey(true);
        }
        return column;
    }
}
public class SimpleMessageFormatter {

    /**
     * Appends {@code value} to {@code out}, formatted according to the given
     * format character and options. Common cases are handled with fast paths;
     * everything else falls back to {@link String#format}.
     *
     * TODO: Factor out this logic more to allow subclasses to easily support other types.
     */
    private static void appendFormatted(StringBuilder out, Object value, FormatChar format, FormatOptions options) {
        // Fast path switch statement for commonest cases (we could handle upper-case as a post
        // processing step but it's so uncommon it doesn't seem worth it).
        switch (format) {
            case STRING:
                // String formatting is by far and away the most common case.
                if (!(value instanceof Formattable)) {
                    if (options.isDefault()) {
                        // %s on a non-Formattable instance is the single most common case by far.
                        out.append(safeToString(value));
                        return;
                    }
                    break;
                }
                // Rare but easy to deal with efficiently, and can support wrapped arguments nicely.
                safeFormatTo((Formattable) value, out, options);
                return;
            // Some other types are really easy when they don't have special format options.
            case DECIMAL:
            case BOOLEAN:
                if (options.isDefault()) {
                    out.append(value);
                    return;
                }
                break;
            case HEX:
                // Check that the format options are compatible with "easy" hex formatting. This could
                // be expanded to include width, radix and zero padding (relatively common for hex).
                if (options.filter(FLAG_UPPER_CASE, false, false).equals(options)) {
                    // Having called canFormat(), we know the value must be a Number.
                    appendHex(out, (Number) value, options);
                    return;
                }
                break;
            case CHAR:
                if (options.isDefault()) {
                    if (value instanceof Character) {
                        out.append(value);
                        return;
                    }
                    // Non-Character values are treated as Unicode code points.
                    int codePoint = ((Number) value).intValue();
                    if (Character.isBmpCodePoint(codePoint)) {
                        out.append((char) codePoint);
                        return;
                    }
                    // Supplementary code points need a surrogate pair.
                    out.append(Character.toChars(codePoint));
                    return;
                }
                break;
            default:
                // Fall through.
        }
        // Default handling for rare cases that need non-trivial formatting.
        String formatString = format.getDefaultFormatString();
        if (!options.isDefault()) {
            char chr = format.getChar();
            if (options.shouldUpperCase()) {
                // Clear 6th bit to convert lower case ASCII to upper case.
                chr &= (char) ~0x20;
            }
            // Rebuild a printf-style format string from the options, e.g. "%08x".
            formatString = options.appendPrintfOptions(new StringBuilder("%")).append(chr).toString();
        }
        out.append(String.format(FORMAT_LOCALE, formatString, value));
    }
}