signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class BosClient { /** * Initiates a multipart upload and returns an InitiateMultipartUploadResponse * which contains an upload ID . This upload ID associates all the parts in * the specific upload and is used in each of your subsequent uploadPart requests . * You also include this upload ID in the final request to either complete , or abort the multipart * upload request . * @ param bucketName The name of the Bos bucket containing the object to initiate . * @ param key The key of the object to initiate . * @ return An InitiateMultipartUploadResponse from Bos . */ public InitiateMultipartUploadResponse initiateMultipartUpload ( String bucketName , String key ) { } }
return this . initiateMultipartUpload ( new InitiateMultipartUploadRequest ( bucketName , key ) ) ;
public class ListWorkersWithQualificationTypeResult { /** * The list of Qualification elements returned by this call . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setQualifications ( java . util . Collection ) } or { @ link # withQualifications ( java . util . Collection ) } if you want * to override the existing values . * @ param qualifications * The list of Qualification elements returned by this call . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListWorkersWithQualificationTypeResult withQualifications ( Qualification ... qualifications ) { } }
if ( this . qualifications == null ) { setQualifications ( new java . util . ArrayList < Qualification > ( qualifications . length ) ) ; } for ( Qualification ele : qualifications ) { this . qualifications . add ( ele ) ; } return this ;
public class AbstractCypherRuleInterpreterPlugin { /** * Verifies if the given row shall be suppressed . * The primary column is checked if it contains a suppression that matches the * current rule id . * @ param ruleId * The rule id . * @ param row * The row . * @ param primaryColumn * The name of the primary column . * @ return < code > true < / code > if the row shall be suppressed . */ private boolean isSuppressedRow ( String ruleId , Map < String , Object > row , String primaryColumn ) { } }
Object primaryValue = row . get ( primaryColumn ) ; if ( primaryValue != null && Suppress . class . isAssignableFrom ( primaryValue . getClass ( ) ) ) { Suppress suppress = ( Suppress ) primaryValue ; for ( String suppressId : suppress . getSuppressIds ( ) ) { if ( ruleId . equals ( suppressId ) ) { return true ; } } } return false ;
public class Utils { /** * Writes a { @ code T } instance to { @ code dest } using { @ code adapter } . This method can handle * { @ code null } values . When reading this type out of the parcel later , you should use * { @ link # readNullable ( Parcel , TypeAdapter ) } . */ public static < T > void writeNullable ( @ Nullable T value , @ NonNull Parcel dest , int flags , @ NonNull TypeAdapter < T > adapter ) { } }
if ( value == null ) { dest . writeInt ( 0 ) ; } else { dest . writeInt ( 1 ) ; adapter . writeToParcel ( value , dest , flags ) ; }
public class ADictionary { /** * load all the words in the specified lexicon file into the dictionary * @ param config * @ param dic * @ param file * @ param buffer * @ throws IOException * @ throws FileNotFoundException * @ throws NumberFormatException */ public static void loadWords ( JcsegTaskConfig config , ADictionary dic , File file , List < String [ ] > buffer ) throws NumberFormatException , FileNotFoundException , IOException { } }
loadWords ( config , dic , new FileInputStream ( file ) , buffer ) ;
public class Parser { /** * Create a node that can be used to hold lexically scoped variable * definitions ( via let declarations ) . * @ param token the token of the node to create * @ param lineno line number of source * @ return the created node */ protected Scope createScopeNode ( int token , int lineno ) { } }
Scope scope = new Scope ( ) ; scope . setType ( token ) ; scope . setLineno ( lineno ) ; return scope ;
public class AmazonEKSClient { /** * Creates an Amazon EKS control plane . * The Amazon EKS control plane consists of control plane instances that run the Kubernetes software , like * < code > etcd < / code > and the API server . The control plane runs in an account managed by AWS , and the Kubernetes API * is exposed via the Amazon EKS API server endpoint . Each Amazon EKS cluster control plane is single - tenant and * unique , and runs on its own set of Amazon EC2 instances . * The cluster control plane is provisioned across multiple Availability Zones and fronted by an Elastic Load * Balancing Network Load Balancer . Amazon EKS also provisions elastic network interfaces in your VPC subnets to * provide connectivity from the control plane instances to the worker nodes ( for example , to support * < code > kubectl exec < / code > , < code > logs < / code > , and < code > proxy < / code > data flows ) . * Amazon EKS worker nodes run in your AWS account and connect to your cluster ' s control plane via the Kubernetes * API server endpoint and a certificate file that is created for your cluster . * You can use the < code > endpointPublicAccess < / code > and < code > endpointPrivateAccess < / code > parameters to enable or * disable public and private access to your cluster ' s Kubernetes API server endpoint . By default , public access is * enabled and private access is disabled . For more information , see < a * href = " https : / / docs . aws . amazon . com / eks / latest / userguide / cluster - endpoint . html " > Amazon EKS Cluster Endpoint Access * Control < / a > in the < i > < i > Amazon EKS User Guide < / i > < / i > . * You can use the < code > logging < / code > parameter to enable or disable exporting the Kubernetes control plane logs * for your cluster to CloudWatch Logs . By default , cluster control plane logs are not exported to CloudWatch Logs . * For more information , see < a * href = " https : / / docs . aws . amazon . 
com / eks / latest / userguide / control - plane - logs . html " > Amazon EKS Cluster Control Plane * Logs < / a > in the < i > < i > Amazon EKS User Guide < / i > < / i > . * < note > * CloudWatch Logs ingestion , archive storage , and data scanning rates apply to exported control plane logs . For * more information , see < a href = " http : / / aws . amazon . com / cloudwatch / pricing / " > Amazon CloudWatch Pricing < / a > . * < / note > * Cluster creation typically takes between 10 and 15 minutes . After you create an Amazon EKS cluster , you must * configure your Kubernetes tooling to communicate with the API server and launch worker nodes into your cluster . * For more information , see < a href = " https : / / docs . aws . amazon . com / eks / latest / userguide / managing - auth . html " > Managing * Cluster Authentication < / a > and < a * href = " https : / / docs . aws . amazon . com / eks / latest / userguide / launch - workers . html " > Launching Amazon EKS Worker Nodes < / a > * in the < i > Amazon EKS User Guide < / i > . * @ param createClusterRequest * @ return Result of the CreateCluster operation returned by the service . * @ throws ResourceInUseException * The specified resource is in use . * @ throws ResourceLimitExceededException * You have encountered a service limit on the specified resource . * @ throws InvalidParameterException * The specified parameter is invalid . Review the available parameters for the API request . * @ throws ClientException * These errors are usually caused by a client action . Actions can include using an action or resource on * behalf of a user that doesn ' t have permissions to use the action or resource or specifying an identifier * that is not valid . * @ throws ServerException * These errors are usually caused by a server - side issue . * @ throws ServiceUnavailableException * The service is unavailable . Back off and retry the operation . 
* @ throws UnsupportedAvailabilityZoneException * At least one of your specified cluster subnets is in an Availability Zone that does not support Amazon * EKS . The exception output specifies the supported Availability Zones for your account , from which you can * choose subnets for your cluster . * @ sample AmazonEKS . CreateCluster * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / eks - 2017-11-01 / CreateCluster " target = " _ top " > AWS API * Documentation < / a > */ @ Override public CreateClusterResult createCluster ( CreateClusterRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateCluster ( request ) ;
public class PersistableImpl { /** * Only update the persistent copy of the binary data held within this Persistable . * @ param tran The ObjectManager transaction under which the update of the data is carried out . * @ exception ObjectManagerException * @ throws SevereMessageStoreException */ public void updateDataOnly ( Transaction tran , ObjectStore store ) throws PersistenceException , ObjectManagerException , SevereMessageStoreException { } }
updateDataOnly ( tran , store , this ) ;
public class CmsDelete { /** * Performs the delete operation for a single VFS resource . < p > * @ param resource the resource VFS path * @ param deleteOption the delete option for sibling deletion * @ throws CmsException if deleting the resource fails */ protected void performSingleDeleteOperation ( String resource , CmsResourceDeleteMode deleteOption ) throws CmsException { } }
// lock resource if autolock is enabled checkLock ( resource ) ; // delete the resource getCms ( ) . deleteResource ( resource , deleteOption ) ;
public class ClassLoaderFinder { /** * Pop the preferred class loader . */ public static void popPreferredClassLoader ( ) { } }
final ClassLoader sysLoader = ClassLoaderFinder . class . getClassLoader ( ) ; if ( ( dynamicLoader == null ) || ( dynamicLoader == sysLoader ) ) { dynamicLoader = null ; final Thread [ ] threads = new Thread [ Thread . activeCount ( ) ] ; Thread . enumerate ( threads ) ; for ( final Thread t : threads ) { if ( t != null ) { t . setContextClassLoader ( sysLoader ) ; } } return ; } final ClassLoader parent = dynamicLoader . getParent ( ) ; dynamicLoader = ( parent == sysLoader ) ? null : parent ; final Thread [ ] threads = new Thread [ Thread . activeCount ( ) ] ; Thread . enumerate ( threads ) ; for ( final Thread t : threads ) { if ( t != null ) { t . setContextClassLoader ( parent ) ; } }
public class HttpRequestImpl { /** * ( non - Javadoc ) * @ see com . ibm . wsspi . http . HttpRequest # isTrailersReady ( ) */ @ Override public boolean isTrailersReady ( ) { } }
if ( ! message . isChunkedEncodingSet ( ) || ! message . containsHeader ( HttpHeaderKeys . HDR_TRAILER ) || ( ( HttpBaseMessageImpl ) message ) . getTrailersImpl ( ) != null || ( message . getVersionValue ( ) . getMajor ( ) <= 1 && message . getVersionValue ( ) . getMinor ( ) < 1 ) ) return true ; return false ;
public class BeanBuilder { /** * Add to the qualifiers used for bean creation . * @ param qualifiers the additional qualifiers to use */ public BeanBuilder < T > addQualifiers ( Annotation ... qualifiers ) { } }
this . qualifiers . addAll ( Arrays2 . asSet ( qualifiers ) ) ; return this ;
public class ClassicLayoutManager { /** * If there is a SubReport on a Group , we do the layout here * @ param columnsGroup * @ param jgroup */ protected void layoutGroupSubreports ( DJGroup columnsGroup , JRDesignGroup jgroup ) { } }
log . debug ( "Starting subreport layout..." ) ; JRDesignBand footerBand = ( JRDesignBand ) ( ( JRDesignSection ) jgroup . getGroupFooterSection ( ) ) . getBandsList ( ) . get ( 0 ) ; JRDesignBand headerBand = ( JRDesignBand ) ( ( JRDesignSection ) jgroup . getGroupHeaderSection ( ) ) . getBandsList ( ) . get ( 0 ) ; layOutSubReportInBand ( columnsGroup , headerBand , DJConstants . HEADER ) ; layOutSubReportInBand ( columnsGroup , footerBand , DJConstants . FOOTER ) ;
public class MPD9AbstractReader { /** * Process calendar hours . * @ param calendar parent calendar * @ param row calendar hours data * @ param dayIndex day index */ private void processCalendarHours ( ProjectCalendar calendar , Row row , int dayIndex ) { } }
Day day = Day . getInstance ( dayIndex ) ; boolean working = row . getInt ( "CD_WORKING" ) != 0 ; calendar . setWorkingDay ( day , working ) ; if ( working == true ) { ProjectCalendarHours hours = calendar . addCalendarHours ( day ) ; Date start = row . getDate ( "CD_FROM_TIME1" ) ; Date end = row . getDate ( "CD_TO_TIME1" ) ; if ( start != null && end != null ) { hours . addRange ( new DateRange ( start , end ) ) ; } start = row . getDate ( "CD_FROM_TIME2" ) ; end = row . getDate ( "CD_TO_TIME2" ) ; if ( start != null && end != null ) { hours . addRange ( new DateRange ( start , end ) ) ; } start = row . getDate ( "CD_FROM_TIME3" ) ; end = row . getDate ( "CD_TO_TIME3" ) ; if ( start != null && end != null ) { hours . addRange ( new DateRange ( start , end ) ) ; } start = row . getDate ( "CD_FROM_TIME4" ) ; end = row . getDate ( "CD_TO_TIME4" ) ; if ( start != null && end != null ) { hours . addRange ( new DateRange ( start , end ) ) ; } start = row . getDate ( "CD_FROM_TIME5" ) ; end = row . getDate ( "CD_TO_TIME5" ) ; if ( start != null && end != null ) { hours . addRange ( new DateRange ( start , end ) ) ; } }
public class ProtobufIDLProxy { /** * Creates the . * @ param reader the reader * @ param debug the debug * @ param path the path * @ param isUniName the is uni name * @ return the map * @ throws IOException Signals that an I / O exception has occurred . */ public static Map < String , IDLProxyObject > create ( Reader reader , boolean debug , File path , boolean isUniName ) throws IOException { } }
ProtoFile protoFile = ProtoSchemaParser . parse ( DEFAULT_FILE_NAME , reader ) ; List < CodeDependent > cds = new ArrayList < CodeDependent > ( ) ; return doCreate ( protoFile , true , debug , path , false , null , cds , new HashMap < String , String > ( ) , isUniName ) ;
public class DbUtils { /** * Executes and closes the given { @ code preparedStatement } . * @ param preparedStatement * the statement that will be executed and immediately closed * @ throws SQLException * if error occurred while executing the given * { @ code preparedStatement } * @ see PreparedStatement # close ( ) * @ see PreparedStatement # execute ( ) * @ deprecated ( TODO add version ) replaced by { @ link # execute } */ @ Deprecated public static void executeAndClose ( final PreparedStatement preparedStatement ) throws SQLException { } }
try { preparedStatement . execute ( ) ; } finally { try { preparedStatement . close ( ) ; } catch ( SQLException e ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( e . getMessage ( ) , e ) ; } } }
public class Point { /** * Sets the Point to a default , non - empty state . */ void _setToDefault ( ) { } }
resizeAttributes ( m_description . getTotalComponentCount ( ) ) ; Point . attributeCopy ( m_description . _getDefaultPointAttributes ( ) , m_attributes , m_description . getTotalComponentCount ( ) ) ; m_attributes [ 0 ] = NumberUtils . NaN ( ) ; m_attributes [ 1 ] = NumberUtils . NaN ( ) ;
public class IntRange { /** * { @ inheritDoc } */ public Integer getTo ( ) { } }
if ( inclusive == null ) return to ; if ( from <= to ) return inclusive ? to : to - 1 ; return from ;
public class HttpSupport { /** * Convenience method to get file content from < code > multipart / form - data < / code > request . If more than one files with the same * name are submitted , only one is returned . * @ param fieldName name of form field from the < code > multipart / form - data < / code > request corresponding to the uploaded file . * @ param formItems form items retrieved from < code > multipart / form - data < / code > request . * @ return < code > InputStream < / code > from which to read content of uploaded file or null if FileItem with this name is not found . */ protected org . javalite . activeweb . FileItem getFile ( String fieldName , List < FormItem > formItems ) { } }
for ( FormItem formItem : formItems ) { if ( formItem instanceof org . javalite . activeweb . FileItem && formItem . getFieldName ( ) . equals ( fieldName ) ) { return ( org . javalite . activeweb . FileItem ) formItem ; } } return null ;
public class BaseRunner { /** * Start the Jetty server . */ public void startServer ( ) { } }
server = new Server ( httpPort ) ; server . setHandler ( wrapHandlers ( ) ) ; if ( isWebSocketInClassPath ( ) ) { setupForWebSocket ( ) ; } try { server . start ( ) ; log ( ) . info ( "server started" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( new ShutdownRunnable ( ) ) ) ; if ( useStdInShutdown ) { // generally for use in IDE via JettyRun , Use CTRL - D in IDE console to shutdown BufferedReader systemIn = new BufferedReader ( new InputStreamReader ( System . in , "UTF-8" ) ) ; while ( ( systemIn . readLine ( ) ) != null ) { // ignore anything except CTRL - D by itself } System . out . println ( "Shutdown via CTRL-D" ) ; System . exit ( 0 ) ; } } catch ( Exception e ) { e . printStackTrace ( ) ; System . exit ( 100 ) ; }
public class StreamingConnectionImpl { /** * Publish will publish to the cluster and wait for an ACK . */ @ Override public void publish ( String subject , byte [ ] data ) throws IOException , InterruptedException , TimeoutException { } }
final BlockingQueue < String > ch = createErrorChannel ( ) ; publish ( subject , data , null , ch ) ; String err ; if ( ! ch . isEmpty ( ) ) { err = ch . take ( ) ; if ( ! err . isEmpty ( ) ) { throw new IOException ( err ) ; } }
public class CmsExtendedWorkflowManager { /** * Checks whether the user for a given CMS context can manage workflow projects . < p > * @ param userCms the user CMS Context * @ return true if this user can manage workflow projects * @ throws CmsException if something goes wrong */ protected boolean isProjectManager ( CmsObject userCms ) throws CmsException { } }
CmsGroup managerGroup = m_adminCms . readGroup ( getWorkflowProjectManagerGroup ( ) ) ; List < CmsGroup > groups = m_adminCms . getGroupsOfUser ( userCms . getRequestContext ( ) . getCurrentUser ( ) . getName ( ) , false ) ; return groups . contains ( managerGroup ) ;
public class PatternsImpl { /** * Updates a pattern . * @ param appId The application ID . * @ param versionId The version ID . * @ param patternId The pattern ID . * @ param pattern An object representing a pattern . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PatternRuleInfo object */ public Observable < ServiceResponse < PatternRuleInfo > > updatePatternWithServiceResponseAsync ( UUID appId , String versionId , UUID patternId , PatternRuleUpdateObject pattern ) { } }
if ( this . client . endpoint ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.endpoint() is required and cannot be null." ) ; } if ( appId == null ) { throw new IllegalArgumentException ( "Parameter appId is required and cannot be null." ) ; } if ( versionId == null ) { throw new IllegalArgumentException ( "Parameter versionId is required and cannot be null." ) ; } if ( patternId == null ) { throw new IllegalArgumentException ( "Parameter patternId is required and cannot be null." ) ; } if ( pattern == null ) { throw new IllegalArgumentException ( "Parameter pattern is required and cannot be null." ) ; } Validator . validate ( pattern ) ; String parameterizedHost = Joiner . on ( ", " ) . join ( "{Endpoint}" , this . client . endpoint ( ) ) ; return service . updatePattern ( appId , versionId , patternId , pattern , this . client . acceptLanguage ( ) , parameterizedHost , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < PatternRuleInfo > > > ( ) { @ Override public Observable < ServiceResponse < PatternRuleInfo > > call ( Response < ResponseBody > response ) { try { ServiceResponse < PatternRuleInfo > clientResponse = updatePatternDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class DisableJSessionIDinUrlApplication { /** * Disable sessionId in the url if it comes from a robot . * @ param webRequest * the web request * @ param httpServletResponse * the http servlet response * @ return the web response */ @ Override protected WebResponse newWebResponse ( final WebRequest webRequest , final HttpServletResponse httpServletResponse ) { } }
return new ServletWebResponse ( ( ServletWebRequest ) webRequest , httpServletResponse ) { @ Override public String encodeRedirectURL ( final CharSequence url ) { return isRobot ( webRequest ) ? url . toString ( ) : super . encodeRedirectURL ( url ) ; } @ Override public String encodeURL ( final CharSequence url ) { return isRobot ( webRequest ) ? url . toString ( ) : super . encodeURL ( url ) ; } private boolean isRobot ( final WebRequest request ) { final String agent = webRequest . getHeader ( "User-Agent" ) ; return BotAgentInspector . isAgent ( agent ) ; } } ;
public class DerivedByInsertionFrom { /** * Gets the value of the other property . * This accessor method returns a reference to the live list , * not a snapshot . Therefore any modification you make to the * returned list will be present inside the JAXB object . * This is why there is not a < CODE > set < / CODE > method for the others property . * For example , to add a new item , do as follows : * < pre > * getOthers ( ) . add ( newItem ) ; * < / pre > * Objects of the following type ( s ) are allowed in the list * { @ link Other } */ public List < org . openprovenance . prov . model . Other > getOther ( ) { } }
if ( other == null ) { other = new ArrayList < org . openprovenance . prov . model . Other > ( ) ; } return this . other ;
public class VorbisFormatConversionProvider { /** * Returns converted AudioInputStream . * @ param audioInputStream * @ return */ @ Override public AudioInputStream getAudioInputStream ( AudioFormat targetFormat , AudioInputStream audioInputStream ) { } }
if ( isConversionSupported ( targetFormat , audioInputStream . getFormat ( ) ) ) { return new DecodedVorbisAudioInputStream ( targetFormat , audioInputStream ) ; } else { throw new IllegalArgumentException ( "conversion not supported" ) ; }
public class LBiObjLongConsumerBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < T1 , T2 > LBiObjLongConsumer < T1 , T2 > biObjLongConsumerFrom ( Consumer < LBiObjLongConsumerBuilder < T1 , T2 > > buildingFunction ) { } }
LBiObjLongConsumerBuilder builder = new LBiObjLongConsumerBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class DateFormatSymbols { /** * Finds the ResourceBundle containing the date format information for * a specified calendar subclass in a given locale . * The resource bundle name is based on the calendar ' s fully - specified * class name , with " . resources " inserted at the end of the package name * ( just before the class name ) and " Symbols " appended to the end . * For example , the bundle corresponding to " android . icu . util . HebrewCalendar " * is " android . icu . impl . data . HebrewCalendarSymbols " . * < b > Note : < / b > Because of the structural changes in the ICU locale bundle , * this API no longer works as described . This method always returns null . * @ deprecated ICU 4.0 * @ hide original deprecated declaration */ @ Deprecated // This API was formerly @ stable ICU 2.0 static public ResourceBundle getDateFormatBundle ( Class < ? extends Calendar > calendarClass , Locale locale ) throws MissingResourceException { } }
return null ;
public class AbcGrammar { /** * field - userdef - play : : = % x75.3A * WSP userdef header - eol < p > * < tt > u : < / tt > */ Rule FieldUserdefPlay ( ) { } }
return Sequence ( String ( "u:" ) , ZeroOrMore ( WSP ( ) ) . suppressNode ( ) , Userdef ( ) , HeaderEol ( ) ) . label ( FieldUserdefPlay ) ;
public class DbRemoteConfigLoader { /** * 加载adapter配置 */ @ Override public void loadRemoteAdapterConfigs ( ) { } }
try { // 加载远程adapter配置 loadModifiedAdapterConfigs ( ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; }
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link ElementaryFunctionsType } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.w3.org/1998/Math/MathML" , name = "sech" ) public JAXBElement < ElementaryFunctionsType > createSech ( ElementaryFunctionsType value ) { } }
return new JAXBElement < ElementaryFunctionsType > ( _Sech_QNAME , ElementaryFunctionsType . class , null , value ) ;
public class USerializedSet { /** * Returns a range of characters contained in the given serialized * set . * @ param rangeIndex a non - negative integer in the range < code > 0 . . * getSerializedRangeCount ( ) - 1 < / code > * @ param range variable to receive the data in the range * @ return true if rangeIndex is valid , otherwise false */ public final boolean getRange ( int rangeIndex , int [ ] range ) { } }
if ( rangeIndex < 0 ) { return false ; } if ( array == null ) { array = new char [ 8 ] ; } if ( range == null || range . length < 2 ) { throw new IllegalArgumentException ( ) ; } rangeIndex *= 2 ; /* address start / limit pairs */ if ( rangeIndex < bmpLength ) { range [ 0 ] = array [ rangeIndex ++ ] ; if ( rangeIndex < bmpLength ) { range [ 1 ] = array [ rangeIndex ] - 1 ; } else if ( rangeIndex < length ) { range [ 1 ] = ( ( ( ( int ) array [ rangeIndex ] ) << 16 ) | array [ rangeIndex + 1 ] ) - 1 ; } else { range [ 1 ] = 0x10ffff ; } return true ; } else { rangeIndex -= bmpLength ; rangeIndex *= 2 ; /* address pairs of pairs of units */ int suppLength = length - bmpLength ; if ( rangeIndex < suppLength ) { int offset = arrayOffset + bmpLength ; range [ 0 ] = ( ( ( int ) array [ offset + rangeIndex ] ) << 16 ) | array [ offset + rangeIndex + 1 ] ; rangeIndex += 2 ; if ( rangeIndex < suppLength ) { range [ 1 ] = ( ( ( ( int ) array [ offset + rangeIndex ] ) << 16 ) | array [ offset + rangeIndex + 1 ] ) - 1 ; } else { range [ 1 ] = 0x10ffff ; } return true ; } else { return false ; } }
public class UpdateThingGroupsForThingRequest { /** * The groups from which the thing will be removed . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setThingGroupsToRemove ( java . util . Collection ) } or { @ link # withThingGroupsToRemove ( java . util . Collection ) } * if you want to override the existing values . * @ param thingGroupsToRemove * The groups from which the thing will be removed . * @ return Returns a reference to this object so that method calls can be chained together . */ public UpdateThingGroupsForThingRequest withThingGroupsToRemove ( String ... thingGroupsToRemove ) { } }
if ( this . thingGroupsToRemove == null ) { setThingGroupsToRemove ( new java . util . ArrayList < String > ( thingGroupsToRemove . length ) ) ; } for ( String ele : thingGroupsToRemove ) { this . thingGroupsToRemove . add ( ele ) ; } return this ;
public class CmsListMetadata { /** * Returns the html code for a list item . < p > * @ param item the list item to render * @ param odd if the position is odd or even * @ param isPrintable if the list is to be printed * @ return html code */ public String htmlItem ( CmsListItem item , boolean odd , boolean isPrintable ) { } }
StringBuffer html = new StringBuffer ( 1024 ) ; html . append ( "<tr " ) ; if ( ! isPrintable ) { html . append ( "class='" ) ; html . append ( odd ? "oddrowbg" : ( getWp ( ) . useNewStyle ( ) ? "evenrowbg" : "evenrowbgnew" ) ) ; html . append ( "'" ) ; } html . append ( ">\n" ) ; Iterator < CmsListColumnDefinition > itCols = m_columns . elementList ( ) . iterator ( ) ; int width = 0 ; while ( itCols . hasNext ( ) ) { CmsListColumnDefinition col = itCols . next ( ) ; if ( ! col . isVisible ( ) && ! isPrintable ) { continue ; } if ( ! col . isPrintable ( ) && isPrintable ) { continue ; } width ++ ; StringBuffer style = new StringBuffer ( 64 ) ; html . append ( "<td" ) ; CmsListColumnAlignEnum align = col . getAlign ( ) ; if ( ( align != CmsListColumnAlignEnum . ALIGN_LEFT ) && CmsStringUtil . isNotEmpty ( align . toString ( ) ) ) { style . append ( "text-align: " ) ; style . append ( col . getAlign ( ) ) ; style . append ( "; " ) ; } if ( col . isTextWrapping ( ) ) { style . append ( "white-space: normal;" ) ; } if ( isPrintable ) { style . append ( "border-top: 1px solid black;" ) ; } if ( style . length ( ) > 0 ) { html . append ( " style='" ) ; html . append ( style ) ; html . append ( "'" ) ; } html . append ( ">\n" ) ; html . append ( col . htmlCell ( item , isPrintable ) ) ; html . append ( "</td>\n" ) ; } if ( ! isPrintable && hasCheckMultiActions ( ) ) { width ++ ; html . append ( "\t<td class='select' style='text-align: center'>\n" ) ; html . append ( "\t\t<input type='checkbox' class='checkbox' name='listMultiAction' value='" ) ; html . append ( item . getId ( ) ) ; html . append ( "'>\n" ) ; html . append ( "\t</td>\n" ) ; } html . append ( "</tr>\n" ) ; Iterator < CmsListItemDetails > itDet = m_itemDetails . elementList ( ) . iterator ( ) ; while ( itDet . hasNext ( ) ) { CmsListItemDetails lid = itDet . next ( ) ; if ( ! lid . isVisible ( ) && ! isPrintable ) { continue ; } if ( ! lid . isPrintable ( ) && isPrintable ) { continue ; } if ( ( item . 
get ( lid . getId ( ) ) != null ) && CmsStringUtil . isNotEmptyOrWhitespaceOnly ( item . get ( lid . getId ( ) ) . toString ( ) ) ) { int padCols = 0 ; itCols = m_columns . elementList ( ) . iterator ( ) ; while ( itCols . hasNext ( ) ) { CmsListColumnDefinition col = itCols . next ( ) ; if ( col . getId ( ) . equals ( lid . getAtColumn ( ) ) ) { break ; } if ( ! col . isVisible ( ) && ! isPrintable ) { continue ; } if ( ! col . isPrintable ( ) && isPrintable ) { continue ; } padCols ++ ; } int spanCols = width - padCols ; html . append ( "<tr " ) ; if ( ! isPrintable ) { html . append ( "class='" ) ; html . append ( odd ? "oddrowbg" : ( getWp ( ) . useNewStyle ( ) ? "evenrowbg" : "evenrowbgnew" ) ) ; html . append ( "'" ) ; } html . append ( ">\n" ) ; if ( padCols > 0 ) { html . append ( "<td colspan='" ) ; html . append ( padCols ) ; html . append ( "'>&nbsp;</td>\n" ) ; } html . append ( "<td colspan='" ) ; html . append ( spanCols ) ; html . append ( "' style='padding-left: 20px; white-space:normal;'>\n" ) ; html . append ( lid . htmlCell ( item , isPrintable ) ) ; html . append ( "\n</td>\n" ) ; html . append ( "\n" ) ; html . append ( "</tr>\n" ) ; } } return html . toString ( ) ;
public class Cluster { /** * Sets the state of the current Ordasity node and notifies others via ZooKeeper . */ private boolean setState ( NodeState to ) { } }
try { NodeInfo myInfo = new NodeInfo ( to . toString ( ) , zk . get ( ) . getSessionId ( ) ) ; byte [ ] encoded = JsonUtil . asJSONBytes ( myInfo ) ; ZKUtils . set ( zk , "/" + name + "/nodes/" + myNodeID , encoded ) ; state . set ( to ) ; return true ; } catch ( Exception e ) { // InterruptedException , IOException LOG . warn ( "Problem trying to setState(" + to + "): " + e . getMessage ( ) , e ) ; return false ; }
public class Tracy { /** * This method is used to capture annotations which should be sent back to the HTTP client * HttpResponse annotations are created by this method and retrieved using { @ link # getHttpResponseBufferAnnotations } * when the HTTP response header is to be returned as shown in example below < br > * < code > < pre > * Tracy . annotate ( " key1 " , " val1 " ) ; * annotateOnHttpResponseBuffer ( " key1 " ) ; * < / pre > < / code > * annotateOnHttpResponseBuffer ( key ) must be called after a ( Tracy frame ) Tracy . annotation ( key , value ) as it will * retrieve the value from the recently created Tracy . annotation . < br > * annotateOnHttpResponseBuffer ( key ) can be called from any point in the Tracy frame stack . Tracy will store them * at the topmost level of the thread context to be easily accessible using { @ link # getHttpResponseBufferAnnotations } method * @ param key defines the recently used key used in Tracy . annotate ( key , val ) which is to be sent back in the HTTP response header */ public static void annotateOnHttpResponseBuffer ( String key ) { } }
// Forward the key to the calling thread's TracyThreadContext, which buffers
// the most recent annotation value for that key for the HTTP response header.
// Silently a no-op when the thread has no valid Tracy context.
TracyThreadContext ctx = threadContext . get ( ) ;
if ( isValidContext ( ctx ) ) {
    ctx . annotateOnHttpResponseBuffer ( key ) ;
}
public class Api { /** * Update access mode of one or more resources by tag * @ param accessMode The new access mode , " public " or " authenticated " * @ param tag The tag by which to filter applicable resources * @ param options additional options * < ul > * < li > resource _ type - ( default " image " ) - the type of resources to modify < / li > * < li > max _ results - optional - the maximum resources to process in a single invocation < / li > * < li > next _ cursor - optional - provided by a previous call to the method < / li > * < / ul > * @ return a map of the returned values * < ul > * < li > updated - an array of resources < / li > * < li > next _ cursor - optional - provided if more resources can be processed < / li > * < / ul > * @ throws ApiException an API exception */ public ApiResponse updateResourcesAccessModeByTag ( String accessMode , String tag , Map options ) throws Exception { } }
// Thin wrapper: delegates to the generic by-key variant with "tag" as the
// filter-key type.
return updateResourcesAccessMode ( accessMode , "tag" , tag , options ) ;
public class Vector3i { /** * Set the value of the specified component of this vector . * @ param component * the component whose value to set , within < code > [ 0 . . 2 ] < / code > * @ param value * the value to set * @ return this * @ throws IllegalArgumentException if < code > component < / code > is not within < code > [ 0 . . 2 ] < / code > */ public Vector3i setComponent ( int component , int value ) throws IllegalArgumentException { } }
// Dispatch on the component index (0 -> x, 1 -> y, 2 -> z) and return this
// for chaining. Anything outside [0..2] is rejected with a descriptive
// message — the original threw a message-less IllegalArgumentException,
// which makes the failure hard to diagnose at the call site.
switch (component) {
    case 0:
        x = value;
        break;
    case 1:
        y = value;
        break;
    case 2:
        z = value;
        break;
    default:
        throw new IllegalArgumentException("component must be within [0..2]: " + component);
}
return this;
public class Photo { /** * Get an InputStream for the original image . Callers must close the stream upon completion . * @ deprecated * @ see PhotosInterface # getImageAsStream ( Photo , int ) * @ return The InputStream * @ throws IOException */ @ Deprecated public InputStream getOriginalAsStream ( ) throws IOException , FlickrException { } }
// Use the stored original-format extension when known (suffix "_o.<format>");
// otherwise fall back to the default original-image suffix.
if ( originalFormat != null ) {
    return getOriginalImageAsStream ( "_o." + originalFormat ) ;
}
return getOriginalImageAsStream ( DEFAULT_ORIGINAL_IMAGE_SUFFIX ) ;
public class RecursiveXPathBuilder { /** * Establish a namespace context that will be used in for the * XPath . * < p > Without a namespace context ( or with an empty context ) the * XPath expressions will only use local names for elements and * attributes . < / p > * @ param prefix2uri maps from prefix to namespace URI . */ public void setNamespaceContext ( Map < String , String > prefix2uri ) { } }
// A null argument means "no namespace context", which is normalized to an
// immutable empty map. Otherwise an unmodifiable view is stored, so later
// mutation of the caller's map cannot leak into the builder.
if (prefix2uri == null) {
    this.prefix2uri = Collections.<String, String>emptyMap();
} else {
    this.prefix2uri = Collections.unmodifiableMap(prefix2uri);
}
public class UtilReflection { /** * Store all declared valid interfaces into next . * @ param base The minimum base interface . * @ param currents The current interfaces found . * @ param nexts The next interface to check . */ private static void checkInterfaces ( Class < ? > base , Deque < Class < ? > > currents , Deque < Class < ? > > nexts ) { } }
// Same traversal as the original stream pipeline, written as plain loops:
// for every class currently collected, queue each directly-declared
// super-interface that is assignable to (but not equal to) the minimum base
// interface. Iteration order matches the sequential stream's order.
for (Class<?> current : currents) {
    for (Class<?> candidate : current.getInterfaces()) {
        if (base.isAssignableFrom(candidate) && !candidate.equals(base)) {
            nexts.add(candidate);
        }
    }
}
public class SmsAlertingSbb { /** * implements javax . slee . Sbb Please refer to JSLEE v1.1 Specification , Early * Draft Review Page 54 for further information . < br > * The SLEE invokes this method after a new instance of the SBB abstract * class is created . During this method , an SBB entity has not been assigned * to the SBB object . The SBB object can take advantage of this method to * allocate and initialize state or connect to resources that are to be held * by the SBB object during its lifetime . Such state and resources cannot be * specific to an SBB entity because the SBB object might be reused during * its lifetime to serve multiple SBB entities . < br > * This method indicates a transition from state " DOES NOT EXIST " to * " POOLED " ( see page 52) */ public void setSbbContext ( SbbContext sbbContext ) { } }
// Cache the SBB context, then look up the SMPP 3.4 resource adaptor's
// provider and activity-context-interface factory from the component's JNDI
// environment. A failed lookup is logged but NOT rethrown, which leaves the
// smppProvider/smppAcif fields null.
this . sbbContext = sbbContext ;
try {
    Context ctx = ( Context ) new InitialContext ( ) . lookup ( "java:comp/env" ) ;
    smppProvider = ( SmppProvider ) ctx . lookup ( "slee/resources/smpp/3.4/smppinterface" ) ;
    smppAcif = ( ActivityContextInterfaceFactory ) ctx . lookup ( "slee/resources/smpp/3.4/factoryprovider" ) ;
} catch ( NamingException ne ) {
    logger . error ( "Could not set SBB context: " + ne . toString ( ) , ne ) ;
}
public class UserLayoutHelperImpl { /** * Resets a users layout for all the users profiles * @ param personAttributes */ @ BasePortalJpaDao . PortalTransactional public void resetUserLayoutAllProfiles ( final IPersonAttributes personAttributes ) { } }
// Build a restricted IPerson around the given attributes, resolve (but do
// not create — second argument false) its portal UID, then reset both the
// layout and the stylesheet preferences for every profile the user has.
final IPerson person = PersonFactory . createRestrictedPerson ( ) ;
person . setAttributes ( personAttributes . getAttributes ( ) ) ;
// get the integer uid into the person object without creating any new person data
int uid = userIdentityStore . getPortalUID ( person , false ) ;
person . setID ( uid ) ;
final Hashtable < Integer , UserProfile > map = userLayoutStore . getUserProfileList ( person ) ;
for ( UserProfile profile : map . values ( ) ) {
    resetUserLayoutForProfileByName ( person , profile ) ;
    resetStylesheetUserPreferencesForProfile ( person , profile ) ;
}
public class Tags { /** * Return a new { @ code Tags } instance containing tags constructed from the specified key / value pairs . * @ param keyValues the key / value pairs to add * @ return a new { @ code Tags } instance */ public static Tags of ( @ Nullable String ... keyValues ) { } }
if ( keyValues == null || keyValues . length == 0 ) { return empty ( ) ; } if ( keyValues . length % 2 == 1 ) { throw new IllegalArgumentException ( "size must be even, it is a set of key=value pairs" ) ; } Tag [ ] tags = new Tag [ keyValues . length / 2 ] ; for ( int i = 0 ; i < keyValues . length ; i += 2 ) { tags [ i / 2 ] = Tag . of ( keyValues [ i ] , keyValues [ i + 1 ] ) ; } return new Tags ( tags ) ;
public class Utils { /** * This stupidity is required because data types of the source and dest * might be different , but they might have the same value . Like , a number * represented as string in one system could be an int in another . That is * valid , but JSON comparison fails for that , so we check if the two nodes * are * really * different , meaning : for anything but dates , check string * equivalence . For dates , normalize by TZ and check . */ public static boolean reallyDifferent ( JsonNode source , JsonNode dest , boolean ignoreTimestampMSDiffs ) { } }
// Null and NullNode are treated as the same "absent" value: both absent ->
// not different; exactly one absent -> different.
if ( source == null || source instanceof NullNode ) {
    if ( dest == null || dest instanceof NullNode ) {
        return false ;
    } else {
        return true ;
    }
} else if ( dest == null || dest instanceof NullNode ) {
    return true ;
} else {
    // Compare textual representations first — equal strings are never
    // "really" different regardless of node type.
    String s1 = source . asText ( ) ;
    String s2 = dest . asText ( ) ;
    if ( s1 . equals ( s2 ) ) {
        return false ;
    }
    // They are different strings
    // Do they look like dates ?
    Date d1 ;
    Date d2 ;
    DateFormat fmt = ClientConstants . getDateFormat ( ) ;
    // If either side fails to parse as a date, they really are different.
    try {
        d1 = fmt . parse ( s1 ) ;
    } catch ( Exception e ) {
        return true ;
    }
    try {
        d2 = fmt . parse ( s2 ) ;
    } catch ( Exception e ) {
        return true ;
    }
    if ( ignoreTimestampMSDiffs ) {
        // Truncate both timestamps to whole seconds before comparing.
        long d1ms = 1000 * ( d1 . getTime ( ) / 1000 ) ;
        long d2ms = 1000 * ( d2 . getTime ( ) / 1000 ) ;
        return d1ms != d2ms ;
    } else {
        return d1 . getTime ( ) != d2 . getTime ( ) ;
    }
}
public class OrchestrationShardingDataSource { /** * Renew sharding data source . * @ param dataSourceChangedEvent data source changed event */ @ Subscribe @ SneakyThrows public final synchronized void renew ( final DataSourceChangedEvent dataSourceChangedEvent ) { } }
// Close the current ShardingDataSource, then rebuild it from the changed
// data source configurations while reusing the previous sharding rule and
// properties. The constructor arguments are evaluated BEFORE the assignment,
// so getShardingContext() is read from the just-closed instance.
// NOTE(review): this relies on close() leaving the sharding context readable
// — confirm against ShardingDataSource's close semantics.
dataSource . close ( ) ;
dataSource = new ShardingDataSource ( DataSourceConverter . getDataSourceMap ( dataSourceChangedEvent . getDataSourceConfigurations ( ) ) , dataSource . getShardingContext ( ) . getShardingRule ( ) , dataSource . getShardingContext ( ) . getShardingProperties ( ) . getProps ( ) ) ;
public class ApplicationSnapshotConfigurationUpdateMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ApplicationSnapshotConfigurationUpdate applicationSnapshotConfigurationUpdate , ProtocolMarshaller protocolMarshaller ) { } }
// Generated SDK marshaller: reject a null model object, then marshall the
// single SnapshotsEnabledUpdate member through its binding. Any failure is
// wrapped in SdkClientException with the cause preserved.
if ( applicationSnapshotConfigurationUpdate == null ) {
    throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
}
try {
    protocolMarshaller . marshall ( applicationSnapshotConfigurationUpdate . getSnapshotsEnabledUpdate ( ) , SNAPSHOTSENABLEDUPDATE_BINDING ) ;
} catch ( Exception e ) {
    throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link PersonType } * { @ code > } */ @ XmlElementDecl ( namespace = "http://www.w3.org/2005/Atom" , name = "author" , scope = FeedType . class ) public JAXBElement < PersonType > createFeedTypeAuthor ( PersonType value ) { } }
// Wrap the PersonType in a JAXBElement for the Atom "author" element scoped
// to FeedType. NOTE(review): the QName constant used is ENTRY_TYPE_AUTHOR_QNAME
// rather than a FeedType-specific one — presumably both resolve to
// {http://www.w3.org/2005/Atom}author; verify the constant's value.
return new JAXBElement < PersonType > ( ENTRY_TYPE_AUTHOR_QNAME , PersonType . class , FeedType . class , value ) ;
public class UnicodeSetIterator { /** * Returns the next element in the set , either a single code point * or a string . If there are no more elements in the set , return * false . If < tt > codepoint = = IS _ STRING < / tt > , the value is a * string in the < tt > string < / tt > field . Otherwise the value is a * single code point in the < tt > codepoint < / tt > field . * < p > The order of iteration is all code points in sorted order , * followed by all strings sorted order . < tt > codepointEnd < / tt > is * undefined after calling this method . < tt > string < / tt > is * undefined unless < tt > codepoint = = IS _ STRING < / tt > . Do not mix * calls to < tt > next ( ) < / tt > and < tt > nextRange ( ) < / tt > without * calling < tt > reset ( ) < / tt > between them . The results of doing so * are undefined . * < p > < b > Warning : < / b > For speed , UnicodeSet iteration does not check for concurrent modification . * Do not alter the UnicodeSet while iterating . * @ return true if there was another element in the set and this * object contains the element . */ public boolean next ( ) { } }
// Still inside the current code point range: emit the next code point.
if ( nextElement <= endElement ) {
    codepoint = codepointEnd = nextElement ++ ;
    return true ;
}
// Current range exhausted: advance to the next range, if any.
if ( range < endRange ) {
    loadRange ( ++ range ) ;
    codepoint = codepointEnd = nextElement ++ ;
    return true ;
}
// stringIterator == null iff there are no string elements remaining
if ( stringIterator == null ) {
    return false ;
}
codepoint = IS_STRING ; // signal that value is actually a string
string = stringIterator . next ( ) ;
// Drop the iterator once drained so the check above terminates iteration.
if ( ! stringIterator . hasNext ( ) ) {
    stringIterator = null ;
}
return true ;
public class CompletionUtils { /** * Find exported variables from raw graph files ( including malformed ones ) . * * @ param appDirectory the application ' s directory ( can be null ) * @ return a non - null map of exported variables ( key = name , value = default value ) */ public static Map < String , String > findAllExportedVariables ( File appDirectory ) { } }
// TreeMap : keys are sorted alphabetically .
Map < String , String > result = new TreeMap < > ( ) ;
for ( RoboconfTypeBean type : findAllTypes ( appDirectory ) . values ( ) ) {
    // A type that exports anything also gets a "<type>.*" wildcard entry.
    if ( type . exportedVariables . size ( ) > 0 )
        result . put ( type . getName ( ) + ".*" , IMPORT_ALL_THE_VARIABLES ) ;
    // Then one entry per exported variable, with a resolved description.
    for ( Map . Entry < String , String > entry : type . exportedVariables . entrySet ( ) ) {
        String desc = resolveStringDescription ( entry . getKey ( ) , entry . getValue ( ) ) ;
        result . put ( entry . getKey ( ) , desc ) ;
    }
}
return result ;
public class ConnectionWriteCompletedCallback { /** * Switch the ' idle ' flag back to ' true ' , provided that there is no work available . * @ return true iff ' idle ' was set to ' true ' ; false iff there is now work available . * @ throws SIConnectionDroppedException */ private boolean switchToIdle ( ) throws SIConnectionDroppedException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
    SibTr . entry ( this , tc , "switchToIdle" ) ;
final boolean noMoreWork ;
// Lock ordering: priorityQueue first, then this.
// NOTE(review): presumably this matches the ordering used by the enqueue
// path elsewhere in the class — confirm to rule out deadlock.
synchronized ( priorityQueue ) {
    synchronized ( this ) {
        // Only go idle when no work arrived in the meantime; the check and
        // the flag update must be atomic under both locks.
        noMoreWork = ! isWorkAvailable ( ) ;
        idle = noMoreWork ;
    }
}
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
    SibTr . exit ( this , tc , "switchToIdle" , Boolean . valueOf ( noMoreWork ) ) ;
return noMoreWork ;
public class LinkerDef { /** * Returns an array of active library sets for this linker definition . */ public LibrarySet [ ] getActiveLibrarySets ( final LinkerDef [ ] defaultProviders , final int index ) { } }
// When this def is a reference to another LinkerDef, delegate resolution.
// NOTE(review): the reference branch calls getActiveUserLibrarySets (user
// libraries only), while the local path below gathers user sets AND system
// sets — confirm this asymmetry is intentional.
if ( isReference ( ) ) {
    return ( ( LinkerDef ) getCheckedRef ( LinkerDef . class , "LinkerDef" ) ) . getActiveUserLibrarySets ( defaultProviders , index ) ;
}
final Project p = getProject ( ) ;
final Vector libsets = new Vector ( ) ;
// User library sets: default providers from 'index' onward, then this def's.
for ( int i = index ; i < defaultProviders . length ; i ++ ) {
    defaultProviders [ i ] . addActiveUserLibrarySets ( p , libsets ) ;
}
addActiveUserLibrarySets ( p , libsets ) ;
// System library sets follow, in the same provider order.
for ( int i = index ; i < defaultProviders . length ; i ++ ) {
    defaultProviders [ i ] . addActiveSystemLibrarySets ( p , libsets ) ;
}
addActiveSystemLibrarySets ( p , libsets ) ;
final LibrarySet [ ] sets = new LibrarySet [ libsets . size ( ) ] ;
libsets . copyInto ( sets ) ;
return sets ;
public class RaftAlgorithm { /** * Transition this server from { @ link Role # CANDIDATE } or { @ link Role # LEADER } to { @ link Role # FOLLOWER } . * < strong > This method is package - private for testing * reasons only ! < / strong > It should < strong > never < / strong > * be called in a non - test context ! * @ param newCurrentTerm new election term for this { @ code RaftAlgorithm } instance * @ param newLeader unique id of the leader server if known . { @ code null } otherwise */ synchronized void becomeFollower ( long newCurrentTerm , @ Nullable String newLeader ) throws StorageException { } }
// The election term must strictly increase; validated before any state is
// touched so a failed transition leaves the node unchanged.
long currentTerm = store . getCurrentTerm ( ) ;
checkArgument ( currentTerm < newCurrentTerm , "currentTerm:%s newCurrentTerm:%s" , currentTerm , newCurrentTerm ) ;
logRoleChange ( newCurrentTerm , role , Role . FOLLOWER ) ;
// A follower does not send heartbeats; cancel any leader/candidate timer
// before persisting the new term and follower state.
stopHeartbeatTimeout ( ) ;
store . setCurrentTerm ( newCurrentTerm ) ;
setFollowerState ( newLeader ) ;
public class AESFastEngine { /** * Calculate the necessary round keys * The number of calculations depends on key size and block size * AES specified a fixed block size of 128 bits and key sizes 128/192/256 bits * This code is written assuming those are the only possible values */ private int [ ] [ ] generateWorkingKey ( byte [ ] key , boolean forEncryption ) { } }
// AES key schedule (FIPS-197): validate the key length (must be 128, 192 or
// 256 bits), derive ROUNDS = Nk + 6, and expand the key into (ROUNDS + 1)
// round keys of four 32-bit little-endian words each. The three switch cases
// are hand-unrolled expansions for Nk = 4, 6 and 8; the 192/256-bit cases
// shadow the static `rcon` table with a local running round constant that is
// doubled after each use. For decryption (!forEncryption), every inner round
// key is passed through the inverse MixColumns transform (inv_mcol) so the
// equivalent-inverse-cipher form can be used. Statement order below is
// load-bearing (each word feeds the next) — left byte-identical.
int keyLen = key . length ; if ( keyLen < 16 || keyLen > 32 || ( keyLen & 7 ) != 0 ) { throw new IllegalArgumentException ( "Key length not 128/192/256 bits." ) ; } int KC = keyLen >>> 2 ; ROUNDS = KC + 6 ; // This is not always true for the generalized Rijndael that allows larger block sizes
int [ ] [ ] W = new int [ ROUNDS + 1 ] [ 4 ] ; // 4 words in a block
switch ( KC ) { case 4 : { int t0 = Pack . littleEndianToInt ( key , 0 ) ; W [ 0 ] [ 0 ] = t0 ; int t1 = Pack . littleEndianToInt ( key , 4 ) ; W [ 0 ] [ 1 ] = t1 ; int t2 = Pack . littleEndianToInt ( key , 8 ) ; W [ 0 ] [ 2 ] = t2 ; int t3 = Pack . littleEndianToInt ( key , 12 ) ; W [ 0 ] [ 3 ] = t3 ; for ( int i = 1 ; i <= 10 ; ++ i ) { int u = subWord ( shift ( t3 , 8 ) ) ^ rcon [ i - 1 ] ; t0 ^= u ; W [ i ] [ 0 ] = t0 ; t1 ^= t0 ; W [ i ] [ 1 ] = t1 ; t2 ^= t1 ; W [ i ] [ 2 ] = t2 ; t3 ^= t2 ; W [ i ] [ 3 ] = t3 ; } break ; } case 6 : { int t0 = Pack . littleEndianToInt ( key , 0 ) ; W [ 0 ] [ 0 ] = t0 ; int t1 = Pack . littleEndianToInt ( key , 4 ) ; W [ 0 ] [ 1 ] = t1 ; int t2 = Pack . littleEndianToInt ( key , 8 ) ; W [ 0 ] [ 2 ] = t2 ; int t3 = Pack . littleEndianToInt ( key , 12 ) ; W [ 0 ] [ 3 ] = t3 ; int t4 = Pack . littleEndianToInt ( key , 16 ) ; W [ 1 ] [ 0 ] = t4 ; int t5 = Pack .
littleEndianToInt ( key , 20 ) ; W [ 1 ] [ 1 ] = t5 ; int rcon = 1 ; int u = subWord ( shift ( t5 , 8 ) ) ^ rcon ; rcon <<= 1 ; t0 ^= u ; W [ 1 ] [ 2 ] = t0 ; t1 ^= t0 ; W [ 1 ] [ 3 ] = t1 ; t2 ^= t1 ; W [ 2 ] [ 0 ] = t2 ; t3 ^= t2 ; W [ 2 ] [ 1 ] = t3 ; t4 ^= t3 ; W [ 2 ] [ 2 ] = t4 ; t5 ^= t4 ; W [ 2 ] [ 3 ] = t5 ; for ( int i = 3 ; i < 12 ; i += 3 ) { u = subWord ( shift ( t5 , 8 ) ) ^ rcon ; rcon <<= 1 ; t0 ^= u ; W [ i ] [ 0 ] = t0 ; t1 ^= t0 ; W [ i ] [ 1 ] = t1 ; t2 ^= t1 ; W [ i ] [ 2 ] = t2 ; t3 ^= t2 ; W [ i ] [ 3 ] = t3 ; t4 ^= t3 ; W [ i + 1 ] [ 0 ] = t4 ; t5 ^= t4 ; W [ i + 1 ] [ 1 ] = t5 ; u = subWord ( shift ( t5 , 8 ) ) ^ rcon ; rcon <<= 1 ; t0 ^= u ; W [ i + 1 ] [ 2 ] = t0 ; t1 ^= t0 ; W [ i + 1 ] [ 3 ] = t1 ; t2 ^= t1 ; W [ i + 2 ] [ 0 ] = t2 ; t3 ^= t2 ; W [ i + 2 ] [ 1 ] = t3 ; t4 ^= t3 ; W [ i + 2 ] [ 2 ] = t4 ; t5 ^= t4 ; W [ i + 2 ] [ 3 ] = t5 ; } u = subWord ( shift ( t5 , 8 ) ) ^ rcon ; t0 ^= u ; W [ 12 ] [ 0 ] = t0 ; t1 ^= t0 ; W [ 12 ] [ 1 ] = t1 ; t2 ^= t1 ; W [ 12 ] [ 2 ] = t2 ; t3 ^= t2 ; W [ 12 ] [ 3 ] = t3 ; break ; } case 8 : { int t0 = Pack . littleEndianToInt ( key , 0 ) ; W [ 0 ] [ 0 ] = t0 ; int t1 = Pack . littleEndianToInt ( key , 4 ) ; W [ 0 ] [ 1 ] = t1 ; int t2 = Pack . littleEndianToInt ( key , 8 ) ; W [ 0 ] [ 2 ] = t2 ; int t3 = Pack . littleEndianToInt ( key , 12 ) ; W [ 0 ] [ 3 ] = t3 ; int t4 = Pack . littleEndianToInt ( key , 16 ) ; W [ 1 ] [ 0 ] = t4 ; int t5 = Pack . littleEndianToInt ( key , 20 ) ; W [ 1 ] [ 1 ] = t5 ; int t6 = Pack . littleEndianToInt ( key , 24 ) ; W [ 1 ] [ 2 ] = t6 ; int t7 = Pack .
littleEndianToInt ( key , 28 ) ; W [ 1 ] [ 3 ] = t7 ; int u , rcon = 1 ; for ( int i = 2 ; i < 14 ; i += 2 ) { u = subWord ( shift ( t7 , 8 ) ) ^ rcon ; rcon <<= 1 ; t0 ^= u ; W [ i ] [ 0 ] = t0 ; t1 ^= t0 ; W [ i ] [ 1 ] = t1 ; t2 ^= t1 ; W [ i ] [ 2 ] = t2 ; t3 ^= t2 ; W [ i ] [ 3 ] = t3 ; u = subWord ( t3 ) ; t4 ^= u ; W [ i + 1 ] [ 0 ] = t4 ; t5 ^= t4 ; W [ i + 1 ] [ 1 ] = t5 ; t6 ^= t5 ; W [ i + 1 ] [ 2 ] = t6 ; t7 ^= t6 ; W [ i + 1 ] [ 3 ] = t7 ; } u = subWord ( shift ( t7 , 8 ) ) ^ rcon ; t0 ^= u ; W [ 14 ] [ 0 ] = t0 ; t1 ^= t0 ; W [ 14 ] [ 1 ] = t1 ; t2 ^= t1 ; W [ 14 ] [ 2 ] = t2 ; t3 ^= t2 ; W [ 14 ] [ 3 ] = t3 ; break ; } default : { throw new IllegalStateException ( "Should never get here" ) ; } } if ( ! forEncryption ) { for ( int j = 1 ; j < ROUNDS ; j ++ ) { for ( int i = 0 ; i < 4 ; i ++ ) { W [ j ] [ i ] = inv_mcol ( W [ j ] [ i ] ) ; } } } return W ;
public class RegExAnnotator { /** * Performs any startup tasks required by this annotator . This implementation reads the * configuration parameters and compiles the regular expressions . */ public void initialize ( UimaContext aContext ) throws ResourceInitializationException { } }
// Startup sequence established by the code below:
// 1. Call super.initialize and grab the UIMA logger and default number formats.
// 2. Tokenize the UIMA datapath into candidate directories.
// 3. Resolve every configured concept file name (wildcard names expand to
//    multiple files; a plain name that cannot be resolved is a hard error),
//    parse each file into Concept objects, and close its stream (a close
//    failure is only logged).
// 4. Warn about duplicate concept names (duplicates are allowed).
// 5. Initialize every parsed concept with the logger.
// Statement order and error handling are left byte-identical.
super . initialize ( aContext ) ; // initialize annotator logger
this . logger = getContext ( ) . getLogger ( ) ; // default initialization for number format
this . floatNumberFormat = NumberFormat . getNumberInstance ( ) ; this . integerNumberFormat = NumberFormat . getIntegerInstance ( ) ; // create a concept file parser object
ConceptFileParser parser = new ConceptFileParser_impl ( ) ; // get UIMA datapath and tokenize it into its elements
StringTokenizer tokenizer = new StringTokenizer ( getContext ( ) . getDataPath ( ) , PATH_SEPARATOR ) ; ArrayList < File > datapathElements = new ArrayList < File > ( ) ; while ( tokenizer . hasMoreTokens ( ) ) { // add datapath elements to the ' datapathElements ' array list
datapathElements . add ( new File ( tokenizer . nextToken ( ) ) ) ; } // try to resolve the concept file names
ArrayList < Concept > concepts = new ArrayList < Concept > ( ) ; for ( int i = 0 ; i < conceptFileNames . length ; i ++ ) { // try to resolve the relative file name with classpath or datapath
String filename = conceptFileNames [ i ] ; List < ConceptFile > cfList = new ArrayList < ConceptFile > ( ) ; if ( containsWildcardChar ( filename ) ) { resolveRelativeWildcardFilePath ( filename , datapathElements , cfList ) ; } else { ConceptFile file = resolveRelativeFilePath ( filename , datapathElements ) ; // if the current concept file wasn ' t found , throw an exception
if ( file == null ) { throw new RegexAnnotatorConfigException ( "regex_annotator_resource_not_found" , new Object [ ] { conceptFileNames [ i ] } ) ; } cfList . add ( file ) ; // log concept file path
this . logger . logrb ( Level . CONFIG , "RegExAnnotator" , "initialize" , MESSAGE_DIGEST , "regex_annotator_rule_set_file" , new Object [ ] { file . getFilePath ( ) } ) ; } for ( ConceptFile file : cfList ) { // parse concept file to internal objects
Concept [ ] currentConcepts = parser . parseConceptFile ( file . getFilePath ( ) , file . getStream ( ) ) ; try { file . getStream ( ) .
close ( ) ; } catch ( IOException e ) { this . logger . logrb ( Level . WARNING , "RegExAnnotator" , "initialize" , MESSAGE_DIGEST , "regex_annotator_error_closing_input_stream" , new Object [ ] { file . getFilePath ( ) , e . getMessage ( ) } ) ; } // add all concepts to the concepts list
for ( int c = 0 ; c < currentConcepts . length ; c ++ ) { concepts . add ( currentConcepts [ c ] ) ; } } } // get one array that contains all the concepts
this . regexConcepts = concepts . toArray ( new Concept [ ] { } ) ; // check duplicate concept names
HashSet < String > conceptNames = new HashSet < String > ( this . regexConcepts . length ) ; for ( int i = 0 ; i < this . regexConcepts . length ; i ++ ) { String name = this . regexConcepts [ i ] . getName ( ) ; // check if concept name was set , if not , skip concept
if ( name == null ) { continue ; } // concept name was set , check for duplicate concept names
// duplicate concept names can occurs , just log a warning !
if ( conceptNames . contains ( name ) ) { this . logger . logrb ( Level . WARNING , "RegExAnnotator" , "initialize" , MESSAGE_DIGEST , "regex_annotator_warning_duplicate_concept_name" , new Object [ ] { name } ) ; } else { // add concept name to the concept name list
conceptNames . add ( name ) ; } } // initialize the regex concepts
for ( int i = 0 ; i < this . regexConcepts . length ; i ++ ) { ( ( Concept_impl ) this . regexConcepts [ i ] ) . initialize ( this . logger ) ; }
public class Annotate { /** * Attribute and store a semantic representation of the annotation tree { @ code tree } into the * tree . attribute field . * @ param tree the tree representing an annotation * @ param expectedAnnotationType the expected ( super ) type of the annotation * @ param env the current env in where the annotation instance is found */ public Attribute . Compound attributeAnnotation ( JCAnnotation tree , Type expectedAnnotationType , Env < AttrContext > env ) { } }
// The attribute might have been entered if it is Target or Repetable
// Because TreeCopier does not copy type , redo this if type is null
if ( tree . attribute != null && tree . type != null ) return tree . attribute ;
// Attribute the element/value pairs, build the Compound, and cache it on
// the tree so repeated attribution returns the same instance.
List < Pair < MethodSymbol , Attribute > > elems = attributeAnnotationValues ( tree , expectedAnnotationType , env ) ;
Attribute . Compound ac = new Attribute . Compound ( tree . type , elems ) ;
return tree . attribute = ac ;
public class GraphObjectModificationState { /** * Update * relationship * changelog for Verb . create */ public void updateChangeLog ( final Principal user , final Verb verb , final String linkType , final String linkId , final String sourceUuid , final String targetUuid ) { } }
// Only record anything when at least one changelog flavor is enabled.
if ( ( Settings . ChangelogEnabled . getValue ( ) || Settings . UserChangelogEnabled . getValue ( ) ) ) {
    // One JSON object per relationship event: timestamp, acting user, verb,
    // relationship type/id and the two endpoint UUIDs.
    final JsonObject obj = new JsonObject ( ) ;
    obj . add ( "time" , toElement ( System . currentTimeMillis ( ) ) ) ;
    obj . add ( "userId" , toElement ( user . getUuid ( ) ) ) ;
    obj . add ( "userName" , toElement ( user . getName ( ) ) ) ;
    obj . add ( "verb" , toElement ( verb ) ) ;
    obj . add ( "rel" , toElement ( linkType ) ) ;
    obj . add ( "relId" , toElement ( linkId ) ) ;
    obj . add ( "source" , toElement ( sourceUuid ) ) ;
    obj . add ( "target" , toElement ( targetUuid ) ) ;
    // Global changelog gets the full record, newline-delimited.
    if ( Settings . ChangelogEnabled . getValue ( ) ) {
        changeLog . append ( obj . toString ( ) ) ;
        changeLog . append ( "\n" ) ;
    }
    if ( Settings . UserChangelogEnabled . getValue ( ) ) {
        // remove user for user - centric logging to reduce redundancy
        obj . remove ( "userId" ) ;
        obj . remove ( "userName" ) ;
        appendUserChangelog ( user . getUuid ( ) , obj . toString ( ) ) ;
    }
}
public class SftpClient { /** * Create an OutputStream for writing to a remote file . * @ param remotefile * @ return OutputStream * @ throws SftpStatusException * @ throws SshException */ public OutputStream getOutputStream ( String remotefile ) throws SftpStatusException , SshException { } }
// Resolve the name against the current remote directory, then open the file
// for writing — creating it if absent and truncating any existing content
// (the exact flag combination the SFTP subsystem expects for overwrite).
String remotePath = resolveRemotePath(remotefile);
int openFlags = SftpSubsystemChannel.OPEN_CREATE
        | SftpSubsystemChannel.OPEN_TRUNCATE
        | SftpSubsystemChannel.OPEN_WRITE;
return new SftpFileOutputStream(sftp.openFile(remotePath, openFlags));
public class CentroidCluster { /** * { @ inheritDoc } */ public void merge ( Cluster < T > other ) { } }
// Fold the other cluster into this one: add its centroid and each of its
// data points into our centroid vector, then OR in its data point ids.
// NOTE(review): adding BOTH the other's centroid and all of its points
// double-counts if centroid() is already the sum of those points — confirm
// the intended centroid representation of this class.
VectorMath . add ( centroid , other . centroid ( ) ) ;
for ( T otherDataPoint : other . dataPointValues ( ) )
    VectorMath . add ( centroid , otherDataPoint ) ;
// Standard BitSet walk via nextSetBit to copy the assignment ids.
for ( int i = other . dataPointIds ( ) . nextSetBit ( 0 ) ; i >= 0 ; i = other . dataPointIds ( ) . nextSetBit ( i + 1 ) )
    assignments . set ( i ) ;
public class VideoDevice { /** * This method returns a { @ link RGBFrameGrabber } associated with this * video device . Captured frames will be RGB24 - encoded before being handed * out . The video device must support an appropriate image format that v4l4j * can convert to RGB24 . If it does not , this method will throw an * { @ link ImageFormatException } . To check if RGB24 - encoding is possible , * call { @ link # supportRGBConversion ( ) } . Among all the image formats the * video device supports , v4l4j will choose the first one that can be RGB24 * encoded . If you prefer to specify which image format is to be used , call * { @ link # getRGBFrameGrabber ( int , int , int , int , ImageFormat ) } instead . * This is sometimes required because some video device have a lower frame * rate with some image formats , and a higher one with others . So far , * testing is the only way to find out . The returned { @ link RGBFrameGrabber } * must be released when no longer used by calling * { @ link # releaseFrameGrabber ( ) } . < br > < b > If RGBFrameGrabbers cannot be * created for your video device , please let the author know about it so * RGB24 - encoding can be added . See the README file on how to submit * reports . < / b > * @ param w the desired frame width . This value may be adjusted to the * closest supported by hardware . * @ param h the desired frame height . This value may be adjusted to the * closest supported by hardware . * @ param input the input index , as returned by { @ link InputInfo # getIndex ( ) } * @ param std the video standard , as returned by * { @ link InputInfo # getSupportedStandards ( ) } ( see { @ link V4L4JConstants } ) . * @ return a { @ link RGBFrameGrabber } associated with this video device , if * supported . * @ throws VideoStandardException if the chosen video standard is not * supported * @ throws ImageFormatException if this video device does not have an image * format that can be RGB24 - encoded .
< b > If you encounter such device , please * let the author know so support for it can be added . See the README file * on how to submit reports . < / b > * @ throws CaptureChannelException if the given channel number value is not * valid * @ throws ImageDimensionException if the given image dimensions are not * supported * @ throws InitialisationException if the video device file can not be * initialised * @ throws V4L4JException if there is an error applying capture parameters * @ throws StateException if a { @ link FrameGrabber } already exists and must * be released before a RGBFrameGrabber can be allocated , or if the * < code > VideoDevice < / code > has been released . */ public RGBFrameGrabber getRGBFrameGrabber ( int w , int h , int input , int std ) throws V4L4JException { } }
// Convenience overload: a null ImageFormat lets v4l4j pick the first device
// format it can convert to RGB24.
return getRGBFrameGrabber ( w , h , input , std , null ) ;
public class PeasyRecyclerView { /** * Remove all added { @ link RecyclerView . ItemDecoration } */ public final void resetItemDecorations ( ) { } }
// Remove decorations from the END toward the front. The original iterated
// forward while re-reading getItemDecorationCount(): each removal shifts the
// remaining indices down and shrinks the count, so every other decoration
// was skipped and some were left attached. Iterating backwards removes all.
for (int i = getRecyclerView().getItemDecorationCount() - 1; i >= 0; i--) {
    try {
        getRecyclerView().removeItemDecoration(getRecyclerView().getItemDecorationAt(i));
    } catch (Exception ignored) {
        // Best-effort, as in the original: a decoration that can no longer
        // be resolved is simply skipped.
    }
}
public class JFapUtils { /** * Copies up to ' amount ' bytes from ' src ' to ' dst ' . Returns the number of bytes * actually copied . */ public static int copyWsByteBuffer ( WsByteBuffer src , WsByteBuffer dst , int amount ) { } }
// The copy size is min(amount, src.remaining(), dst.remaining()) — the same
// value the original computed with two sequential clamps.
int amountCopied = Math.min(amount, Math.min(src.remaining(), dst.remaining()));
if (amountCopied > 0) {
    // Temporarily clamp src's limit so the bulk put cannot overrun dst,
    // then restore it so the caller sees only the position advance.
    int savedLimit = src.limit();
    src.limit(src.position() + amountCopied);
    dst.put(src);
    src.limit(savedLimit);
}
return amountCopied;
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getIDEStructureFLAGS ( ) { } }
// Generated EMF accessor: lazily resolve the IDEStructure FLAGS enum from
// the registered package (classifier index 119) and cache it in the field.
if ( ideStructureFLAGSEEnum == null ) {
    ideStructureFLAGSEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 119 ) ;
}
return ideStructureFLAGSEEnum ;
public class ContentSpec { /** * Set the publication number for the Content Specification . * @ param pubsNumber The publication number . */ public void setPubsNumber ( final Integer pubsNumber ) { } }
// Four-way state transition between "no value stored" and "value stored":
if ( pubsNumber == null && this . pubsNumber == null ) {
    // null -> null: nothing to do.
    return ;
} else if ( pubsNumber == null ) {
    // set -> null: detach and forget the existing child node.
    removeChild ( this . pubsNumber ) ;
    this . pubsNumber = null ;
} else if ( this . pubsNumber == null ) {
    // null -> set: create the key/value node and append it.
    this . pubsNumber = new KeyValueNode < Integer > ( CommonConstants . CS_PUBSNUMBER_TITLE , pubsNumber ) ;
    appendChild ( this . pubsNumber , false ) ;
} else {
    // set -> set: update the existing node in place.
    this . pubsNumber . setValue ( pubsNumber ) ;
}
public class QYTagAPI { /**
 * Add members to a tag. The user list and party list are both optional, but
 * they must not both be empty.
 * @ param tagid target tag id; required
 * @ param users list of enterprise member ids; at most 1000 per request
 * @ param partys list of enterprise department ids; at most 100 per request
 * @ return the operation result
 */ public AddTagUsersResponse addTagUsers ( Integer tagid , List < String > users , List < Integer > partys ) { } }
// Validate arguments first. The (Chinese) exception messages are part of the
// runtime contract and are kept verbatim; they say: "tagid must not be
// empty", "userlist and partylist must not both be empty", and the two size
// limits (1000 users / 100 parties per request).
BeanUtil . requireNonNull ( tagid , "tagid不能为空!" ) ;
if ( ( users == null || users . size ( ) == 0 ) && ( partys == null || partys . size ( ) == 0 ) ) {
    throw new WeixinException ( "userlist、partylist不能同时为空!" ) ;
}
if ( users != null && users . size ( ) > 1000 ) {
    throw new WeixinException ( "userlist单次请求长度不能大于1000" ) ;
}
if ( partys != null && partys . size ( ) > 100 ) {
    throw new WeixinException ( "partylist单次请求长度不能大于100" ) ;
}
AddTagUsersResponse response ;
String url = BASE_API_URL + "cgi-bin/tag/addtagusers?access_token=#" ;
final Map < String , Object > params = new HashMap < String , Object > ( ) ;
params . put ( "tagid" , tagid ) ;
params . put ( "userlist" , users ) ;
params . put ( "partylist" , partys ) ;
BaseResponse r = executePost ( url , JSONUtil . toJson ( params ) ) ;
// On a success errcode the JSON payload is carried in errmsg; otherwise the
// whole base response is serialized and deserialized into the result type.
String jsonResult = isSuccess ( r . getErrcode ( ) ) ? r . getErrmsg ( ) : r . toJsonString ( ) ;
response = JSONUtil . toBean ( jsonResult , AddTagUsersResponse . class ) ;
return response ;
public class Metrics { /** * Creates a new counter for a particular class and method for a specific application . * @ param appid the application that invoked the request * @ param clazz the Class to be counted * @ param names one or more unique names to identify the counter - usually a method name * @ return a counter */ public static Counter counter ( String appid , Class < ? > clazz , String ... names ) { } }
String className = getClassName ( clazz ) ; return getCounter ( App . isRoot ( appid ) ? SYSTEM_METRICS_NAME : appid , className , names ) ;
public class ExpressionResolver { /** * Resolve property of bean . * @ param object * Bean . * @ param propertyNames * Names of properties to resolve recursively . * @ return Value of property . */ public Object resolveProperty ( Object object , List < String > propertyNames ) { } }
// Always wrap base object . Object value = resolver . wrap ( object ) ; for ( String propertyName : propertyNames ) { if ( value == null ) { return null ; } value = resolver . getValue ( elContext , value , propertyName ) ; } return value ;
public class EvaluatorRestartInfo { /** * Creates an { @ link EvaluatorRestartInfo } object that represents the information of an evaluator that * has failed on driver restart . */ public static EvaluatorRestartInfo createFailedEvaluatorInfo ( final String evaluatorId ) { } }
final ResourceRecoverEvent resourceRecoverEvent = ResourceEventImpl . newRecoveryBuilder ( ) . setIdentifier ( evaluatorId ) . build ( ) ; return new EvaluatorRestartInfo ( resourceRecoverEvent , EvaluatorRestartState . FAILED ) ;
public class ComputationGraph { /** * Generate the output for all examples / batches in the input iterator , and concatenate them into a single array . * Can only be used with ComputationGraphs with 1 output * @ param iterator Data to pass through the network * @ return output for all examples in the iterator */ public INDArray outputSingle ( MultiDataSetIterator iterator ) { } }
Preconditions . checkArgument ( numOutputArrays == 1 , "Cannot use this method with nets that have more" + " than 1 output array. This network has %s outputs" , numOutputArrays ) ; return output ( iterator ) [ 0 ] ;
public class FSNamesystem {
    /**
     * Remove an already-decommissioned data node, which is in neither the
     * include nor the exclude hosts list, from the list of live or dead
     * nodes. This is used so an already-decommissioned data node is not
     * displayed to operators.
     *
     * The procedure for making an already-decommissioned data node invisible:
     * <ol>
     * <li>Host must have been in the include hosts list and the include
     * hosts list must not be empty.</li>
     * <li>Host is decommissioned by remaining in the include hosts list and
     * being added to the exclude hosts list. The name node is updated with
     * the new information by issuing dfsadmin -refreshNodes.</li>
     * <li>Host is removed from both include and exclude hosts lists. The
     * name node is updated again via dfsadmin -refreshNodes.</li>
     * </ol>
     *
     * @param nodeList array list of live or dead nodes.
     */
    void removeDecommissionedNodeFromList(ArrayList<DatanodeDescriptor> nodeList) {
        // If the include list is empty, any nodes are welcomed and it does not
        // make sense to exclude any nodes from the cluster. Therefore, no remove.
        if (hostsReader.getHosts().isEmpty()) {
            return;
        }
        for (Iterator<DatanodeDescriptor> it = nodeList.iterator(); it.hasNext();) {
            DatanodeDescriptor node = it.next();
            if ((!inHostsList(node, null)) && (!inExcludedHostsList(node, null)) && node.isDecommissioned()) {
                // Include list is not empty, an existing datanode does not appear
                // in either the include or exclude lists and it has been
                // decommissioned. Remove it from the node list via the iterator
                // (safe removal while iterating).
                it.remove();
            }
        }
    }
}
public class PreconditionUtil {
    /**
     * Asserts that a condition is false. If it is true, an
     * {@link AssertionError} with the given (formatted) message is thrown.
     *
     * @param condition condition expected to be false
     * @param message   the identifying message for the {@link AssertionError}
     *                  (<code>null</code> okay)
     * @param args      format arguments for the message
     */
    public static void assertFalse(boolean condition, String message, Object... args) {
        // Delegate to verify() with the negated condition.
        verify(!condition, message, args);
    }
}
public class FastItemAdapter {
    /**
     * Adds an array of items at the given position within the existing items.
     *
     * @param position the global position
     * @param items    the items to add
     * @return this adapter for chaining
     */
    @SafeVarargs
    public final FastItemAdapter<Item> add(int position, Item... items) {
        // Pure delegation to the wrapped item adapter.
        getItemAdapter().add(position, items);
        return this;
    }
}
public class BigSegmentHeader {
    /**
     * Tries to acquire the segment write lock, spinning for roughly
     * {@code timeInMillis} milliseconds. The budget is decremented only when
     * the wall clock ticks to a new value, which makes the timer insensitive
     * to jumps in time such as GCs and context switches (a long pause
     * consumes at most one unit of budget).
     *
     * @param address       base address of the segment header
     * @param timeInMillis  approximate spin budget in milliseconds
     * @param interruptible whether to honor thread interruption while spinning
     * @return true if the write lock was acquired, false on timeout
     * @throws InterruptedException if interruptible and the thread is interrupted
     */
    private static boolean tryWriteLockMillis(long address, long timeInMillis, boolean interruptible) throws InterruptedException {
        long lastTime = System.currentTimeMillis();
        // Announce this thread is waiting on the lock before spinning.
        registerWait(address);
        try {
            do {
                // On success the lock call also deregisters the wait.
                if (LOCK.tryWriteLockAndDeregisterWait(A, null, address + LOCK_OFFSET))
                    return true;
                checkInterrupted(interruptible);
                ThreadHints.onSpinWait();
                long now = System.currentTimeMillis();
                if (now != lastTime) {
                    // Clock ticked: consume exactly one millisecond of budget,
                    // regardless of how large the jump actually was.
                    lastTime = now;
                    timeInMillis--;
                }
            } while (timeInMillis >= 0);
            // Timed out: undo the wait registration before reporting failure.
            deregisterWait(address);
            return false;
        } catch (Throwable t) {
            // Ensure the wait registration is cleaned up on any failure path.
            throw tryDeregisterWaitAndRethrow(address, t);
        }
    }
}
public class ConsulClient {
    /**
     * Health API: returns the health checks registered for the given node.
     *
     * @param nodeName    the node to query
     * @param queryParams additional Consul query parameters
     * @return the node's checks wrapped in a Consul {@code Response}
     */
    @Override
    public Response<List<com.ecwid.consul.v1.health.model.Check>> getHealthChecksForNode(String nodeName, QueryParams queryParams) {
        // Pure delegation to the underlying health client.
        return healthClient.getHealthChecksForNode(nodeName, queryParams);
    }
}
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Lazily resolves the IfcCurveInterpolationEnum {@code EEnum} from the
     * registered Ifc4 package and caches it for subsequent calls.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EEnum getIfcCurveInterpolationEnum() {
        if (ifcCurveInterpolationEnumEEnum == null) {
            // Classifier index 954 is fixed by the generated package model.
            ifcCurveInterpolationEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(954);
        }
        return ifcCurveInterpolationEnumEEnum;
    }
}
public class ProcessEngineConfigurationImpl {
    /**
     * Initializes every engine service, wiring each one to this
     * configuration via {@code initService}.
     */
    protected void initServices() {
        initService(repositoryService);
        initService(runtimeService);
        initService(historyService);
        initService(identityService);
        initService(taskService);
        initService(formService);
        initService(managementService);
        initService(authorizationService);
        initService(caseService);
        initService(filterService);
        initService(externalTaskService);
        initService(decisionService);
        initService(optimizeService);
    }
}
public class JettyBootstrap { /** * Add Handler * @ param handler * Jetty Handler * @ return Handler * @ throws JettyBootstrapException * on failed */ public Handler addHandler ( Handler handler ) throws JettyBootstrapException { } }
JettyHandler jettyHandler = new JettyHandler ( ) ; jettyHandler . setHandler ( handler ) ; handlers . addHandler ( handler ) ; return handler ;
public class AntRunBuilder { /** * Multiple build target names to call . * @ param targets */ public AntRunBuilder targets ( String ... targets ) { } }
action . setTargets ( StringUtils . collectionToCommaDelimitedString ( Arrays . asList ( targets ) ) ) ; return this ;
public class ByteVector {
    /**
     * Puts an UTF8 string into this byte vector. The byte vector is
     * automatically enlarged if necessary. The string length is encoded in two
     * bytes before the encoded characters, if there is space for that (i.e. if
     * this.length - offset - 2 &gt;= 0).
     *
     * @param stringValue   the String to encode.
     * @param offset        the index of the first character to encode. The
     *                      previous characters are supposed to have already
     *                      been encoded, using only one byte per character.
     * @param maxByteLength the maximum byte length of the encoded string,
     *                      including the already encoded characters.
     * @return this byte vector.
     */
    final ByteVector encodeUTF8(final String stringValue, final int offset, final int maxByteLength) {
        // First pass: compute the total encoded byte length. Each char takes
        // 1, 2 or 3 bytes following the class-file (modified UTF-8) rules.
        int charLength = stringValue.length();
        int byteLength = offset;
        for (int i = offset; i < charLength; ++i) {
            char charValue = stringValue.charAt(i);
            if (charValue >= '\u0001' && charValue <= '\u007F') {
                byteLength++;
            } else if (charValue <= '\u07FF') {
                byteLength += 2;
            } else {
                byteLength += 3;
            }
        }
        if (byteLength > maxByteLength) {
            throw new IllegalArgumentException();
        }
        // Compute where 'byteLength' must be stored in 'data', and store it at this location.
        int byteLengthOffset = length - offset - 2;
        if (byteLengthOffset >= 0) {
            data[byteLengthOffset] = (byte) (byteLength >>> 8);
            data[byteLengthOffset + 1] = (byte) byteLength;
        }
        // Grow the backing array if the encoded bytes won't fit.
        if (length + byteLength - offset > data.length) {
            enlarge(byteLength - offset);
        }
        // Second pass: emit the encoded bytes.
        int currentLength = length;
        for (int i = offset; i < charLength; ++i) {
            char charValue = stringValue.charAt(i);
            if (charValue >= '\u0001' && charValue <= '\u007F') {
                // 1 byte: 0xxxxxxx
                data[currentLength++] = (byte) charValue;
            } else if (charValue <= '\u07FF') {
                // 2 bytes: 110xxxxx 10xxxxxx (also covers '\u0000')
                data[currentLength++] = (byte) (0xC0 | charValue >> 6 & 0x1F);
                data[currentLength++] = (byte) (0x80 | charValue & 0x3F);
            } else {
                // 3 bytes: 1110xxxx 10xxxxxx 10xxxxxx
                data[currentLength++] = (byte) (0xE0 | charValue >> 12 & 0xF);
                data[currentLength++] = (byte) (0x80 | charValue >> 6 & 0x3F);
                data[currentLength++] = (byte) (0x80 | charValue & 0x3F);
            }
        }
        length = currentLength;
        return this;
    }
}
public class SamlSettingsApi { /** * Upload new metadata . * Adds or updates the specified metadata . * @ param metadataFile The metadata as xml file . ( optional ) * @ param location The region where send metadata . ( optional ) * @ return ApiResponse & lt ; SendMetadataResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < SendMetadataResponse > sendMetadataWithHttpInfo ( File metadataFile , String location ) throws ApiException { } }
com . squareup . okhttp . Call call = sendMetadataValidateBeforeCall ( metadataFile , location , null , null ) ; Type localVarReturnType = new TypeToken < SendMetadataResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class WordRefiner { /** * 将一个词拆分成几个 , 返回null表示不能拆分 * @ param word * @ return */ public static List < Word > split ( Word word ) { } }
String value = GENERIC_TRIE . get ( word . getText ( ) ) ; if ( value == null ) { return null ; } List < Word > words = new ArrayList < > ( ) ; for ( String val : value . split ( "\\s+" ) ) { words . add ( new Word ( val ) ) ; } if ( words . isEmpty ( ) ) { return null ; } return words ;
public class ContentTypeHelper { /** * Check whether the given content - type corresponds to " parseable " text . * @ param contentType * the input content - type * @ return true if this represents text or false if not */ public boolean isTextContentType ( String contentType ) { } }
if ( contentType != null ) { String lowerContentType = contentType . toLowerCase ( ) ; for ( String textContentType : this . parsableContentTypes ) { if ( lowerContentType . startsWith ( textContentType ) ) { return true ; } } } return false ;
public class Cluster {
    /**
     * Frees the specified worker slot by removing it from every assignment
     * that currently occupies it.
     *
     * @param slot the slot to free
     */
    public void freeSlot(WorkerSlot slot) {
        for (final SchedulerAssignmentImpl assignment : this.assignments.values()) {
            if (!assignment.isSlotOccupied(slot)) {
                continue;
            }
            assignment.unassignBySlot(slot);
        }
    }
}
public class GetCurrentMetricDataRequest { /** * The grouping applied to the metrics returned . For example , when grouped by QUEUE , the metrics returned apply to * each queue rather than aggregated for all queues . If you group by CHANNEL , you should include a Channels filter . * The only supported channel is VOICE . * If no < code > Grouping < / code > is included in the request , a summary of < code > CurrentMetrics < / code > is returned . * @ param groupings * The grouping applied to the metrics returned . For example , when grouped by QUEUE , the metrics returned * apply to each queue rather than aggregated for all queues . If you group by CHANNEL , you should include a * Channels filter . The only supported channel is VOICE . < / p > * If no < code > Grouping < / code > is included in the request , a summary of < code > CurrentMetrics < / code > is * returned . * @ return Returns a reference to this object so that method calls can be chained together . * @ see Grouping */ public GetCurrentMetricDataRequest withGroupings ( Grouping ... groupings ) { } }
java . util . ArrayList < String > groupingsCopy = new java . util . ArrayList < String > ( groupings . length ) ; for ( Grouping value : groupings ) { groupingsCopy . add ( value . toString ( ) ) ; } if ( getGroupings ( ) == null ) { setGroupings ( groupingsCopy ) ; } else { getGroupings ( ) . addAll ( groupingsCopy ) ; } return this ;
public class AbstractSchemaGenerator {
    /**
     * Actual schema generation. It recursively resolves container types.
     *
     * @param typeToken       encapsulates the Java type to generate a {@link Schema} for
     * @param knownRecords    set of record names whose schema has already been generated;
     *                        used for recursive class field references
     * @param acceptRecursion whether to tolerate type recursion; if false, an
     *                        UnsupportedTypeException is thrown when a recursive type
     *                        is encountered
     * @return a {@link Schema} representing the given java {@link java.lang.reflect.Type}
     * @throws UnsupportedTypeException if schema generation is not supported for the
     *                                  given java {@link java.lang.reflect.Type}
     */
    @SuppressWarnings("unchecked")
    protected final Schema doGenerate(TypeToken<?> typeToken, Set<String> knownRecords, boolean acceptRecursion) throws UnsupportedTypeException {
        Type type = typeToken.getType();
        Class<?> rawType = typeToken.getRawType();
        // Simple/primitive types map directly to predefined schemas.
        if (SIMPLE_SCHEMAS.containsKey(rawType)) {
            return SIMPLE_SCHEMAS.get(rawType);
        }
        // Enum type, simply use all the enum constants for ENUM schema.
        if (rawType.isEnum()) {
            return Schema.enumWith((Class<Enum<?>>) rawType);
        }
        // Java array, use ARRAY schema.
        if (rawType.isArray()) {
            Schema componentSchema = doGenerate(TypeToken.of(rawType.getComponentType()), knownRecords, acceptRecursion);
            if (rawType.getComponentType().isPrimitive()) {
                return Schema.arrayOf(componentSchema);
            }
            // Object components may be null, hence the union with NULL.
            return Schema.arrayOf(Schema.unionOf(componentSchema, Schema.of(Schema.Type.NULL)));
        }
        if (!(type instanceof Class || type instanceof ParameterizedType)) {
            throw new UnsupportedTypeException("Type " + type + " is not supported. " + "Only Class or ParameterizedType are supported.");
        }
        // Any parameterized Collection class would be represented by ARRAY schema.
        if (Collection.class.isAssignableFrom(rawType)) {
            if (!(type instanceof ParameterizedType)) {
                throw new UnsupportedTypeException("Only supports parameterized Collection type.");
            }
            TypeToken<?> componentType = typeToken.resolveType(((ParameterizedType) type).getActualTypeArguments()[0]);
            Schema componentSchema = doGenerate(componentType, knownRecords, acceptRecursion);
            return Schema.arrayOf(Schema.unionOf(componentSchema, Schema.of(Schema.Type.NULL)));
        }
        // Java Map, use MAP schema.
        if (Map.class.isAssignableFrom(rawType)) {
            if (!(type instanceof ParameterizedType)) {
                throw new UnsupportedTypeException("Only supports parameterized Map type.");
            }
            Type[] typeArgs = ((ParameterizedType) type).getActualTypeArguments();
            TypeToken<?> keyType = typeToken.resolveType(typeArgs[0]);
            TypeToken<?> valueType = typeToken.resolveType(typeArgs[1]);
            Schema valueSchema = doGenerate(valueType, knownRecords, acceptRecursion);
            // Map values may be null; key schema is used as-is.
            return Schema.mapOf(doGenerate(keyType, knownRecords, acceptRecursion), Schema.unionOf(valueSchema, Schema.of(Schema.Type.NULL)));
        }
        // Any Java class, class name as the record name.
        String recordName = typeToken.getRawType().getName();
        if (knownRecords.contains(recordName)) {
            // Record already seen before
            if (acceptRecursion) {
                // simply create a reference RECORD schema by the name.
                return Schema.recordOf(recordName);
            } else {
                throw new UnsupportedTypeException("Recursive type not supported for class " + recordName);
            }
        }
        // Delegate to child class to generate RECORD schema.
        return generateRecord(typeToken, knownRecords, acceptRecursion);
    }
}
public class ResponseToRoomImpl { /** * / * ( non - Javadoc ) * @ see com . tvd12 . ezyfox . core . command . ResponseToRoom # ignore ( java . lang . String [ ] ) */ @ Override public ResponseToRoom ignore ( String ... params ) { } }
excludedVars . addAll ( Arrays . asList ( params ) ) ; return this ;
public class SQLDatabaseWriter {
    /**
     * Processes the given DiffTask: encodes it into SQL queries and executes
     * them one by one against the consumer database.
     *
     * @param task the DiffTask to process
     * @throws ConfigurationException if problems occurred while initializing the components
     * @throws IOException            if problems occurred while writing the output (to file or archive)
     * @throws SQLConsumerException   if problems occurred while writing to the sql producer database
     */
    @Override
    public void process(final Task<Diff> task) throws ConfigurationException, IOException, SQLConsumerException {
        // Index of the query currently executing; kept outside the try so the
        // SQLException handler can identify the offending query.
        int i = -1;
        SQLEncoding[] queries = null;
        try {
            queries = sqlEncoder.encodeTask(task);
            Statement query;
            int size = queries.length;
            for (i = 0; i < size; i++) {
                // One fresh statement per query, closed right after execution.
                query = connection.createStatement();
                query.executeUpdate(queries[i].getQuery());
                query.close();
            }
            // System.out.println(task.toString());
        } catch (SQLException e) {
            // Build a best-effort description of the failing query.
            String q;
            if (queries == null || queries.length <= i || queries[i] == null) {
                q = "<unidentified query>";
            } else {
                q = queries[i].toString();
            }
            throw ErrorFactory.createSQLConsumerException(ErrorKeys.DIFFTOOL_SQLCONSUMER_DATABASEWRITER_EXCEPTION, q, e);
        } catch (DecodingException e) {
            throw ErrorFactory.createSQLConsumerException(ErrorKeys.DIFFTOOL_SQLCONSUMER_DATABASEWRITER_EXCEPTION, e);
        } catch (EncodingException e) {
            // NOTE(review): this branch uses the FILEWRITER error key inside
            // the database writer - looks like a copy-paste slip; confirm the
            // intended key before changing it.
            throw ErrorFactory.createSQLConsumerException(ErrorKeys.DIFFTOOL_SQLCONSUMER_FILEWRITER_EXCEPTION, e);
        }
    }
}
public class aaauser_vpnintranetapplication_binding {
    /**
     * Use this API to fetch aaauser_vpnintranetapplication_binding resources
     * of the given name.
     *
     * @param service  the NITRO service to query
     * @param username the user whose bindings are fetched
     * @return the bindings for that user
     * @throws Exception on API failure
     */
    public static aaauser_vpnintranetapplication_binding[] get(nitro_service service, String username) throws Exception {
        // Build a filter object keyed by username and fetch matching resources.
        aaauser_vpnintranetapplication_binding obj = new aaauser_vpnintranetapplication_binding();
        obj.set_username(username);
        aaauser_vpnintranetapplication_binding response[] = (aaauser_vpnintranetapplication_binding[]) obj.get_resources(service);
        return response;
    }
}
public class ProxyQueueConversationGroupImpl {
    /**
     * Creates a new asynchronous consumer proxy queue for this group and
     * registers it under a freshly allocated id. An ordered session queue is
     * built when an OrderingContext is supplied; otherwise a non-read-ahead
     * session queue is used.
     *
     * @param oc the ordering context, or null for an unordered queue
     * @throws SIResourceException
     * @throws SIIncorrectCallException
     * @see com.ibm.ws.sib.comms.client.proxyqueue.ProxyQueueConversationGroup#createAsynchConsumerProxyQueue()
     */
    public synchronized AsynchConsumerProxyQueue createAsynchConsumerProxyQueue(OrderingContext oc) throws SIResourceException, SIIncorrectCallException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createAsynchConsumerProxyQueue");
        short id = nextId();
        // begin D249096
        AsynchConsumerProxyQueue proxyQueue = null;
        if (oc == null) {
            proxyQueue = new NonReadAheadSessionProxyQueueImpl(this, id, conversation);
        } else {
            proxyQueue = new OrderedSessionProxyQueueImpl(this, id, conversation, oc);
        }
        // end D249096
        // Register the queue so incoming messages can be routed to it by id.
        idToProxyQueueMap.put(new ImmutableId(id), proxyQueue);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createAsynchConsumerProxyQueue", proxyQueue);
        return proxyQueue;
    }
}
public class ExternalServiceAlertConditionService { /** * Returns the set of alert conditions for the given policy id and name . * @ param policyId The id of the alert policy to return the conditions for * @ param name The name of the conditions * @ return The set of alert conditions */ public Collection < ExternalServiceAlertCondition > list ( long policyId , String name ) { } }
List < ExternalServiceAlertCondition > ret = new ArrayList < ExternalServiceAlertCondition > ( ) ; Collection < ExternalServiceAlertCondition > conditions = list ( policyId ) ; for ( ExternalServiceAlertCondition condition : conditions ) { if ( condition . getName ( ) . equals ( name ) ) ret . add ( condition ) ; } return ret ;
public class FeatureWebSecurityConfigImpl { /** * { @ inheritDoc } */ @ Override public boolean getSSORequiresSSL ( ) { } }
WebAppSecurityConfig globalConfig = WebAppSecurityCollaboratorImpl . getGlobalWebAppSecurityConfig ( ) ; if ( globalConfig != null ) return WebAppSecurityCollaboratorImpl . getGlobalWebAppSecurityConfig ( ) . getSSORequiresSSL ( ) ; else return ssoRequiresSSL ;
public class HelpTopic { /** * Returns true if the topic is considered a duplicate . * @ param topic Topic to compare . * @ return True if a duplicate . */ public boolean isDuplicate ( HelpTopic topic ) { } }
return ObjectUtils . equals ( url , topic . url ) && compareTo ( topic ) == 0 ;
public class CharacterParser {
    /**
     * Returns whether the given character is a punctuator token, i.e. one of:
     * . ( ) ; , { } [ ] : ? ~ % &amp; * + - / &lt; &gt; ^ | ! =
     *
     * @param character the character to test (must not be null)
     * @return true if the character is a punctuator
     */
    public boolean isPunctuator(Character character) {
        // Switch on the char value directly instead of allocating a String and
        // a boxed code point (Character.codePointAt(character.toString(), 0));
        // behavior is identical for every char value.
        switch (character.charValue()) {
            case '.':
            case '(':
            case ')':
            case ';':
            case ',':
            case '{':
            case '}':
            case '[':
            case ']':
            case ':':
            case '?':
            case '~':
            case '%':
            case '&':
            case '*':
            case '+':
            case '-':
            case '/':
            case '<':
            case '>':
            case '^':
            case '|':
            case '!':
            case '=':
                return true;
            default:
                return false;
        }
    }
}
public class WeightedLinkClustering { /** * Computes the similarity of the graph ' s edges and merges them until the * specified number of clusters has been reached . * @ param numClusters the number of clusters to return */ public < E extends WeightedEdge > MultiMap < Integer , Integer > cluster ( final WeightedGraph < E > graph , int numClusters , Properties props ) { } }
if ( props . getProperty ( KEEP_WEIGHT_VECTORS_PROPERTY ) != null ) keepWeightVectors = Boolean . parseBoolean ( props . getProperty ( KEEP_WEIGHT_VECTORS_PROPERTY ) ) ; vertexToWeightVector . clear ( ) ; return super . cluster ( graph , numClusters , props ) ;
public class SymmetryTools {
    /**
     * Converts a repeats symmetric alignment into an alignment of whole
     * structures. Example: if the structure has repeats A, B and C, the
     * original alignment is A-B-C, and the returned alignment is
     * ABC-BCA-CAB.
     *
     * @param symm CeSymmResult; must be refined
     * @return MultipleAlignment of the full structure superpositions
     */
    public static MultipleAlignment toFullAlignment(CeSymmResult symm) {
        if (!symm.isRefined())
            throw new IllegalArgumentException("The symmetry result " + "is not refined, repeats cannot be defined");
        MultipleAlignment full = symm.getMultipleAlignment().clone();
        for (int str = 1; str < full.size(); str++) {
            // Create a new Block with swapped AlignRes (move first to last),
            // cloning the most recently appended block so each iteration
            // rotates one step further (A-B-C -> B-C-A -> C-A-B ...).
            Block b = full.getBlock(full.getBlocks().size() - 1).clone();
            b.getAlignRes().add(b.getAlignRes().get(0));
            b.getAlignRes().remove(0);
            full.getBlockSet(0).getBlocks().add(b);
        }
        return full;
    }
}
public class CmsRelationFilter { /** * Returns an extended filter with the given type restriction . < p > * @ param type the relation type to filter * @ return an extended filter with the given type restriction */ public CmsRelationFilter filterType ( CmsRelationType type ) { } }
CmsRelationFilter filter = ( CmsRelationFilter ) clone ( ) ; filter . m_types . add ( type ) ; return filter ;
public class GitkitClient {
    /**
     * Gets the out-of-band response. Used by the oob endpoint for the
     * ResetPassword and ChangeEmail operations. The web site needs to send the
     * user an email containing the oobUrl in the response; the user clicks the
     * oobUrl to finish the operation.
     *
     * @param req         http request for the oob endpoint
     * @param gitkitToken Gitkit token of the authenticated user; required for
     *                    the ChangeEmail operation
     * @return the oob response
     * @throws GitkitServerException
     */
    public OobResponse getOobResponse(HttpServletRequest req, String gitkitToken) throws GitkitServerException {
        try {
            // Dispatch on the requested oob action.
            String action = req.getParameter("action");
            if ("resetPassword".equals(action)) {
                String oobLink = buildOobLink(buildPasswordResetRequest(req), action);
                return new OobResponse(req.getParameter("email"), null, oobLink, OobAction.RESET_PASSWORD);
            } else if ("changeEmail".equals(action)) {
                // ChangeEmail requires an authenticated user.
                if (gitkitToken == null) {
                    return new OobResponse("login is required");
                } else {
                    String oobLink = buildOobLink(buildChangeEmailRequest(req, gitkitToken), action);
                    return new OobResponse(req.getParameter("oldEmail"), req.getParameter("newEmail"), oobLink, OobAction.CHANGE_EMAIL);
                }
            } else {
                return new OobResponse("unknown request");
            }
        } catch (GitkitClientException e) {
            // Client-side errors are reported through the response, not thrown.
            return new OobResponse(e.getMessage());
        }
    }
}
public class SequenceSchema {
    /**
     * Infers a sequence schema based on the record.
     *
     * @param record the record to infer the schema from
     * @return the inferred sequence schema
     */
    public static SequenceSchema inferSequenceMulti(List<List<List<Writable>>> record) {
        SequenceSchema.Builder builder = new SequenceSchema.Builder();
        // Track min/max sequence length across the sub-records, seeded from
        // the first one.
        int minSequenceLength = record.get(0).size();
        int maxSequenceLength = record.get(0).size();
        for (int i = 0; i < record.size(); i++) {
            // NOTE(review): record.get(i) is a List<List<Writable>>, so these
            // instanceof checks against Writable subtypes can never be true
            // and this loop always falls through to the IllegalStateException.
            // Presumably the intent was to inspect an individual Writable
            // (e.g. record.get(0).get(0).get(i)) - confirm against the
            // single-sequence inferSequence implementation.
            if (record.get(i) instanceof DoubleWritable)
                builder.addColumnDouble(String.valueOf(i));
            else if (record.get(i) instanceof IntWritable)
                builder.addColumnInteger(String.valueOf(i));
            else if (record.get(i) instanceof LongWritable)
                builder.addColumnLong(String.valueOf(i));
            else if (record.get(i) instanceof FloatWritable)
                builder.addColumnFloat(String.valueOf(i));
            else
                throw new IllegalStateException("Illegal writable for inferring schema of type " + record.get(i).getClass().toString() + " with record " + record.get(0));
            builder.minSequenceLength(Math.min(record.get(i).size(), minSequenceLength));
            builder.maxSequenceLength(Math.max(record.get(i).size(), maxSequenceLength));
        }
        return builder.build();
    }
}
public class ExportDataSource {
    /**
     * Sets the runnable task that is executed on mastership designation.
     *
     * @param toBeRunOnMastership a {@link Runnable} task; must not be null
     */
    public void setOnMastership(Runnable toBeRunOnMastership) {
        Preconditions.checkNotNull(toBeRunOnMastership, "mastership runnable is null");
        m_onMastership = toBeRunOnMastership;
        // If connector "replicated" property is set to true then every
        // replicated export stream is its own master
        if (m_runEveryWhere) {
            // export stream for run-everywhere clients doesn't need ack mailbox
            m_ackMailboxRefs.set(null);
            // Immediately take mastership for run-everywhere streams.
            acceptMastership();
        }
    }
}
public class AsyncMutateInBuilder {
    /**
     * Sets both a persistence and a replication durability constraint for the
     * whole mutation.
     *
     * @param persistTo   the persistence durability constraint to observe
     * @param replicateTo the replication durability constraint to observe
     * @return this builder for chaining
     */
    public AsyncMutateInBuilder withDurability(PersistTo persistTo, ReplicateTo replicateTo) {
        this.persistTo = persistTo;
        this.replicateTo = replicateTo;
        return this;
    }
}